You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@atlas.apache.org by ma...@apache.org on 2017/11/12 18:14:10 UTC
[01/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplication of type details in cache
Repository: atlas
Updated Branches:
refs/heads/master 0abf84caa -> 435fe3fba
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/integration/MetadataDiscoveryJerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/MetadataDiscoveryJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/MetadataDiscoveryJerseyResourceIT.java
index a13f069..f25fc5f 100755
--- a/webapp/src/test/java/org/apache/atlas/web/integration/MetadataDiscoveryJerseyResourceIT.java
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/MetadataDiscoveryJerseyResourceIT.java
@@ -18,23 +18,16 @@
package org.apache.atlas.web.integration;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.v1.model.typedef.*;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
@@ -42,6 +35,7 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import javax.ws.rs.core.MultivaluedMap;
+import java.util.Collections;
import java.util.List;
import static org.testng.Assert.assertEquals;
@@ -236,16 +230,16 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
private void createTypes() throws Exception {
createTypeDefinitionsV1();
- HierarchicalTypeDefinition<ClassType> dslTestTypeDefinition = TypesUtil
- .createClassTypeDef("dsl_test_type", ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
+ ClassTypeDefinition dslTestTypeDefinition = TypesUtil
+ .createClassTypeDef("dsl_test_type", null, Collections.<String>emptySet(),
+ TypesUtil.createUniqueRequiredAttrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ TypesUtil.createRequiredAttrDef("description", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
- HierarchicalTypeDefinition<TraitType> classificationTraitDefinition = TypesUtil
- .createTraitTypeDef("Classification", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.of(classificationTraitDefinition), ImmutableList.of(dslTestTypeDefinition));
+ TraitTypeDefinition classificationTraitDefinition = TypesUtil
+ .createTraitTypeDef("Classification", null, Collections.<String>emptySet(),
+ TypesUtil.createRequiredAttrDef("tag", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+ TypesDef typesDef = new TypesDef(Collections.<EnumTypeDefinition>emptyList(), Collections.<StructTypeDefinition>emptyList(),
+ Collections.singletonList(classificationTraitDefinition), Collections.singletonList(dslTestTypeDefinition));
createType(typesDef);
}
@@ -259,7 +253,7 @@ public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT {
tagName = randomString();
traitInstance.set("tag", tagName);
- List<String> traits = entityInstance.getTraits();
+ List<String> traits = entityInstance.getTraitNames();
assertEquals(traits.size(), 1);
return createInstance(entityInstance);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java
index c46689c..5ee8d30 100644
--- a/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java
@@ -18,7 +18,6 @@
package org.apache.atlas.web.integration;
-import com.google.common.collect.ImmutableSet;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import org.apache.atlas.AtlasClientV2;
import org.apache.atlas.AtlasServiceException;
@@ -33,7 +32,6 @@ import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef;
import org.apache.atlas.model.typedef.AtlasTypesDef;
import org.apache.atlas.type.AtlasTypeUtil;
-import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
@@ -43,7 +41,9 @@ import org.testng.annotations.Test;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
+import java.util.Arrays;
import java.util.Collections;
+import java.util.HashSet;
import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality;
import static org.apache.atlas.type.AtlasTypeUtil.createClassTypeDef;
@@ -105,7 +105,7 @@ public class TypedefsJerseyResourceIT extends BaseResourceIT {
@Test
public void testDuplicateCreate() throws Exception {
AtlasEntityDef type = createClassTypeDef(randomString(),
- ImmutableSet.<String>of(), AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"));
+ Collections.<String>emptySet(), AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"));
AtlasTypesDef typesDef = new AtlasTypesDef();
typesDef.getEntityDefs().add(type);
@@ -124,7 +124,7 @@ public class TypedefsJerseyResourceIT extends BaseResourceIT {
public void testUpdate() throws Exception {
String entityType = randomString();
AtlasEntityDef typeDefinition =
- createClassTypeDef(entityType, ImmutableSet.<String>of(),
+ createClassTypeDef(entityType, Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"));
AtlasTypesDef atlasTypesDef = new AtlasTypesDef();
@@ -136,7 +136,7 @@ public class TypedefsJerseyResourceIT extends BaseResourceIT {
//Add attribute description
typeDefinition = createClassTypeDef(typeDefinition.getName(),
- ImmutableSet.<String>of(),
+ Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
AtlasTypeUtil.createOptionalAttrDef("description", "string"));
@@ -263,10 +263,10 @@ public class TypedefsJerseyResourceIT extends BaseResourceIT {
@Test
public void testListTypesByFilter() throws Exception {
AtlasAttributeDef attr = AtlasTypeUtil.createOptionalAttrDef("attr", "string");
- AtlasEntityDef classDefA = AtlasTypeUtil.createClassTypeDef("A" + randomString(), ImmutableSet.<String>of(), attr);
- AtlasEntityDef classDefA1 = AtlasTypeUtil.createClassTypeDef("A1" + randomString(), ImmutableSet.of(classDefA.getName()), attr);
- AtlasEntityDef classDefB = AtlasTypeUtil.createClassTypeDef("B" + randomString(), ImmutableSet.<String>of(), attr);
- AtlasEntityDef classDefC = AtlasTypeUtil.createClassTypeDef("C" + randomString(), ImmutableSet.of(classDefB.getName(), classDefA.getName()), attr);
+ AtlasEntityDef classDefA = AtlasTypeUtil.createClassTypeDef("A" + randomString(), Collections.<String>emptySet(), attr);
+ AtlasEntityDef classDefA1 = AtlasTypeUtil.createClassTypeDef("A1" + randomString(), Collections.singleton(classDefA.getName()), attr);
+ AtlasEntityDef classDefB = AtlasTypeUtil.createClassTypeDef("B" + randomString(), Collections.<String>emptySet(), attr);
+ AtlasEntityDef classDefC = AtlasTypeUtil.createClassTypeDef("C" + randomString(), new HashSet<>(Arrays.asList(classDefB.getName(), classDefA.getName())), attr);
AtlasTypesDef atlasTypesDef = new AtlasTypesDef();
atlasTypesDef.getEntityDefs().add(classDefA);
@@ -297,19 +297,19 @@ public class TypedefsJerseyResourceIT extends BaseResourceIT {
AtlasTypesDef atlasTypesDef = new AtlasTypesDef();
AtlasEntityDef databaseTypeDefinition =
- createClassTypeDef("database", ImmutableSet.<String>of(),
+ createClassTypeDef("database", Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
AtlasTypeUtil.createRequiredAttrDef("description", "string"));
atlasTypesDef.getEntityDefs().add(databaseTypeDefinition);
AtlasEntityDef tableTypeDefinition =
- createClassTypeDef("table", ImmutableSet.<String>of(),
+ createClassTypeDef("table", Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
AtlasTypeUtil.createRequiredAttrDef("description", "string"),
- AtlasTypeUtil.createOptionalAttrDef("columnNames", DataTypes.arrayTypeName("string")),
+ AtlasTypeUtil.createOptionalAttrDef("columnNames", AtlasBaseTypeDef.getArrayTypeName("string")),
AtlasTypeUtil.createOptionalAttrDef("created", "date"),
AtlasTypeUtil.createOptionalAttrDef("parameters",
- DataTypes.mapTypeName("string", "string")),
+ AtlasBaseTypeDef.getMapTypeName("string", "string")),
AtlasTypeUtil.createRequiredAttrDef("type", "string"),
new AtlasAttributeDef("database", "database",
false,
@@ -319,7 +319,7 @@ public class TypedefsJerseyResourceIT extends BaseResourceIT {
atlasTypesDef.getEntityDefs().add(tableTypeDefinition);
AtlasClassificationDef fetlTypeDefinition = AtlasTypeUtil
- .createTraitTypeDef("fetl", ImmutableSet.<String>of(),
+ .createTraitTypeDef("fetl", Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("level", "int"));
atlasTypesDef.getClassificationDefs().add(fetlTypeDefinition);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java
index 351e5ae..cd876f4 100755
--- a/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java
@@ -18,23 +18,14 @@
package org.apache.atlas.web.integration;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.json.TypesSerialization$;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.atlas.v1.model.typedef.*;
+import org.apache.atlas.type.AtlasType;
import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
@@ -44,11 +35,9 @@ import org.testng.annotations.Test;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
+import java.util.*;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
+import static org.apache.atlas.v1.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.fail;
@@ -76,9 +65,9 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
public void testSubmit() throws Exception {
for (HierarchicalTypeDefinition typeDefinition : typeDefinitions) {
try{
- atlasClientV1.getType(typeDefinition.typeName);
+ atlasClientV1.getType(typeDefinition.getTypeName());
} catch (AtlasServiceException ase){
- String typesAsJSON = TypesSerialization.toJson(typeDefinition, false);
+ String typesAsJSON = AtlasType.toV1Json(typeDefinition);
System.out.println("typesAsJSON = " + typesAsJSON);
JSONObject response = atlasClientV1.callAPIWithBody(AtlasClient.API_V1.CREATE_TYPE, typesAsJSON);
@@ -87,18 +76,18 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
JSONArray typesAdded = response.getJSONArray(AtlasClient.TYPES);
assertEquals(typesAdded.length(), 1);
- assertEquals(typesAdded.getJSONObject(0).getString(NAME), typeDefinition.typeName);
+ assertEquals(typesAdded.getJSONObject(0).getString(NAME), typeDefinition.getTypeName());
Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));}
}
}
@Test
public void testDuplicateSubmit() throws Exception {
- HierarchicalTypeDefinition<ClassType> type = TypesUtil.createClassTypeDef(randomString(),
- ImmutableSet.<String>of(), TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE));
+ ClassTypeDefinition type = TypesUtil.createClassTypeDef(randomString(), null,
+ Collections.<String>emptySet(), TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING));
TypesDef typesDef =
- TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(), ImmutableList.of(type));
+ new TypesDef(Collections.<EnumTypeDefinition>emptyList(), Collections.<StructTypeDefinition>emptyList(),
+ Collections.<TraitTypeDefinition>emptyList(), Collections.singletonList(type));
atlasClientV1.createType(typesDef);
try {
@@ -111,51 +100,49 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
@Test
public void testUpdate() throws Exception {
- HierarchicalTypeDefinition<ClassType> typeDefinition = TypesUtil
- .createClassTypeDef(randomString(), null, "1.0", ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE));
- List<String> typesCreated = atlasClientV1.createType(TypesSerialization.toJson(typeDefinition, false));
+ ClassTypeDefinition typeDefinition = TypesUtil
+ .createClassTypeDef(randomString(), null, "1.0", Collections.<String>emptySet(),
+ TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+ List<String> typesCreated = atlasClientV1.createType(AtlasType.toV1Json(typeDefinition));
assertEquals(typesCreated.size(), 1);
- assertEquals(typesCreated.get(0), typeDefinition.typeName);
+ assertEquals(typesCreated.get(0), typeDefinition.getTypeName());
//Add attribute description
- typeDefinition = TypesUtil.createClassTypeDef(typeDefinition.typeName, null, "2.0",
- ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- createOptionalAttrDef(DESCRIPTION, DataTypes.STRING_TYPE));
- TypesDef typeDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(typeDefinition));
+ typeDefinition = TypesUtil.createClassTypeDef(typeDefinition.getTypeName(), null, "2.0",
+ Collections.<String>emptySet(),
+ TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ createOptionalAttrDef(DESCRIPTION, AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+ TypesDef typeDef = new TypesDef(Collections.<EnumTypeDefinition>emptyList(), Collections.<StructTypeDefinition>emptyList(), Collections.<TraitTypeDefinition>emptyList(), Collections.singletonList(typeDefinition));
List<String> typesUpdated = atlasClientV1.updateType(typeDef);
assertEquals(typesUpdated.size(), 1);
- Assert.assertTrue(typesUpdated.contains(typeDefinition.typeName));
+ Assert.assertTrue(typesUpdated.contains(typeDefinition.getTypeName()));
- TypesDef updatedTypeDef = atlasClientV1.getType(typeDefinition.typeName);
+ TypesDef updatedTypeDef = atlasClientV1.getType(typeDefinition.getTypeName());
assertNotNull(updatedTypeDef);
- HierarchicalTypeDefinition<ClassType> updatedType = updatedTypeDef.classTypesAsJavaList().get(0);
- assertEquals(updatedType.attributeDefinitions.length, 2);
+ ClassTypeDefinition updatedType = updatedTypeDef.getClassTypes().get(0);
+ assertEquals(updatedType.getAttributeDefinitions().size(), 2);
}
@Test(dependsOnMethods = "testSubmit")
public void testGetDefinition() throws Exception {
for (HierarchicalTypeDefinition typeDefinition : typeDefinitions) {
- System.out.println("typeName = " + typeDefinition.typeName);
+ System.out.println("typeName = " + typeDefinition.getTypeName());
- JSONObject response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.LIST_TYPES, null, typeDefinition.typeName);
+ JSONObject response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.LIST_TYPES, null, typeDefinition.getTypeName());
Assert.assertNotNull(response);
Assert.assertNotNull(response.get(AtlasClient.DEFINITION));
Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));
String typesJson = response.getString(AtlasClient.DEFINITION);
- final TypesDef typesDef = TypesSerialization.fromJson(typesJson);
- List<HierarchicalTypeDefinition<ClassType>> hierarchicalTypeDefinitions = typesDef.classTypesAsJavaList();
- for (HierarchicalTypeDefinition<ClassType> classType : hierarchicalTypeDefinitions) {
- for (AttributeDefinition attrDef : classType.attributeDefinitions) {
- if (NAME.equals(attrDef.name)) {
- assertEquals(attrDef.isIndexable, true);
- assertEquals(attrDef.isUnique, true);
+ final TypesDef typesDef = AtlasType.fromV1Json(typesJson, TypesDef.class);
+ List<ClassTypeDefinition> hierarchicalTypeDefinitions = typesDef.getClassTypes();
+ for (ClassTypeDefinition classType : hierarchicalTypeDefinitions) {
+ for (AttributeDefinition attrDef : classType.getAttributeDefinitions()) {
+ if (NAME.equals(attrDef.getName())) {
+ assertEquals(attrDef.getIsIndexable(), true);
+ assertEquals(attrDef.getIsUnique(), true);
}
}
}
@@ -202,15 +189,15 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
@Test
public void testListTypesByFilter() throws Exception {
- AttributeDefinition attr = TypesUtil.createOptionalAttrDef("attr", DataTypes.STRING_TYPE);
- String a = createType(TypesSerialization.toJson(
- TypesUtil.createClassTypeDef("A" + randomString(), ImmutableSet.<String>of(), attr), false)).get(0);
- String a1 = createType(TypesSerialization.toJson(
- TypesUtil.createClassTypeDef("A1" + randomString(), ImmutableSet.of(a), attr), false)).get(0);
- String b = createType(TypesSerialization.toJson(
- TypesUtil.createClassTypeDef("B" + randomString(), ImmutableSet.<String>of(), attr), false)).get(0);
- String c = createType(TypesSerialization.toJson(
- TypesUtil.createClassTypeDef("C" + randomString(), ImmutableSet.of(a, b), attr), false)).get(0);
+ AttributeDefinition attr = TypesUtil.createOptionalAttrDef("attr", AtlasBaseTypeDef.ATLAS_TYPE_STRING);
+ String a = createType(AtlasType.toV1Json(
+ TypesUtil.createClassTypeDef("A" + randomString(), null, Collections.<String>emptySet(), attr))).get(0);
+ String a1 = createType(AtlasType.toV1Json(
+ TypesUtil.createClassTypeDef("A1" + randomString(), null, Collections.singleton(a), attr))).get(0);
+ String b = createType(AtlasType.toV1Json(
+ TypesUtil.createClassTypeDef("B" + randomString(), null, Collections.<String>emptySet(), attr))).get(0);
+ String c = createType(AtlasType.toV1Json(
+ TypesUtil.createClassTypeDef("C" + randomString(), null, new HashSet<>(Arrays.asList(a, b)), attr))).get(0);
List<String> results = atlasClientV1.listTypes(DataTypes.TypeCategory.CLASS, a, b);
assertEquals(results, Arrays.asList(a1), "Results: " + results);
@@ -220,9 +207,9 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
String[] traitNames = {"class_trait", "secure_trait", "pii_trait", "ssn_trait", "salary_trait", "sox_trait",};
for (String traitName : traitNames) {
- HierarchicalTypeDefinition<TraitType> traitTypeDef =
- TypesUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of());
- String json = TypesSerialization$.MODULE$.toJson(traitTypeDef, true);
+ TraitTypeDefinition traitTypeDef =
+ TypesUtil.createTraitTypeDef(traitName, null, Collections.<String>emptySet());
+ String json = AtlasType.toV1Json(traitTypeDef);
createType(json);
}
@@ -232,29 +219,29 @@ public class TypesJerseyResourceIT extends BaseResourceIT {
private List<HierarchicalTypeDefinition> createHiveTypes() throws Exception {
ArrayList<HierarchicalTypeDefinition> typeDefinitions = new ArrayList<>();
- HierarchicalTypeDefinition<ClassType> databaseTypeDefinition = TypesUtil
- .createClassTypeDef("database", ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef(DESCRIPTION, DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef(QUALIFIED_NAME, DataTypes.STRING_TYPE));
+ ClassTypeDefinition databaseTypeDefinition = TypesUtil
+ .createClassTypeDef("database", null, Collections.<String>emptySet(),
+ TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ TypesUtil.createRequiredAttrDef(DESCRIPTION, AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ TypesUtil.createRequiredAttrDef(QUALIFIED_NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING));
typeDefinitions.add(databaseTypeDefinition);
- HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil
- .createClassTypeDef("table", ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef(DESCRIPTION, DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef(QUALIFIED_NAME, DataTypes.STRING_TYPE),
- createOptionalAttrDef("columnNames", DataTypes.arrayTypeName(DataTypes.STRING_TYPE)),
- createOptionalAttrDef("created", DataTypes.DATE_TYPE),
+ ClassTypeDefinition tableTypeDefinition = TypesUtil
+ .createClassTypeDef("table", null, Collections.<String>emptySet(),
+ TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ TypesUtil.createRequiredAttrDef(DESCRIPTION, AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ TypesUtil.createRequiredAttrDef(QUALIFIED_NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ createOptionalAttrDef("columnNames", AtlasBaseTypeDef.getArrayTypeName(AtlasBaseTypeDef.ATLAS_TYPE_STRING)),
+ createOptionalAttrDef("created", AtlasBaseTypeDef.ATLAS_TYPE_DATE),
createOptionalAttrDef("parameters",
- DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE)),
- TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
+ AtlasBaseTypeDef.getMapTypeName(AtlasBaseTypeDef.ATLAS_TYPE_STRING, AtlasBaseTypeDef.ATLAS_TYPE_STRING)),
+ TypesUtil.createRequiredAttrDef("type", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
new AttributeDefinition("database", "database", Multiplicity.REQUIRED, false, "database"));
typeDefinitions.add(tableTypeDefinition);
- HierarchicalTypeDefinition<TraitType> fetlTypeDefinition = TypesUtil
- .createTraitTypeDef("fetl", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE));
+ TraitTypeDefinition fetlTypeDefinition = TypesUtil
+ .createTraitTypeDef("fetl", null, Collections.<String>emptySet(),
+ TypesUtil.createRequiredAttrDef("level", AtlasBaseTypeDef.ATLAS_TYPE_INT));
typeDefinitions.add(fetlTypeDefinition);
return typeDefinitions;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/service/ActiveInstanceStateTest.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/service/ActiveInstanceStateTest.java b/webapp/src/test/java/org/apache/atlas/web/service/ActiveInstanceStateTest.java
index 7ad2f76..25f39f2 100644
--- a/webapp/src/test/java/org/apache/atlas/web/service/ActiveInstanceStateTest.java
+++ b/webapp/src/test/java/org/apache/atlas/web/service/ActiveInstanceStateTest.java
@@ -34,8 +34,8 @@ import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
-import scala.actors.threadpool.Arrays;
+import java.util.Arrays;
import java.nio.charset.Charset;
import static org.mockito.Mockito.mock;
[09/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplication of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphWalker.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphWalker.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphWalker.java
deleted file mode 100755
index 036d18d..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphWalker.java
+++ /dev/null
@@ -1,226 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.persistence.Id;
-
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Queue;
-import java.util.Set;
-
-/**
- * Given a IReferenceableInstance, a Walker will traverse the Object Graph
- * reachable form the instance. It will invoke the process call on the provided NodeProcessor
- * for each non-primitive attribute (Structs, Traits, References, Arrays of Non-Primitives, Maps
- * of Non-Primitives)
- */
-public class ObjectGraphWalker {
-
- final Queue<IReferenceableInstance> queue;
- final TypeSystem typeSystem;
- final NodeProcessor nodeProcessor;
- Set<Id> processedIds;
-
- public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor) throws AtlasException {
- this(typeSystem, nodeProcessor, (IReferenceableInstance) null);
- }
-
- public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor, IReferenceableInstance start)
- throws AtlasException {
- this.typeSystem = typeSystem;
- this.nodeProcessor = nodeProcessor;
- queue = new LinkedList<>();
- processedIds = new HashSet<>();
- if (start != null) {
- visitReferenceableInstance(start);
- }
- }
-
- public ObjectGraphWalker(TypeSystem typeSystem, NodeProcessor nodeProcessor,
- List<? extends IReferenceableInstance> roots) throws AtlasException {
- this.typeSystem = typeSystem;
- this.nodeProcessor = nodeProcessor;
- queue = new LinkedList<>();
- processedIds = new HashSet<>();
- for (IReferenceableInstance r : roots) {
- visitReferenceableInstance(r);
- }
- }
-
- public void walk() throws AtlasException {
- while (!queue.isEmpty()) {
- IReferenceableInstance r = queue.poll();
- if(r != null) {
- processReferenceableInstance(r);
- }
- }
- }
-
- public void addRoot(IReferenceableInstance root) {
- visitReferenceableInstance(root);
- }
-
- void traverseValue(IDataType dT, Object val) throws AtlasException {
- if (val != null) {
- if (dT.getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
- IDataType elemType = ((DataTypes.ArrayType) dT).getElemType();
- visitCollection(elemType, val);
- } else if (dT.getTypeCategory() == DataTypes.TypeCategory.MAP) {
- IDataType keyType = ((DataTypes.MapType) dT).getKeyType();
- IDataType valueType = ((DataTypes.MapType) dT).getValueType();
- visitMap(keyType, valueType, val);
- } else if (dT.getTypeCategory() == DataTypes.TypeCategory.STRUCT
- || dT.getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
- visitStruct(val);
- } else if (dT.getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- visitReferenceableInstance(val);
- }
- }
- }
-
- void visitMap(IDataType keyType, IDataType valueType, Object val) throws AtlasException {
- if (keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE
- && valueType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
- return;
- }
-
- if (val != null) {
- Iterator<Map.Entry> it = null;
- if (Map.class.isAssignableFrom(val.getClass())) {
- it = ((Map) val).entrySet().iterator();
- ImmutableMap.Builder b = ImmutableMap.builder();
- while (it.hasNext()) {
- Map.Entry e = it.next();
- traverseValue(keyType, e.getKey());
- traverseValue(valueType, e.getValue());
- }
- }
- }
- }
-
- void visitCollection(IDataType elemType, Object val) throws AtlasException {
-
- if (elemType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
- return;
- }
-
- if (val != null) {
- Iterator it = null;
- if (val instanceof Collection) {
- it = ((Collection) val).iterator();
- } else if (val instanceof Iterable) {
- it = ((Iterable) val).iterator();
- } else if (val instanceof Iterator) {
- it = (Iterator) val;
- }
- if (it != null) {
- DataTypes.TypeCategory elemCategory = elemType.getTypeCategory();
- while (it.hasNext()) {
- Object elem = it.next();
- traverseValue(elemType, elem);
- }
- }
- }
- }
-
- void visitStruct(Object val) throws AtlasException {
-
- if (val == null || !(val instanceof IStruct)) {
- return;
- }
-
- IStruct i = (IStruct) val;
-
- IConstructableType type = typeSystem.getDataType(IConstructableType.class, i.getTypeName());
-
- for (Map.Entry<String, AttributeInfo> e : type.fieldMapping().fields.entrySet()) {
- AttributeInfo aInfo = e.getValue();
- String attrName = e.getKey();
- if (aInfo.dataType().getTypeCategory() != DataTypes.TypeCategory.PRIMITIVE) {
- Object aVal = i.get(attrName);
- nodeProcessor.processNode(new Node(i, attrName, aInfo, aVal));
- traverseValue(aInfo.dataType(), aVal);
- }
- }
- }
-
- void visitReferenceableInstance(Object val) {
-
- if (val == null || !(val instanceof IReferenceableInstance)) {
- return;
- }
-
- IReferenceableInstance ref = (IReferenceableInstance) val;
-
- if (!processedIds.contains(ref.getId())) {
- processedIds.add(ref.getId());
- if (!(ref instanceof Id)) {
- queue.add(ref);
- }
- }
- }
-
- void processReferenceableInstance(IReferenceableInstance ref) throws AtlasException {
-
- nodeProcessor.processNode(new Node(ref, null, null, null));
- visitStruct(ref);
- ImmutableList<String> traits = ref.getTraits();
- for (String trait : traits) {
- visitStruct(ref.getTrait(trait));
- }
- }
-
- public interface NodeProcessor {
-
- void processNode(Node nd) throws AtlasException;
- }
-
- /**
- * Represents a non-primitive value of an instance.
- */
- public static class Node {
- public final IStruct instance;
- public final String attributeName;
- public final AttributeInfo aInfo;
- public final Object value;
-
- public Node(IStruct instance, String attributeName, AttributeInfo aInfo, Object value) {
- this.instance = instance;
- this.attributeName = attributeName;
- this.aInfo = aInfo;
- this.value = value;
- }
-
- @Override
- public String toString(){
- StringBuilder string = new StringBuilder().append(instance).append(aInfo).append(value);
- return string.toString();
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/StructType.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/StructType.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/StructType.java
deleted file mode 100755
index 57f2517..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/StructType.java
+++ /dev/null
@@ -1,280 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.security.MessageDigest;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.atlas.AtlasConstants;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedStruct;
-
-public class StructType extends AbstractDataType<IStruct> implements IConstructableType<IStruct, ITypedStruct> {
-
- public final TypeSystem typeSystem;
- public final FieldMapping fieldMapping;
- public final Map<AttributeInfo, List<String>> infoToNameMap;
- public final int numFields;
- private final TypedStructHandler handler;
-
- protected StructType(TypeSystem typeSystem, String name, String description, int numFields) {
- this(typeSystem, name, description, AtlasConstants.DEFAULT_TYPE_VERSION, numFields);
- }
-
- protected StructType(TypeSystem typeSystem, String name, String description, String version, int numFields) {
- super(name, description, version);
- this.typeSystem = typeSystem;
- this.fieldMapping = null;
- infoToNameMap = null;
- this.numFields = numFields;
- this.handler = null;
- }
-
- protected StructType(TypeSystem typeSystem, String name, String description, AttributeInfo... fields)
- throws AtlasException {
- this(typeSystem, name, description, AtlasConstants.DEFAULT_TYPE_VERSION, fields);
- }
-
- protected StructType(TypeSystem typeSystem, String name, String description, String version, AttributeInfo... fields)
- throws AtlasException {
- super(name, description, version);
- this.typeSystem = typeSystem;
- this.fieldMapping = constructFieldMapping(fields);
- infoToNameMap = TypeUtils.buildAttrInfoToNameMap(this.fieldMapping);
- this.numFields = this.fieldMapping.fields.size();
- this.handler = new TypedStructHandler(this);
- }
-
- public FieldMapping fieldMapping() {
- return fieldMapping;
- }
-
- /**
- * Validate that current definition can be updated with the new definition
- * @param newType
- * @return true if the current definition can be updated with the new definition, else false
- */
- @Override
- public void validateUpdate(IDataType newType) throws TypeUpdateException {
- super.validateUpdate(newType);
-
- StructType newStructType = (StructType) newType;
- try {
- TypeUtils.validateUpdate(fieldMapping, newStructType.fieldMapping);
- } catch (TypeUpdateException e) {
- throw new TypeUpdateException(newType, e);
- }
- }
-
- protected FieldMapping constructFieldMapping(AttributeInfo... fields)
- throws AtlasException {
-
- Map<String, AttributeInfo> fieldsMap = new LinkedHashMap<>();
- Map<String, Integer> fieldPos = new HashMap<>();
- Map<String, Integer> fieldNullPos = new HashMap<>();
- int numBools = 0;
- int numBytes = 0;
- int numShorts = 0;
- int numInts = 0;
- int numLongs = 0;
- int numFloats = 0;
- int numDoubles = 0;
- int numBigInts = 0;
- int numBigDecimals = 0;
- int numDates = 0;
- int numStrings = 0;
- int numArrays = 0;
- int numMaps = 0;
- int numStructs = 0;
- int numReferenceables = 0;
-
- for (AttributeInfo i : fields) {
- if (fieldsMap.containsKey(i.name)) {
- throw new AtlasException(
- String.format("Struct defintion cannot contain multiple fields with the same " + "name %s",
- i.name));
- }
- fieldsMap.put(i.name, i);
- fieldNullPos.put(i.name, fieldNullPos.size());
- if (i.dataType() == DataTypes.BOOLEAN_TYPE) {
- fieldPos.put(i.name, numBools);
- numBools++;
- } else if (i.dataType() == DataTypes.BYTE_TYPE) {
- fieldPos.put(i.name, numBytes);
- numBytes++;
- } else if (i.dataType() == DataTypes.SHORT_TYPE) {
- fieldPos.put(i.name, numShorts);
- numShorts++;
- } else if (i.dataType() == DataTypes.INT_TYPE) {
- fieldPos.put(i.name, numInts);
- numInts++;
- } else if (i.dataType() == DataTypes.LONG_TYPE) {
- fieldPos.put(i.name, numLongs);
- numLongs++;
- } else if (i.dataType() == DataTypes.FLOAT_TYPE) {
- fieldPos.put(i.name, numFloats);
- numFloats++;
- } else if (i.dataType() == DataTypes.DOUBLE_TYPE) {
- fieldPos.put(i.name, numDoubles);
- numDoubles++;
- } else if (i.dataType() == DataTypes.BIGINTEGER_TYPE) {
- fieldPos.put(i.name, numBigInts);
- numBigInts++;
- } else if (i.dataType() == DataTypes.BIGDECIMAL_TYPE) {
- fieldPos.put(i.name, numBigDecimals);
- numBigDecimals++;
- } else if (i.dataType() == DataTypes.DATE_TYPE) {
- fieldPos.put(i.name, numDates);
- numDates++;
- } else if (i.dataType() == DataTypes.STRING_TYPE) {
- fieldPos.put(i.name, numStrings);
- numStrings++;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.ENUM) {
- fieldPos.put(i.name, numInts);
- numInts++;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
- fieldPos.put(i.name, numArrays);
- numArrays++;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
- fieldPos.put(i.name, numMaps);
- numMaps++;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT
- || i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
- fieldPos.put(i.name, numStructs);
- numStructs++;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- fieldPos.put(i.name, numReferenceables);
- numReferenceables++;
- } else {
- throw new AtlasException(String.format("Unknown datatype %s", i.dataType()));
- }
- }
-
- return new FieldMapping(fieldsMap, fieldPos, fieldNullPos, numBools, numBytes, numShorts, numInts, numLongs,
- numFloats, numDoubles, numBigInts, numBigDecimals, numDates, numStrings, numArrays, numMaps, numStructs,
- numReferenceables);
- }
-
-
- @Override
- public DataTypes.TypeCategory getTypeCategory() {
- return DataTypes.TypeCategory.STRUCT;
- }
-
- @Override
- public ITypedStruct convert(Object val, Multiplicity m) throws AtlasException {
- return handler.convert(val, m);
- }
-
- public ITypedStruct createInstance() {
- return handler.createInstance();
- }
-
- @Override
- public void output(IStruct s, Appendable buf, String prefix, Set<IStruct> inProcess) throws AtlasException {
- handler.output(s, buf, prefix, inProcess);
- }
-
- @Override
- public String toString() {
- StringBuilder buf = new StringBuilder();
- try {
- output(buf, new HashSet<String>());
- }
- catch (AtlasException e) {
- throw new RuntimeException(e);
- }
- return buf.toString();
- }
-
- @Override
- public void output(Appendable buf, Set<String> typesInProcess) throws AtlasException {
-
- if (typesInProcess == null) {
- typesInProcess = new HashSet<>();
- }
- else if (typesInProcess.contains(name)) {
- // Avoid infinite recursion on bi-directional reference attributes.
- try {
- buf.append(name);
- } catch (IOException e) {
- throw new AtlasException(e);
- }
- return;
- }
-
- typesInProcess.add(name);
- try {
- buf.append(getClass().getSimpleName());
- buf.append("{name=").append(name);
- buf.append(", description=").append(description);
- buf.append(", fieldMapping.fields=[");
- Iterator<AttributeInfo> it = fieldMapping.fields.values().iterator();
- while (it.hasNext()) {
- AttributeInfo attrInfo = it.next();
- attrInfo.output(buf, typesInProcess);
- if (it.hasNext()) {
- buf.append(", ");
- }
- else {
- buf.append(']');
- }
- }
- buf.append("}");
- }
- catch(IOException e) {
- throw new AtlasException(e);
- }
- finally {
- typesInProcess.remove(name);
- }
- }
-
- @Override
- public void updateSignatureHash(MessageDigest digester, Object val) throws AtlasException {
- if( !(val instanceof ITypedStruct)) {
- throw new IllegalArgumentException("Unexpected value type " + val.getClass().getSimpleName() + ". Expected instance of ITypedStruct");
- }
- digester.update(getName().getBytes(Charset.forName("UTF-8")));
-
- if(fieldMapping.fields != null && val != null) {
- IStruct typedValue = (IStruct) val;
- for (AttributeInfo aInfo : fieldMapping.fields.values()) {
- Object attrVal = typedValue.get(aInfo.name);
- if(attrVal != null) {
- aInfo.dataType().updateSignatureHash(digester, attrVal);
- }
- }
- }
- }
-
- public List<String> getNames(AttributeInfo info) {
- return infoToNameMap.get(info);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/StructTypeDefinition.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/StructTypeDefinition.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/StructTypeDefinition.java
deleted file mode 100755
index 4f8695b..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/StructTypeDefinition.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.apache.atlas.AtlasConstants;
-import org.apache.atlas.utils.ParamChecker;
-
-import java.util.Arrays;
-import java.util.Objects;
-
-public class StructTypeDefinition {
-
- public final String typeName;
- public final String typeDescription;//optional field
- public final String typeVersion;
- public final AttributeDefinition[] attributeDefinitions;
-
- protected StructTypeDefinition(String typeName, String typeDescription, boolean validate,
- AttributeDefinition... attributeDefinitions) {
- this(typeName, typeDescription, AtlasConstants.DEFAULT_TYPE_VERSION, validate, attributeDefinitions);
- }
-
- protected StructTypeDefinition(String typeName, String typeDescription, String typeVersion, boolean validate,
- AttributeDefinition... attributeDefinitions) {
- this.typeName = ParamChecker.notEmpty(typeName, "Struct type name");
- this.typeDescription = typeDescription;
- if (validate) {
- ParamChecker.notNullElements(attributeDefinitions, "Attribute definitions");
- }
- this.attributeDefinitions = attributeDefinitions;
- this.typeVersion = typeVersion;
- }
-
- public StructTypeDefinition(String typeName, AttributeDefinition[] attributeDefinitions) {
- this(typeName, null, AtlasConstants.DEFAULT_TYPE_VERSION, attributeDefinitions);
- }
-
- public StructTypeDefinition(String typeName, String typeDescription,
- AttributeDefinition[] attributeDefinitions) {
-
- this(typeName, typeDescription, AtlasConstants.DEFAULT_TYPE_VERSION, attributeDefinitions);
- }
-
- public StructTypeDefinition(String typeName, String typeDescription, String typeVersion,
- AttributeDefinition[] attributeDefinitions) {
- this.typeName = ParamChecker.notEmpty(typeName, "Struct type name");
- this.typeDescription = typeDescription;
- this.typeVersion = typeVersion;
- this.attributeDefinitions = ParamChecker.notNullElements(attributeDefinitions, "Attribute definitions");
- }
-
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- StructTypeDefinition that = (StructTypeDefinition) o;
- return Objects.equals(typeName, that.typeName) &&
- Objects.equals(typeDescription, that.typeDescription) &&
- Objects.equals(typeVersion, that.typeVersion) &&
- Arrays.equals(attributeDefinitions, that.attributeDefinitions);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(typeName, typeDescription, typeVersion, attributeDefinitions);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/TraitType.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/TraitType.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/TraitType.java
deleted file mode 100755
index bbb845a..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/TraitType.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableSet;
-
-import org.apache.atlas.AtlasConstants;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedStruct;
-
-import java.nio.charset.Charset;
-import java.security.MessageDigest;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-public class TraitType extends HierarchicalType<TraitType, IStruct>
- implements IConstructableType<IStruct, ITypedStruct> {
-
- public final Map<AttributeInfo, List<String>> infoToNameMap;
- private final TypedStructHandler handler;
-
- TraitType(TypeSystem typeSystem, String name, String description, ImmutableSet<String> superTraits, int numFields) {
- this(typeSystem, name, description, AtlasConstants.DEFAULT_TYPE_VERSION, superTraits, numFields);
- }
-
- TraitType(TypeSystem typeSystem, String name, String description, String version, ImmutableSet<String> superTraits, int numFields) {
- super(typeSystem, TraitType.class, name, description, version, superTraits, numFields);
- handler = null;
- infoToNameMap = null;
- }
-
- TraitType(TypeSystem typeSystem, String name, String description, ImmutableSet<String> superTraits, AttributeInfo... fields)
- throws AtlasException {
- this(typeSystem, name, description, AtlasConstants.DEFAULT_TYPE_VERSION, superTraits, fields);
- }
-
- TraitType(TypeSystem typeSystem, String name, String description, String version, ImmutableSet<String> superTraits, AttributeInfo... fields)
- throws AtlasException {
- super(typeSystem, TraitType.class, name, description, version, superTraits, fields);
- handler = new TypedStructHandler(this);
- infoToNameMap = TypeUtils.buildAttrInfoToNameMap(fieldMapping);
- }
-
- @Override
- public DataTypes.TypeCategory getTypeCategory() {
- return DataTypes.TypeCategory.TRAIT;
- }
-
- @Override
- public ITypedStruct convert(Object val, Multiplicity m) throws AtlasException {
- return handler.convert(val, m);
- }
-
- public ITypedStruct createInstance() {
- return handler.createInstance();
- }
-
- @Override
- public void output(IStruct s, Appendable buf, String prefix, Set<IStruct> inProcess) throws AtlasException {
- handler.output(s, buf, prefix, inProcess);
- }
-
- @Override
- public void updateSignatureHash(MessageDigest digester, Object val) throws AtlasException {
- if( !(val instanceof ITypedStruct)) {
- throw new IllegalArgumentException("Unexpected value type " + val.getClass().getSimpleName() + ". Expected instance of ITypedStruct");
- }
- digester.update(getName().getBytes(Charset.forName("UTF-8")));
-
- if(fieldMapping.fields != null && val != null) {
- IStruct typedValue = (IStruct) val;
- for (AttributeInfo aInfo : fieldMapping.fields.values()) {
- Object attrVal = typedValue.get(aInfo.name);
- if(attrVal != null) {
- aInfo.dataType().updateSignatureHash(digester, attrVal);
- }
- }
- }
- }
-
- @Override
- public List<String> getNames(AttributeInfo info) {
- return infoToNameMap.get(info);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeSystem.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeSystem.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeSystem.java
deleted file mode 100755
index 262f784..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeSystem.java
+++ /dev/null
@@ -1,821 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.classification.InterfaceAudience;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.exception.TypeExistsException;
-import org.apache.atlas.typesystem.exception.TypeNotFoundException;
-import org.apache.atlas.typesystem.types.cache.DefaultTypeCache;
-import org.apache.atlas.typesystem.types.cache.TypeCache;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.inject.Singleton;
-import java.lang.reflect.Constructor;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-import java.util.concurrent.ConcurrentHashMap;
-
-@Singleton
-@InterfaceAudience.Private
-@Deprecated
-public class TypeSystem {
- private static final Logger LOG = LoggerFactory.getLogger(TypeSystem.class);
-
- private static final TypeSystem INSTANCE = new TypeSystem();
- private static ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal<SimpleDateFormat>() {
- @Override
- public SimpleDateFormat initialValue() {
- SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
- dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
- return dateFormat;
- }
- };
-
- private TypeCache typeCache = new DefaultTypeCache();
- private IdType idType;
- private Map<String, IDataType> coreTypes;
-
- public TypeSystem() {
- initialize();
- }
-
- public static TypeSystem getInstance() {
- return INSTANCE;
- }
-
- /**
- * This is only used for testing purposes. Not intended for public use.
- */
- @InterfaceAudience.Private
- public TypeSystem reset() {
-
- typeCache.clear(); // clear all entries in cache
- initialize();
-
- return this;
- }
-
- public void setTypeCache(TypeCache typeCache) {
- this.typeCache = typeCache;
- }
-
- private void initialize() {
-
- coreTypes = new ConcurrentHashMap<>();
-
- registerPrimitiveTypes();
- registerCoreTypes();
- }
-
- public ImmutableList<String> getCoreTypes() {
- return ImmutableList.copyOf(coreTypes.keySet());
- }
-
- public ImmutableList<String> getTypeNames() throws AtlasException {
- List<String> typeNames = new ArrayList<>(typeCache.getAllTypeNames());
- return ImmutableList.copyOf(typeNames);
- }
-
- public ImmutableList<String> getTypeNamesByCategory(final DataTypes.TypeCategory typeCategory) throws AtlasException {
- return getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>() {{
- put(TypeCache.TYPE_FILTER.CATEGORY, typeCategory.name());
- }});
- }
-
- public ImmutableList<String> getTypeNames(Map<TypeCache.TYPE_FILTER, String> filterMap) throws AtlasException {
- return ImmutableList.copyOf(typeCache.getTypeNames(filterMap));
- }
-
- private void registerPrimitiveTypes() {
- coreTypes.put(DataTypes.BOOLEAN_TYPE.getName(), DataTypes.BOOLEAN_TYPE);
- coreTypes.put(DataTypes.BYTE_TYPE.getName(), DataTypes.BYTE_TYPE);
- coreTypes.put(DataTypes.SHORT_TYPE.getName(), DataTypes.SHORT_TYPE);
- coreTypes.put(DataTypes.INT_TYPE.getName(), DataTypes.INT_TYPE);
- coreTypes.put(DataTypes.LONG_TYPE.getName(), DataTypes.LONG_TYPE);
- coreTypes.put(DataTypes.FLOAT_TYPE.getName(), DataTypes.FLOAT_TYPE);
- coreTypes.put(DataTypes.DOUBLE_TYPE.getName(), DataTypes.DOUBLE_TYPE);
- coreTypes.put(DataTypes.BIGINTEGER_TYPE.getName(), DataTypes.BIGINTEGER_TYPE);
- coreTypes.put(DataTypes.BIGDECIMAL_TYPE.getName(), DataTypes.BIGDECIMAL_TYPE);
- coreTypes.put(DataTypes.DATE_TYPE.getName(), DataTypes.DATE_TYPE);
- coreTypes.put(DataTypes.STRING_TYPE.getName(), DataTypes.STRING_TYPE);
- }
-
- /*
- * The only core OOB type we will define is the Struct to represent the Identity of an Instance.
- */
- private void registerCoreTypes() {
-
- idType = new IdType();
- coreTypes.put(idType.getStructType().getName(), idType.getStructType());
- }
-
- public IdType getIdType() {
- return idType;
- }
-
- public boolean isRegistered(String typeName) throws AtlasException {
- return isCoreType(typeName) || typeCache.has(typeName);
- }
-
- protected boolean isCoreType(String typeName) {
-
- return coreTypes.containsKey(typeName);
- }
-
- public IDataType getDataType(String name) throws AtlasException {
- if (isCoreType(name)) {
- return coreTypes.get(name);
- }
-
- if (typeCache.has(name)) {
- return typeCache.get(name);
- }
-
- /*
- * is this an Array Type?
- */
- String arrElemType = TypeUtils.parseAsArrayType(name);
- if (arrElemType != null) {
- IDataType dT = defineArrayType(getDataType(arrElemType));
- return dT;
- }
-
- /*
- * is this a Map Type?
- */
- String[] mapType = TypeUtils.parseAsMapType(name);
- if (mapType != null) {
- IDataType dT =
- defineMapType(getDataType(mapType[0]), getDataType(mapType[1]));
- return dT;
- }
-
- /*
- * Invoke cache callback to possibly obtain type from other storage.
- */
- IDataType dT = typeCache.onTypeFault(name);
- if (dT != null) {
- return dT;
- }
-
- throw new TypeNotFoundException(String.format("Unknown datatype: %s", name));
- }
-
- public <T extends IDataType> T getDataType(Class<T> cls, String name) throws AtlasException {
- try {
- IDataType dt = getDataType(name);
- return cls.cast(dt);
- } catch (ClassCastException cce) {
- throw new AtlasException(cce);
- }
-
- }
-
- public StructType defineStructType(String name, boolean errorIfExists, AttributeDefinition... attrDefs)
- throws AtlasException {
- return defineStructType(name, null, errorIfExists, attrDefs);
- }
-
- public StructType defineStructType(String name, String description, boolean errorIfExists, AttributeDefinition... attrDefs)
- throws AtlasException {
- StructTypeDefinition structDef = new StructTypeDefinition(name, description, attrDefs);
- defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.of(structDef),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
- return getDataType(StructType.class, structDef.typeName);
- }
-
- /**
- * construct a temporary StructType for a Query Result. This is not registered in the
- * typeSystem.
- * The attributes in the typeDefinition can only reference permanent types.
- * @param name struct type name
- * @param attrDefs struct type definition
- * @return temporary struct type
- * @throws AtlasException
- */
- public StructType defineQueryResultType(String name, Map<String, IDataType> tempTypes,
- AttributeDefinition... attrDefs) throws AtlasException {
-
- AttributeInfo[] infos = new AttributeInfo[attrDefs.length];
- for (int i = 0; i < attrDefs.length; i++) {
- infos[i] = new AttributeInfo(this, attrDefs[i], tempTypes);
- }
-
- return new StructType(this, name, null, infos);
- }
-
- public TraitType defineTraitType(HierarchicalTypeDefinition<TraitType> traitDef) throws AtlasException {
- defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.of(traitDef), ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
- return getDataType(TraitType.class, traitDef.typeName);
- }
-
- public ClassType defineClassType(HierarchicalTypeDefinition<ClassType> classDef) throws AtlasException {
- defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(), ImmutableList.of(classDef));
- return getDataType(ClassType.class, classDef.typeName);
- }
-
- public Map<String, IDataType> defineTraitTypes(HierarchicalTypeDefinition<TraitType>... traitDefs)
- throws AtlasException {
- TransientTypeSystem transientTypes =
- new TransientTypeSystem(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(), ImmutableList.copyOf(traitDefs),
- ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
- return transientTypes.defineTypes(false);
- }
-
- public Map<String, IDataType> defineClassTypes(HierarchicalTypeDefinition<ClassType>... classDefs)
- throws AtlasException {
- TransientTypeSystem transientTypes = new TransientTypeSystem(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.copyOf(classDefs));
- return transientTypes.defineTypes(false);
- }
-
- public Map<String, IDataType> updateTypes(TypesDef typesDef) throws AtlasException {
- ImmutableList<EnumTypeDefinition> enumDefs = ImmutableList.copyOf(typesDef.enumTypesAsJavaList());
- ImmutableList<StructTypeDefinition> structDefs = ImmutableList.copyOf(typesDef.structTypesAsJavaList());
- ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs =
- ImmutableList.copyOf(typesDef.traitTypesAsJavaList());
- ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs =
- ImmutableList.copyOf(typesDef.classTypesAsJavaList());
-
- TransientTypeSystem transientTypes = new TransientTypeSystem(enumDefs, structDefs, traitDefs, classDefs);
- return transientTypes.defineTypes(true);
- }
-
- public Map<String, IDataType> defineTypes(TypesDef typesDef) throws AtlasException {
- ImmutableList<EnumTypeDefinition> enumDefs = ImmutableList.copyOf(typesDef.enumTypesAsJavaList());
- ImmutableList<StructTypeDefinition> structDefs = ImmutableList.copyOf(typesDef.structTypesAsJavaList());
- ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs =
- ImmutableList.copyOf(typesDef.traitTypesAsJavaList());
- ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs =
- ImmutableList.copyOf(typesDef.classTypesAsJavaList());
-
- return defineTypes(enumDefs, structDefs, traitDefs, classDefs);
- }
-
- public Map<String, IDataType> defineTypes(ImmutableList<EnumTypeDefinition> enumDefs,
- ImmutableList<StructTypeDefinition> structDefs,
- ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs,
- ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs) throws AtlasException {
- TransientTypeSystem transientTypes = new TransientTypeSystem(enumDefs, structDefs, traitDefs, classDefs);
- return transientTypes.defineTypes(false);
- }
-
- public DataTypes.ArrayType defineArrayType(IDataType elemType) throws AtlasException {
- assert elemType != null;
- DataTypes.ArrayType dT = new DataTypes.ArrayType(elemType);
- return dT;
- }
-
- public DataTypes.MapType defineMapType(IDataType keyType, IDataType valueType) throws AtlasException {
- assert keyType != null;
- assert valueType != null;
- DataTypes.MapType dT = new DataTypes.MapType(keyType, valueType);
- return dT;
- }
-
- public EnumType defineEnumType(String name, EnumValue... values) throws AtlasException {
- return defineEnumType(new EnumTypeDefinition(name, values));
- }
-
- public EnumType defineEnumType(String name, String description, EnumValue... values) throws AtlasException {
- return defineEnumType(new EnumTypeDefinition(name, description, values));
- }
-
- public EnumType defineEnumType(EnumTypeDefinition eDef) throws AtlasException {
- assert eDef.name != null;
- if (isRegistered(eDef.name)) {
- throw new AtlasException(String.format("Redefinition of type %s not supported", eDef.name));
- }
-
- EnumType eT = new EnumType(this, eDef.name, eDef.description, eDef.version, eDef.enumValues);
- typeCache.put(eT);
- return eT;
- }
-
- public SimpleDateFormat getDateFormat() {
- return dateFormat.get();
- }
-
- public boolean allowNullsInCollections() {
- return false;
- }
-
- /**
- * Create an instance of {@link TransientTypeSystem} with the types defined in the {@link TypesDef}.
- *
- * As part of this, a set of verifications are run on the types defined.
- * @param typesDef The new list of types to be created or updated.
- * @param isUpdate True, if types are updated, false otherwise.
- * @return {@link TransientTypeSystem} that holds the newly added types.
- * @throws AtlasException
- */
- public TransientTypeSystem createTransientTypeSystem(TypesDef typesDef, boolean isUpdate) throws AtlasException {
- ImmutableList<EnumTypeDefinition> enumDefs = ImmutableList.copyOf(typesDef.enumTypesAsJavaList());
- ImmutableList<StructTypeDefinition> structDefs = ImmutableList.copyOf(typesDef.structTypesAsJavaList());
- ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs =
- ImmutableList.copyOf(typesDef.traitTypesAsJavaList());
- ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs =
- ImmutableList.copyOf(typesDef.classTypesAsJavaList());
- TransientTypeSystem transientTypeSystem = new TransientTypeSystem(enumDefs, structDefs, traitDefs, classDefs);
- transientTypeSystem.verifyTypes(isUpdate);
- return transientTypeSystem;
- }
-
- /**
- * Commit the given types to this {@link TypeSystem} instance.
- *
- * This step should be called only after the types have been committed to the backend stores successfully.
- * @param typesAdded newly added types.
- * @throws AtlasException
- */
- public void commitTypes(Map<String, IDataType> typesAdded) throws AtlasException {
- for (Map.Entry<String, IDataType> typeEntry : typesAdded.entrySet()) {
- IDataType type = typeEntry.getValue();
- //Add/replace the new type in the typesystem
- typeCache.put(type);
- }
- }
-
- public class TransientTypeSystem extends TypeSystem {
-
- final ImmutableList<StructTypeDefinition> structDefs;
- final ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs;
- final ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs;
- private final ImmutableList<EnumTypeDefinition> enumDefs;
-
- Map<String, StructTypeDefinition> structNameToDefMap = new HashMap<>();
- Map<String, HierarchicalTypeDefinition<TraitType>> traitNameToDefMap = new HashMap<>();
- Map<String, HierarchicalTypeDefinition<ClassType>> classNameToDefMap = new HashMap<>();
-
- Map<String, IDataType> transientTypes = null;
-
- List<AttributeInfo> recursiveRefs = new ArrayList<>();
- List<DataTypes.ArrayType> recursiveArrayTypes = new ArrayList<>();
- List<DataTypes.MapType> recursiveMapTypes = new ArrayList<>();
-
-
- TransientTypeSystem(ImmutableList<EnumTypeDefinition> enumDefs, ImmutableList<StructTypeDefinition> structDefs,
- ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs,
- ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs) {
- this.enumDefs = enumDefs;
- this.structDefs = structDefs;
- this.traitDefs = traitDefs;
- this.classDefs = classDefs;
- transientTypes = new HashMap<>();
- }
-
- private IDataType dataType(String name) throws AtlasException {
- if (transientTypes.containsKey(name)) {
- return transientTypes.get(name);
- }
-
- return TypeSystem.this.getDataType(IDataType.class, name);
- }
-
- /*
- * Step 1:
- * - validate cannot redefine types
- * - setup shallow Type instances to facilitate recursive type graphs
- */
- private void validateAndSetupShallowTypes(boolean update) throws AtlasException {
- for (EnumTypeDefinition eDef : enumDefs) {
- assert eDef.name != null;
- if (!update) {
- if (TypeSystem.this.isRegistered(eDef.name)) {
- throw new TypeExistsException(String.format("Redefinition of type %s is not supported", eDef.name));
- } else if (transientTypes.containsKey(eDef.name)) {
- LOG.warn("Found duplicate definition of type {}. Ignoring..", eDef.name);
- continue;
- }
- }
-
- EnumType eT = new EnumType(this, eDef.name, eDef.description, eDef.version, eDef.enumValues);
- transientTypes.put(eDef.name, eT);
- }
-
- for (StructTypeDefinition sDef : structDefs) {
- assert sDef.typeName != null;
- if (!update) {
- if (TypeSystem.this.isRegistered(sDef.typeName)) {
- throw new TypeExistsException(String.format("Redefinition of type %s is not supported", sDef.typeName));
- } else if (transientTypes.containsKey(sDef.typeName)) {
- LOG.warn("Found duplicate definition of type {}. Ignoring..", sDef.typeName);
- continue;
- }
- }
-
- StructType sT = new StructType(this, sDef.typeName, sDef.typeDescription, sDef.typeVersion, sDef.attributeDefinitions.length);
- structNameToDefMap.put(sDef.typeName, sDef);
- transientTypes.put(sDef.typeName, sT);
- }
-
- for (HierarchicalTypeDefinition<TraitType> traitDef : traitDefs) {
- assert traitDef.typeName != null;
- if (!update) {
- if (TypeSystem.this.isRegistered(traitDef.typeName)) {
- throw new TypeExistsException(String.format("Redefinition of type %s is not supported", traitDef.typeName));
- } else if (transientTypes.containsKey(traitDef.typeName)) {
- LOG.warn("Found duplicate definition of type {}. Ignoring..", traitDef.typeName);
- continue;
- }
- }
-
- TraitType tT = new TraitType(this, traitDef.typeName, traitDef.typeDescription, traitDef.typeVersion, traitDef.superTypes,
- traitDef.attributeDefinitions.length);
- traitNameToDefMap.put(traitDef.typeName, traitDef);
- transientTypes.put(traitDef.typeName, tT);
- }
-
- for (HierarchicalTypeDefinition<ClassType> classDef : classDefs) {
- assert classDef.typeName != null;
- if (!update) {
- if (TypeSystem.this.isRegistered(classDef.typeName)) {
- throw new TypeExistsException(String.format("Redefinition of type %s is not supported", classDef.typeName));
- } else if (transientTypes.containsKey(classDef.typeName)) {
- LOG.warn("Found duplicate definition of type {}. Ignoring..", classDef.typeName);
- continue;
- }
- }
-
- ClassType cT = new ClassType(this, classDef.typeName, classDef.typeDescription, classDef.typeVersion, classDef.superTypes,
- classDef.attributeDefinitions.length);
- classNameToDefMap.put(classDef.typeName, classDef);
- transientTypes.put(classDef.typeName, cT);
- }
- }
-
- @Override
- public boolean isRegistered(String typeName) throws AtlasException {
- return transientTypes.containsKey(typeName) || TypeSystem.this.isRegistered(typeName);
- }
-
- private <U extends HierarchicalType> void validateSuperTypes(Class<U> cls, HierarchicalTypeDefinition<U> def)
- throws AtlasException {
- for (String superTypeName : def.superTypes) {
-
- IDataType dT = dataType(superTypeName);
-
- if (dT == null) {
- throw new AtlasException(
- String.format("Unknown superType %s in definition of type %s", superTypeName,
- def.typeName));
- }
-
- if (!cls.isAssignableFrom(dT.getClass())) {
- throw new AtlasException(
- String.format("SuperType %s must be a %s, in definition of type %s", superTypeName,
- cls.getName(), def.typeName));
- }
- }
- }
-
- /*
- * Step 2:
- * - for Hierarchical Types, validate SuperTypes.
- * - for each Hierarchical Type setup their SuperTypes Graph
- */
- private void validateAndSetupSuperTypes() throws AtlasException {
- for (HierarchicalTypeDefinition<TraitType> traitDef : traitDefs) {
- validateSuperTypes(TraitType.class, traitDef);
- TraitType traitType = getDataType(TraitType.class, traitDef.typeName);
- traitType.setupSuperTypesGraph();
- }
-
- for (HierarchicalTypeDefinition<ClassType> classDef : classDefs) {
- validateSuperTypes(ClassType.class, classDef);
- ClassType classType = getDataType(ClassType.class, classDef.typeName);
- classType.setupSuperTypesGraph();
- }
- }
-
- private AttributeInfo constructAttributeInfo(AttributeDefinition attrDef) throws AtlasException {
- AttributeInfo info = new AttributeInfo(this, attrDef, null);
- if (transientTypes.keySet().contains(attrDef.dataTypeName)) {
- recursiveRefs.add(info);
- }
- if (info.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
- DataTypes.ArrayType arrType = (DataTypes.ArrayType) info.dataType();
- if (transientTypes.keySet().contains(arrType.getElemType().getName())) {
- recursiveArrayTypes.add(arrType);
- }
- }
- if (info.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
- DataTypes.MapType mapType = (DataTypes.MapType) info.dataType();
- if (transientTypes.keySet().contains(mapType.getKeyType().getName())) {
- recursiveMapTypes.add(mapType);
- } else if (transientTypes.keySet().contains(mapType.getValueType().getName())) {
- recursiveMapTypes.add(mapType);
- }
- }
-
- if (info.multiplicity.upper > 1 && !(info.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP
- || info.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY)) {
- throw new AtlasException(
- String.format("A multiplicty of more than one requires a collection type for attribute '%s'",
- info.name));
- }
-
- return info;
- }
-
- private StructType constructStructureType(StructTypeDefinition def) throws AtlasException {
- AttributeInfo[] infos = new AttributeInfo[def.attributeDefinitions.length];
- for (int i = 0; i < def.attributeDefinitions.length; i++) {
- infos[i] = constructAttributeInfo(def.attributeDefinitions[i]);
- }
-
- StructType type = new StructType(this, def.typeName, def.typeDescription, def.typeVersion, infos);
- transientTypes.put(def.typeName, type);
- return type;
- }
-
- private <U extends HierarchicalType> U constructHierarchicalType(Class<U> cls,
- HierarchicalTypeDefinition<U> def) throws AtlasException {
- AttributeInfo[] infos = new AttributeInfo[def.attributeDefinitions.length];
- for (int i = 0; i < def.attributeDefinitions.length; i++) {
- infos[i] = constructAttributeInfo(def.attributeDefinitions[i]);
- }
-
- try {
- Constructor<U> cons = cls.getDeclaredConstructor(TypeSystem.class, String.class, String.class, String.class, ImmutableSet.class,
- AttributeInfo[].class);
- U type = cons.newInstance(this, def.typeName, def.typeDescription, def.typeVersion, def.superTypes, infos);
- transientTypes.put(def.typeName, type);
- return type;
- } catch (Exception e) {
- e.printStackTrace();
- throw new AtlasException(String.format("Cannot construct Type of MetaType %s - %s", cls.getName(), def.typeName), e);
- }
- }
-
- /*
- * Step 3:
- * - Order Hierarchical Types in order of SuperType before SubType.
- * - Construct all the Types
- */
- private void orderAndConstructTypes() throws AtlasException {
-
- List<TraitType> traitTypes = new ArrayList<>();
- for (String traitTypeName : traitNameToDefMap.keySet()) {
- traitTypes.add(getDataType(TraitType.class, traitTypeName));
- }
- traitTypes = HierarchicalTypeDependencySorter.sortTypes(traitTypes);
-
- List<ClassType> classTypes = new ArrayList<>();
- for (String classTypeName : classNameToDefMap.keySet()) {
- classTypes.add(getDataType(ClassType.class, classTypeName));
- }
- classTypes = HierarchicalTypeDependencySorter.sortTypes(classTypes);
-
- for (StructTypeDefinition structDef : structDefs) {
- constructStructureType(structDef);
- }
-
- for (TraitType traitType : traitTypes) {
- constructHierarchicalType(TraitType.class, traitNameToDefMap.get(traitType.getName()));
- }
-
- for (ClassType classType : classTypes) {
- constructHierarchicalType(ClassType.class, classNameToDefMap.get(classType.getName()));
- }
- }
-
- /*
- * Step 4:
- * - fix up references in recursive AttrInfo and recursive Collection Types.
- */
- private void setupRecursiveTypes() throws AtlasException {
- for (AttributeInfo info : recursiveRefs) {
- info.setDataType(dataType(info.dataType().getName()));
- }
- for (DataTypes.ArrayType arrType : recursiveArrayTypes) {
- arrType.setElemType(dataType(arrType.getElemType().getName()));
- }
- for (DataTypes.MapType mapType : recursiveMapTypes) {
- mapType.setKeyType(dataType(mapType.getKeyType().getName()));
- mapType.setValueType(dataType(mapType.getValueType().getName()));
- }
- }
-
- /**
- * Step 5:
- * - Validate that the update can be done
- */
- private void validateUpdateIsPossible() throws TypeUpdateException, AtlasException {
- //If the type is modified, validate that update can be done
- for (IDataType newType : transientTypes.values()) {
- IDataType oldType = null;
- try {
- oldType = TypeSystem.this.getDataType(IDataType.class, newType.getName());
- } catch (TypeNotFoundException e) {
- LOG.debug(String.format("No existing type %s found - update OK", newType.getName()));
- }
- if (oldType != null) {
- oldType.validateUpdate(newType);
- }
- }
- }
-
- Map<String, IDataType> defineTypes(boolean update) throws AtlasException {
- verifyTypes(update);
- Map<String, IDataType> typesAdded = getTypesAdded();
- commitTypes(typesAdded);
- return typesAdded;
- }
-
- @Override
- public ImmutableList<String> getTypeNames() throws AtlasException {
- Set<String> typeNames = transientTypes.keySet();
- typeNames.addAll(TypeSystem.this.getTypeNames());
- return ImmutableList.copyOf(typeNames);
- }
-
- //get from transient types. Else, from main type system
- @Override
- public IDataType getDataType(String name) throws AtlasException {
-
- if (transientTypes != null) {
- if (transientTypes.containsKey(name)) {
- return transientTypes.get(name);
- }
-
- /*
- * is this an Array Type?
- */
- String arrElemType = TypeUtils.parseAsArrayType(name);
- if (arrElemType != null) {
- IDataType dT = defineArrayType(getDataType(IDataType.class, arrElemType));
- return dT;
- }
-
- /*
- * is this a Map Type?
- */
- String[] mapType = TypeUtils.parseAsMapType(name);
- if (mapType != null) {
- IDataType dT =
- defineMapType(getDataType(IDataType.class, mapType[0]), getDataType(IDataType.class, mapType[1]));
- return dT;
- }
- }
-
- return TypeSystem.this.getDataType(name);
- }
-
- @Override
- public StructType defineStructType(String name, boolean errorIfExists, AttributeDefinition... attrDefs)
- throws AtlasException {
- throw new AtlasException("Internal Error: define type called on TransientTypeSystem");
- }
-
- @Override
- public TraitType defineTraitType(HierarchicalTypeDefinition traitDef) throws AtlasException {
- throw new AtlasException("Internal Error: define type called on TransientTypeSystem");
- }
-
- @Override
- public ClassType defineClassType(HierarchicalTypeDefinition<ClassType> classDef) throws AtlasException {
- throw new AtlasException("Internal Error: define type called on TransientTypeSystem");
- }
-
- @Override
- public Map<String, IDataType> defineTypes(ImmutableList<EnumTypeDefinition> enumDefs,
- ImmutableList<StructTypeDefinition> structDefs,
- ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs,
- ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs) throws AtlasException {
- throw new AtlasException("Internal Error: define type called on TransientTypeSystem");
- }
-
- @Override
- public DataTypes.ArrayType defineArrayType(IDataType elemType) throws AtlasException {
- return super.defineArrayType(elemType);
- }
-
- @Override
- public DataTypes.MapType defineMapType(IDataType keyType, IDataType valueType) throws AtlasException {
- return super.defineMapType(keyType, valueType);
- }
-
- void verifyTypes(boolean isUpdate) throws AtlasException {
- validateAndSetupShallowTypes(isUpdate);
- validateAndSetupSuperTypes();
- orderAndConstructTypes();
- setupRecursiveTypes();
- if (isUpdate) {
- validateUpdateIsPossible();
- }
- }
-
- @Override
- public void commitTypes(Map<String, IDataType> typesAdded) throws AtlasException {
- TypeSystem.this.commitTypes(typesAdded);
- }
-
- public Map<String, IDataType> getTypesAdded() {
- return new HashMap<>(transientTypes);
- }
-
- /**
- * The core types do not change and they are registered
- * once in the main type system.
- */
- @Override
- public ImmutableList<String> getCoreTypes() {
- return TypeSystem.this.getCoreTypes();
- }
- }
-
- public class IdType {
- private static final String ID_ATTRNAME = "guid";
- private static final String TYPENAME_ATTRNAME = "typeName";
- private static final String STATE_ATTRNAME = "state";
- private static final String VERSION_ATTRNAME = "version";
- private static final String TYP_NAME = "__IdType";
-
- private StructType type;
-
- private IdType() {
- AttributeDefinition idAttr =
- new AttributeDefinition(ID_ATTRNAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
- null);
- AttributeDefinition typNmAttr =
- new AttributeDefinition(TYPENAME_ATTRNAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED,
- false, null);
- AttributeDefinition stateAttr =
- new AttributeDefinition(STATE_ATTRNAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED,
- false, null);
- AttributeDefinition versionAttr =
- new AttributeDefinition(VERSION_ATTRNAME, DataTypes.INT_TYPE.getName(), Multiplicity.REQUIRED,
- false, null);
- try {
- AttributeInfo[] infos = new AttributeInfo[4];
- infos[0] = new AttributeInfo(TypeSystem.this, idAttr, null);
- infos[1] = new AttributeInfo(TypeSystem.this, typNmAttr, null);
- infos[2] = new AttributeInfo(TypeSystem.this, stateAttr, null);
- infos[3] = new AttributeInfo(TypeSystem.this, versionAttr, null);
-
- type = new StructType(TypeSystem.this, TYP_NAME, null, infos);
- } catch (AtlasException me) {
- throw new RuntimeException(me);
- }
- }
-
- public StructType getStructType() {
- return type;
- }
-
- public String getName() {
- return TYP_NAME;
- }
-
- public String idAttrName() {
- return ID_ATTRNAME;
- }
-
- public String typeNameAttrName() {
- return TYPENAME_ATTRNAME;
- }
-
- public String stateAttrName() {
- return STATE_ATTRNAME;
- }
-
- public String versionAttrName() {
- return VERSION_ATTRNAME;
- }
- }
-
- public static final String ID_STRUCT_ID_ATTRNAME = IdType.ID_ATTRNAME;
- public static final String ID_STRUCT_TYP_NAME = IdType.TYP_NAME;
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeUpdateException.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeUpdateException.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeUpdateException.java
deleted file mode 100644
index 33d1cb5..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeUpdateException.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.apache.atlas.AtlasException;
-
-public class TypeUpdateException extends AtlasException {
- public TypeUpdateException(IDataType newType) {
- super(newType.getName() + " can't be updated");
- }
-
- public TypeUpdateException(IDataType newType, Exception e) {
- super(newType.getName() + " can't be updated - " + e.getMessage(), e);
- }
-
- public TypeUpdateException(String message) {
- super(message);
- }
-
- public TypeUpdateException(IDataType newType, String message) {
- super(newType.getName() + " can't be updated - " + message);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeUtils.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeUtils.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeUtils.java
deleted file mode 100755
index 6a14dc4..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypeUtils.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.atlas.AtlasException;
-
-import java.io.IOException;
-import java.util.*;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class TypeUtils {
-
- public static final String NAME_REGEX = "[a-zA-z][a-zA-Z0-9_]*";
- public static final Pattern NAME_PATTERN = Pattern.compile(NAME_REGEX);
- public static final Pattern ARRAY_TYPE_NAME_PATTERN = Pattern.compile(String.format("array<(%s)>", NAME_REGEX));
- public static final Pattern MAP_TYPE_NAME_PATTERN =
- Pattern.compile(String.format("map<(%s),(%s)>", NAME_REGEX, NAME_REGEX));
-
- public static void outputVal(String val, Appendable buf, String prefix) throws AtlasException {
- try {
- buf.append(prefix).append(val);
- } catch (IOException ie) {
- throw new AtlasException(ie);
- }
- }
-
- public static String parseAsArrayType(String typeName) {
- Matcher m = ARRAY_TYPE_NAME_PATTERN.matcher(typeName);
- return m.matches() ? m.group(1) : null;
- }
-
- public static String[] parseAsMapType(String typeName) {
- Matcher m = MAP_TYPE_NAME_PATTERN.matcher(typeName);
- return m.matches() ? new String[]{m.group(1), m.group(2)} : null;
- }
-
- public static Map<AttributeInfo, List<String>> buildAttrInfoToNameMap(FieldMapping f) {
- Map<AttributeInfo, List<String>> b = new HashMap();
- for (Map.Entry<String, AttributeInfo> e : f.fields.entrySet()) {
- List<String> names = b.get(e.getValue());
- if (names == null) {
- names = new ArrayList<>();
- b.put(e.getValue(), names);
- }
- names.add(e.getKey());
- }
- return b;
- }
-
- public static class Pair<L, R> {
- public L left;
- public R right;
-
- public Pair(L left, R right) {
- this.left = left;
- this.right = right;
- }
-
- public static <L, R> Pair<L, R> of(L left, R right) {
- return new Pair<>(left, right);
- }
-
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
-
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
-
- Pair p = (Pair)o;
-
- return Objects.equals(left, p.left) && Objects.equals(right, p.right);
- }
-
- public int hashCode() { return Objects.hash(left, right); }
- }
-
- /**
- * Validates that the old field mapping can be replaced with new field mapping
- * @param oldFieldMapping
- * @param newFieldMapping
- */
- public static void validateUpdate(FieldMapping oldFieldMapping, FieldMapping newFieldMapping)
- throws TypeUpdateException {
- Map<String, AttributeInfo> newFields = newFieldMapping.fields;
- for (AttributeInfo attribute : oldFieldMapping.fields.values()) {
- if (newFields.containsKey(attribute.name)) {
- AttributeInfo newAttribute = newFields.get(attribute.name);
- //If old attribute is also in new definition, only allowed change is multiplicity change from REQUIRED to OPTIONAL
- if (!newAttribute.equals(attribute)) {
- if (attribute.multiplicity == Multiplicity.REQUIRED
- && newAttribute.multiplicity == Multiplicity.OPTIONAL) {
- continue;
- } else {
- throw new TypeUpdateException("Attribute " + attribute.name + " can't be updated");
- }
- }
-
- } else {
- //If old attribute is missing in new definition, return false as attributes can't be deleted
- throw new TypeUpdateException("Old Attribute " + attribute.name + " is missing");
- }
- }
-
- //Only new attributes
- Set<String> newAttributes = new HashSet<>(ImmutableList.copyOf(newFields.keySet()));
- newAttributes.removeAll(oldFieldMapping.fields.keySet());
- for (String attributeName : newAttributes) {
- AttributeInfo newAttribute = newFields.get(attributeName);
- //New required attribute can't be added
- if (newAttribute.multiplicity == Multiplicity.REQUIRED) {
- throw new TypeUpdateException("Can't add required attribute " + attributeName);
- }
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypedStructHandler.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypedStructHandler.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypedStructHandler.java
deleted file mode 100755
index 9afa873..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/TypedStructHandler.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.StructInstance;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.util.Date;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-
-public class TypedStructHandler {
-
- private final IConstructableType<IStruct, ITypedStruct> structType;
- private final FieldMapping fieldMapping;
-
- public TypedStructHandler(IConstructableType<IStruct, ITypedStruct> structType) {
- this.structType = structType;
- fieldMapping = structType.fieldMapping();
- }
-
- public ITypedStruct convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- if (val instanceof ITypedStruct) {
- ITypedStruct ts = (ITypedStruct) val;
- if (!Objects.equals(ts.getTypeName(), structType.getName())) {
- throw new ValueConversionException(structType, val);
- }
- return ts;
- } else if (val instanceof Struct) {
- Struct s = (Struct) val;
- if (!s.typeName.equals(structType.getName())) {
- throw new ValueConversionException(structType, val);
- }
- ITypedStruct ts = createInstance();
- for (Map.Entry<String, AttributeInfo> e : fieldMapping.fields.entrySet()) {
- String attrKey = e.getKey();
- AttributeInfo i = e.getValue();
- Object aVal = s.get(attrKey);
- try {
- ts.set(attrKey, aVal);
- } catch (ValueConversionException ve) {
- throw new ValueConversionException(structType, val, ve);
- }
- }
- return ts;
- } else if (val instanceof StructInstance && Objects.equals(((StructInstance) val).getTypeName(), structType.getName())) {
- return (StructInstance) val;
- } else {
- throw new ValueConversionException(structType, val);
- }
- }
- if (!m.nullAllowed()) {
- throw new ValueConversionException.NullConversionException(m);
- }
- return null;
- }
-
- public DataTypes.TypeCategory getTypeCategory() {
- return DataTypes.TypeCategory.STRUCT;
- }
-
- public ITypedStruct createInstance() {
- return new StructInstance(structType.getName(), fieldMapping, new boolean[fieldMapping.fields.size()],
- new boolean[fieldMapping.fields.size()],
- fieldMapping.numBools == 0 ? null : new boolean[fieldMapping.numBools],
- fieldMapping.numBytes == 0 ? null : new byte[fieldMapping.numBytes],
- fieldMapping.numShorts == 0 ? null : new short[fieldMapping.numShorts],
- fieldMapping.numInts == 0 ? null : new int[fieldMapping.numInts],
- fieldMapping.numLongs == 0 ? null : new long[fieldMapping.numLongs],
- fieldMapping.numFloats == 0 ? null : new float[fieldMapping.numFloats],
- fieldMapping.numDoubles == 0 ? null : new double[fieldMapping.numDoubles],
- fieldMapping.numBigDecimals == 0 ? null : new BigDecimal[fieldMapping.numBigDecimals],
- fieldMapping.numBigInts == 0 ? null : new BigInteger[fieldMapping.numBigInts],
- fieldMapping.numDates == 0 ? null : new Date[fieldMapping.numDates],
- fieldMapping.numStrings == 0 ? null : new String[fieldMapping.numStrings],
- fieldMapping.numArrays == 0 ? null : new ImmutableList[fieldMapping.numArrays],
- fieldMapping.numMaps == 0 ? null : new ImmutableMap[fieldMapping.numMaps],
- fieldMapping.numStructs == 0 ? null : new StructInstance[fieldMapping.numStructs],
- fieldMapping.numReferenceables == 0 ? null : new ReferenceableInstance[fieldMapping.numReferenceables],
- fieldMapping.numReferenceables == 0 ? null : new Id[fieldMapping.numReferenceables]);
- }
-
- public void output(IStruct s, Appendable buf, String prefix, Set<IStruct> inProcess) throws AtlasException {
- fieldMapping.output(s, buf, prefix, inProcess);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/ValueConversionException.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ValueConversionException.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/ValueConversionException.java
deleted file mode 100755
index f756135..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ValueConversionException.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.apache.atlas.AtlasException;
-
-public class ValueConversionException extends AtlasException {
-
- public ValueConversionException(IDataType typ, Object val) {
- this(typ, val, (Throwable) null);
- }
-
- public ValueConversionException(IDataType typ, Object val, Throwable t) {
- super(String.format("Cannot convert value '%s' to datatype %s", val.toString(), typ.getName()), t);
- }
-
- public ValueConversionException(IDataType typ, Object val, String msg) {
- super(String
- .format("Cannot convert value '%s' to datatype %s because: %s", val.toString(), typ.getName(), msg));
- }
-
- public ValueConversionException(String typeName, Object val, String msg) {
- super(String.format("Cannot convert value '%s' to datatype %s because: %s", val.toString(), typeName, msg));
- }
-
- protected ValueConversionException(String msg) {
- super(msg);
- }
-
- protected ValueConversionException(String msg, Exception e) {
- super(msg, e);
- }
-
- public static class NullConversionException extends ValueConversionException {
- public NullConversionException(Multiplicity m) {
- super(String.format("Null value not allowed for multiplicty %s", m));
- }
-
- public NullConversionException(Multiplicity m, String msg){
- super(String.format("Null value not allowed for multiplicty %s . Message %s", m, msg));
- }
-
- public NullConversionException(String msg, Exception e) {
- super(msg, e);
- }
- }
-}
[13/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/scala/org/apache/atlas/query/LineageQueryTest.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/LineageQueryTest.scala b/repository/src/test/scala/org/apache/atlas/query/LineageQueryTest.scala
deleted file mode 100755
index b01cbf8..0000000
--- a/repository/src/test/scala/org/apache/atlas/query/LineageQueryTest.scala
+++ /dev/null
@@ -1,541 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import org.apache.atlas.{DBSandboxer, TestUtils}
-import org.apache.atlas.discovery.graph.DefaultGraphPersistenceStrategy
-import org.apache.atlas.query.Expressions._class
-import org.apache.atlas.query.Expressions.id
-import org.apache.atlas.query.Expressions.int
-import org.apache.atlas.repository.graph.AtlasGraphProvider
-import org.apache.atlas.repository.graph.GraphBackedMetadataRepository
-import org.apache.atlas.repository.graphdb.AtlasGraph
-import org.apache.atlas.typesystem.types.TypeSystem
-import org.testng.annotations._
-
-class LineageQueryTest extends BaseGremlinTest {
-
- var g: AtlasGraph[_,_] = null
- var gp:GraphPersistenceStrategies = null;
-
- @BeforeMethod
- def resetRequestContext() {
- TestUtils.resetRequestContext()
- }
-
-
- @BeforeClass
- def beforeAll() {
- TypeSystem.getInstance().reset()
- var repo = new GraphBackedMetadataRepository(null, new AtlasGraphProvider().get());
- TestUtils.setupGraphProvider(repo);
- //force graph to be initialized first
- AtlasGraphProvider.getGraphInstance();
-
- //create types and indices up front. Without this, some of the property keys (particularly __traitNames and __superTypes)
- //get ended up created implicitly with some graph backends with the wrong multiplicity. This also makes the queries
- //we execute perform better :-)
- QueryTestsUtils.setupTypesAndIndices()
-
- gp = new DefaultGraphPersistenceStrategy(repo);
- g = QueryTestsUtils.setupTestGraph(repo)
- }
-
- @AfterClass
- def afterAll() {
- AtlasGraphProvider.cleanup()
- }
-
- val PREFIX_SPACES_REGEX = ("\\n\\s*").r
-
- @Test def testInputTables {
- val r = QueryProcessor.evaluate(_class("LoadProcess").field("inputTables"), g, gp)
- val x = r.toJson
- validateJson(r,"""{
- | "query":"LoadProcess inputTables",
- | "dataType":{
- | "superTypes":[
- |
- | ],
- | "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
- | "typeName":"Table",
- | "attributeDefinitions":[
- | {
- | "name":"name",
- | "dataTypeName":"string",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"db",
- | "dataTypeName":"DB",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"sd",
- | "dataTypeName":"StorageDescriptor",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"created",
- | "dataTypeName":"date",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | }
- | ]
- | },
- | "rows":[
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact"
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"time_dim",
- | "$traits$":{
- | "Dimension":{
- | "$typeName$":"Dimension"
- | }
- | }
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact_daily_mv"
- | }
- | ]
- |}
- """.stripMargin)
- }
-
- @Test def testLoadProcessOut {
- val r = QueryProcessor.evaluate(_class("Table").field("LoadProcess").field("outputTable"), g, gp)
- validateJson(r, null)
- }
-
- @Test def testLineageAll {
- val r = QueryProcessor.evaluate(_class("Table").loop(id("LoadProcess").field("outputTable")), g, gp)
- validateJson(r, """{
- | "query":"Table as _loop0 loop (LoadProcess outputTable)",
- | "dataType":{
- | "superTypes":[
- |
- | ],
- | "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
- | "typeName":"Table",
- | "attributeDefinitions":[
- | {
- | "name":"name",
- | "dataTypeName":"string",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"db",
- | "dataTypeName":"DB",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"sd",
- | "dataTypeName":"StorageDescriptor",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"created",
- | "dataTypeName":"date",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | }
- | ]
- | },
- | "rows":[
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact_daily_mv"
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact_monthly_mv"
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact_daily_mv"
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact_monthly_mv"
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact_monthly_mv"
- | }
- | ]
- |}""".stripMargin)
- }
-
- @Test def testLineageAllSelect {
- val r = QueryProcessor.evaluate(_class("Table").as("src").loop(id("LoadProcess").field("outputTable")).as("dest").
- select(id("src").field("name").as("srcTable"), id("dest").field("name").as("destTable")), g, gp)
- validateJson(r, """{
- "query":"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as destTable",
- "dataType":{
- "typeName":"__tempQueryResultStruct2",
- "attributeDefinitions":[
- {
- "name":"srcTable",
- "dataTypeName":"string",
- "multiplicity":{
- "lower":0,
- "upper":1,
- "isUnique":false
- },
- "isComposite":false,
- "isUnique":false,
- "isIndexable":false,
- "reverseAttributeName":null
- },
- {
- "name":"destTable",
- "dataTypeName":"string",
- "multiplicity":{
- "lower":0,
- "upper":1,
- "isUnique":false
- },
- "isComposite":false,
- "isUnique":false,
- "isIndexable":false,
- "reverseAttributeName":null
- }
- ]
- },
- "rows":[
- {
- "$typeName$":"__tempQueryResultStruct2",
- "srcTable":"sales_fact",
- "destTable":"sales_fact_daily_mv"
- },
- {
- "$typeName$":"__tempQueryResultStruct2",
- "srcTable":"sales_fact",
- "destTable":"sales_fact_monthly_mv"
- },
- {
- "$typeName$":"__tempQueryResultStruct2",
- "srcTable":"time_dim",
- "destTable":"sales_fact_daily_mv"
- },
- {
- "$typeName$":"__tempQueryResultStruct2",
- "srcTable":"time_dim",
- "destTable":"sales_fact_monthly_mv"
- },
- {
- "$typeName$":"__tempQueryResultStruct2",
- "srcTable":"sales_fact_daily_mv",
- "destTable":"sales_fact_monthly_mv"
- }
- ]
-}""".stripMargin)
- }
-
- @Test def testLineageFixedDepth {
- val r = QueryProcessor.evaluate(_class("Table").loop(id("LoadProcess").field("outputTable"), int(1)), g, gp)
- validateJson(r, """{
- | "query":"Table as _loop0 loop (LoadProcess outputTable) times 1",
- | "dataType":{
- | "superTypes":[
- |
- | ],
- | "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
- | "typeName":"Table",
- | "attributeDefinitions":[
- | {
- | "name":"name",
- | "dataTypeName":"string",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"db",
- | "dataTypeName":"DB",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"sd",
- | "dataTypeName":"StorageDescriptor",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"created",
- | "dataTypeName":"date",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | }
- | ]
- | },
- | "rows":[
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact_daily_mv"
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact_daily_mv"
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact_monthly_mv"
- | }
- | ]
- |}""".stripMargin)
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/scala/org/apache/atlas/query/ParserTest.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/ParserTest.scala b/repository/src/test/scala/org/apache/atlas/query/ParserTest.scala
deleted file mode 100755
index de5d879..0000000
--- a/repository/src/test/scala/org/apache/atlas/query/ParserTest.scala
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import org.apache.atlas.DBSandboxer
-import org.apache.atlas.repository.BaseTest
-import org.testng.annotations.{BeforeMethod, Listeners, Test}
-
-
-class ParserTest extends BaseTest {
-
- @BeforeMethod
- override def setup {
- super.setup
- QueryTestsUtils.setupTypes
- }
-
- @Test def testFrom: Unit = {
- println(QueryParser.apply("from DB").right.get.toString)
- }
-
- @Test def testFrom2: Unit = {
- println(QueryParser.apply("DB").right.get.toString)
- }
-
- @Test def testJoin1: Unit = {
- println(QueryParser.apply("DB, Table").right.get.toString)
- }
-
- @Test def testWhere1: Unit = {
- println(QueryParser.apply("DB as db1 Table where db1.name ").right.get.toString)
- }
-
- @Test def testWhere2: Unit = {
- println(QueryParser.apply("DB name = \"Reporting\"").right.get.toString)
- }
-
- @Test def testIsTrait: Unit = {
- println(QueryParser.apply("Table isa Dimension").right.get.toString)
- println(QueryParser.apply("Table is Dimension").right.get.toString)
- }
-
- @Test def test4: Unit = {
- println(QueryParser.apply("DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1").right.get.toString)
- }
-
- @Test def testJoin2: Unit = {
- println(QueryParser.apply("DB as db1 where (createTime + 1) > 0 and (db1.name = \"Reporting\") or DB has owner Table as tab " +
- " select db1.name as dbName, tab.name as tabName").right.get.toString)
- }
-
- @Test def testLoop: Unit = {
- println(QueryParser.apply("Table loop (LoadProcess outputTable)").right.get.toString)
- }
-
- @Test def testNegInvalidateType: Unit = {
- val x = QueryParser.apply("from blah")
- println(QueryParser.apply("from blah").left)
- }
-
- @Test def testPath1: Unit = {
- println(QueryParser.apply("Table loop (LoadProcess outputTable) withPath").right.get.toString)
- }
-
- @Test def testPath2: Unit = {
- println(QueryParser.apply(
- "Table as src loop (LoadProcess outputTable) as dest " +
- "select src.name as srcTable, dest.name as destTable withPath").right.get.toString
- )
- }
-
- @Test def testList: Unit = {
- println(QueryParser.apply(
- "Partition as p where values = ['2015-01-01']," +
- " table where name = 'tableoq8ty'," +
- " db where name = 'default' and clusterName = 'test'").right.get.toString
- )
- }
-
- @Test def testorder_by: Unit = {
- println(QueryParser.apply("from DB order by columnA").right.get.toString)
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/scala/org/apache/atlas/query/QueryTestsUtils.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/QueryTestsUtils.scala b/repository/src/test/scala/org/apache/atlas/query/QueryTestsUtils.scala
deleted file mode 100755
index f26f98e..0000000
--- a/repository/src/test/scala/org/apache/atlas/query/QueryTestsUtils.scala
+++ /dev/null
@@ -1,206 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import java.io.File
-import javax.script.{Bindings, ScriptEngine, ScriptEngineManager}
-
-import com.google.common.collect.ImmutableList
-import org.apache.atlas.repository.graphdb.AtlasVertex
-import com.typesafe.config.{Config, ConfigFactory}
-import org.apache.atlas.typesystem.types._
-import org.apache.commons.configuration.{Configuration, ConfigurationException, MapConfiguration}
-import org.apache.commons.io.FileUtils
-import org.apache.commons.lang.RandomStringUtils
-import org.json.JSONObject
-import org.skyscreamer.jsonassert.JSONAssert
-
-import scala.util.Random
-import org.apache.atlas.repository.MetadataRepository
-import org.apache.atlas.repository.graphdb.AtlasGraph
-import org.apache.atlas.repository.graph.AtlasGraphProvider
-import java.net.URL
-
-import org.apache.atlas.`type`.AtlasTypeRegistry
-import org.apache.atlas.repository.graph.GraphBackedSearchIndexer
-import org.apache.atlas.typesystem.TypesDef
-import org.apache.atlas.typesystem.ITypedReferenceableInstance
-
-
-trait GraphUtils {
-
- import scala.collection.JavaConversions._
-
- def getConfiguration(config: Config): Configuration = {
- val keys = config.entrySet().map {
- _.getKey
- }
- val gConfig: java.util.Map[String, String] = new java.util.HashMap[String, String]()
- keys.foreach { k =>
- gConfig.put(k, config.getString(k))
- }
- return new MapConfiguration(gConfig)
- }
-
-
- def graph(conf: Configuration) = {
- try {
- val g = AtlasGraphProvider.getGraphInstance
- val mgmt = g.getManagementSystem
- val typname = mgmt.makePropertyKey("typeName", classOf[String], null);
- mgmt.createExactMatchIndex("byTypeName", false, List(typname));
- mgmt.commit()
- g
- } catch {
- case e: ConfigurationException => throw new RuntimeException(e)
- }
- }
-}
-
-object QueryTestsUtils extends GraphUtils {
-
- def setupTypesAndIndices() : Unit = {
- val indexer = new GraphBackedSearchIndexer(new AtlasTypeRegistry());
- val typesDef : TypesDef = defineTypes;
- val newTypes = TypeSystem.getInstance.defineTypes(typesDef);
- indexer.onAdd(newTypes.values());
- }
-
- def setupTypes: Unit = {
-
- val types : TypesDef = defineTypes;
- TypeSystem.getInstance.defineTypes(types);
- }
-
-
- def defineTypes: TypesDef = {
- def attrDef(name: String, dT: IDataType[_],
- m: Multiplicity = Multiplicity.OPTIONAL,
- isComposite: Boolean = false,
- reverseAttributeName: String = null) = {
- require(name != null)
- require(dT != null)
- new AttributeDefinition(name, dT.getName, m, isComposite, reverseAttributeName)
- }
-
- def dbClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "DB", null, null,
- Array(
- attrDef("name", DataTypes.STRING_TYPE),
- attrDef("owner", DataTypes.STRING_TYPE),
- attrDef("createTime", DataTypes.INT_TYPE),
- attrDef("clusterName", DataTypes.STRING_TYPE)
- ))
-
- def hiveOrderDef = new StructTypeDefinition("HiveOrder",
- Array(
- attrDef("col", DataTypes.STRING_TYPE),
- attrDef("order", DataTypes.INT_TYPE)
- ))
-
- def storageDescClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "StorageDescriptor", null, null,
- Array(
- attrDef("inputFormat", DataTypes.STRING_TYPE),
- attrDef("outputFormat", DataTypes.STRING_TYPE),
- new AttributeDefinition("sortCols", DataTypes.arrayTypeName("HiveOrder"), Multiplicity.REQUIRED, false, null)
- ))
-
- def columnClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "Column", null, null,
- Array(
- attrDef("name", DataTypes.STRING_TYPE),
- attrDef("dataType", DataTypes.STRING_TYPE),
- new AttributeDefinition("sd", "StorageDescriptor", Multiplicity.REQUIRED, false, null)
- ))
-
- def tblClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "Table", null, null,
- Array(
- attrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("db", "DB", Multiplicity.REQUIRED, false, null),
- new AttributeDefinition("sd", "StorageDescriptor", Multiplicity.REQUIRED, false, null),
- attrDef("created", DataTypes.DATE_TYPE)
- ))
-
- def partitionClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "Partition", null, null,
- Array(
- new AttributeDefinition("values", DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName), Multiplicity.REQUIRED, false, null),
- new AttributeDefinition("table", "Table", Multiplicity.REQUIRED, false, null)
- ))
-
- def loadProcessClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "LoadProcess", null, null,
- Array(
- attrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("inputTables", DataTypes.arrayTypeName("Table"), Multiplicity.COLLECTION, false, null),
- new AttributeDefinition("outputTable", "Table", Multiplicity.REQUIRED, false, null)
- ))
-
- def viewClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "View", null, null,
- Array(
- attrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("inputTables", DataTypes.arrayTypeName("Table"), Multiplicity.COLLECTION, false, null)
- ))
-
- def dimTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "Dimension", null, null,
- Array[AttributeDefinition]())
- def piiTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "PII", null, null,
- Array[AttributeDefinition]())
- def metricTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "Metric", null, null,
- Array[AttributeDefinition]())
- def etlTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "ETL", null, null,
- Array[AttributeDefinition]())
- def jdbcTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "JdbcAccess", null, null,
- Array[AttributeDefinition]())
-
- TypesDef(Seq[EnumTypeDefinition](),
- Seq[StructTypeDefinition](hiveOrderDef),
- Seq[HierarchicalTypeDefinition[TraitType]](dimTraitDef, piiTraitDef,
- metricTraitDef, etlTraitDef, jdbcTraitDef),
- Seq[HierarchicalTypeDefinition[ClassType]](dbClsDef, storageDescClsDef, columnClsDef, tblClsDef,
- partitionClsDef, loadProcessClsDef, viewClsDef))
- }
-
- def setupTestGraph(repo : MetadataRepository): AtlasGraph[_,_] = {
-
- val g = AtlasGraphProvider.getGraphInstance();
- val entities = HiveTitanSample.getEntitiesToCreate();
- repo.createEntities(entities:_*)
- g.commit();
- g
- }
-
-
-
-}
-
-
-
-trait BaseGremlinTest {
- val STRUCT_NAME_REGEX = (TypeUtils.TEMP_STRUCT_NAME_PREFIX + "\\d+").r
- def validateJson(r: GremlinQueryResult, expected: String = null): Unit = {
- val rJ = r.toJson
- if (expected != null) {
- val a = STRUCT_NAME_REGEX.replaceAllIn(rJ, "")
- val b = STRUCT_NAME_REGEX.replaceAllIn(expected, "")
- val actualjsonObj = new JSONObject(a)
- val expectedjsonObj = new JSONObject(b)
- JSONAssert.assertEquals(expectedjsonObj, actualjsonObj, false)
- } else {
- println(rJ)
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/pom.xml
----------------------------------------------------------------------
diff --git a/server-api/pom.xml b/server-api/pom.xml
index 6e99ca3..6919dcd 100644
--- a/server-api/pom.xml
+++ b/server-api/pom.xml
@@ -37,11 +37,6 @@
</dependency>
<dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- </dependency>
-
- <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/RequestContext.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/RequestContext.java b/server-api/src/main/java/org/apache/atlas/RequestContext.java
deleted file mode 100644
index 3e729b5..0000000
--- a/server-api/src/main/java/org/apache/atlas/RequestContext.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.atlas.metrics.Metrics;
-import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-@Deprecated
-public class RequestContext {
- private static final Logger LOG = LoggerFactory.getLogger(RequestContext.class);
-
- private static final ThreadLocal<RequestContext> CURRENT_CONTEXT = new ThreadLocal<>();
-
- private Set<String> createdEntityIds = new LinkedHashSet<>();
- private Set<String> updatedEntityIds = new LinkedHashSet<>();
- private Set<String> deletedEntityIds = new LinkedHashSet<>();
- private List<ITypedReferenceableInstance> deletedEntities = new ArrayList<>();
- private Map<String,ITypedReferenceableInstance> entityCacheV1 = new HashMap<>();
- private Map<String,AtlasEntityWithExtInfo> entityCacheV2 = new HashMap<>();
-
- private String user;
- private long requestTime;
-
- private TypeSystem typeSystem = TypeSystem.getInstance();
- private Metrics metrics = new Metrics();
-
- private RequestContext() {
- }
-
- //To handle gets from background threads where createContext() is not called
- //createContext called for every request in the filter
- public static RequestContext get() {
- if (CURRENT_CONTEXT.get() == null) {
- synchronized (RequestContext.class) {
- if (CURRENT_CONTEXT.get() == null) {
- createContext();
- }
- }
- }
-
- // ensure that RequestContextV1 is also initialized for this request
- RequestContextV1.get();
-
- return CURRENT_CONTEXT.get();
- }
-
- public static RequestContext createContext() {
- RequestContext context = new RequestContext();
- context.requestTime = System.currentTimeMillis();
- CURRENT_CONTEXT.set(context);
- return context;
- }
-
- /**
- * Adds the specified instance to the cache
- *
- */
- public void cache(ITypedReferenceableInstance instance) {
- entityCacheV1.put(instance.getId()._getId(), instance);
- }
-
- /**
- * Adds the specified instance to the cache
- *
- */
- public void cache(AtlasEntityWithExtInfo entity) {
- if (entity != null && entity.getEntity() != null && entity.getEntity().getGuid() != null) {
- entityCacheV2.put(entity.getEntity().getGuid(), entity);
- }
- }
-
- /**
- * Checks if an instance with the given guid is in the cache for this request. Either returns the instance
- * or null if it is not in the cache.
- *
- * @param guid the guid to find
- * @return Either the instance or null if it is not in the cache.
- */
- public ITypedReferenceableInstance getInstanceV1(String guid) {
- return entityCacheV1.get(guid);
- }
-
- /**
- * Checks if an instance with the given guid is in the cache for this request. Either returns the instance
- * or null if it is not in the cache.
- *
- * @param guid the guid to find
- * @return Either the instance or null if it is not in the cache.
- */
- public AtlasEntityWithExtInfo getInstanceV2(String guid) {
- return entityCacheV2.get(guid);
- }
-
- public static void clear() {
- RequestContext instance = CURRENT_CONTEXT.get();
-
- if (instance != null) {
- if (instance.entityCacheV1 != null) {
- instance.entityCacheV1.clear();
- }
-
- if (instance.entityCacheV2 != null) {
- instance.entityCacheV2.clear();
- }
- }
-
- CURRENT_CONTEXT.remove();
- }
-
- public String getUser() {
- return user;
- }
-
- public void setUser(String user) {
- this.user = user;
-
- RequestContextV1.get().setUser(user);
- }
-
- public void recordEntityCreate(Collection<String> createdEntityIds) {
- this.createdEntityIds.addAll(createdEntityIds);
- }
-
- public void recordEntityUpdate(Collection<String> updatedEntityIds) {
- this.updatedEntityIds.addAll(updatedEntityIds);
- }
-
- public void recordEntityUpdate(String entityId) {
- this.updatedEntityIds.add(entityId);
- }
-
- public void recordEntityDelete(String entityId, String typeName) throws AtlasException {
- ClassType type = typeSystem.getDataType(ClassType.class, typeName);
- ITypedReferenceableInstance entity = type.createInstance(new Id(entityId, 0, typeName));
- if (deletedEntityIds.add(entityId)) {
- deletedEntities.add(entity);
- }
- }
-
- public List<String> getCreatedEntityIds() {
- return new ArrayList<>(createdEntityIds);
- }
-
- public List<String> getUpdatedEntityIds() {
- return new ArrayList<>(updatedEntityIds);
- }
-
- public List<String> getDeletedEntityIds() {
- return new ArrayList<>(deletedEntityIds);
- }
-
- public List<ITypedReferenceableInstance> getDeletedEntities() {
- return deletedEntities;
- }
-
- public long getRequestTime() {
- return requestTime;
- }
-
- public boolean isDeletedEntity(String entityGuid) {
- return deletedEntityIds.contains(entityGuid);
- }
-
- public static Metrics getMetrics() {
- return get().metrics;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/RequestContextV1.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/RequestContextV1.java b/server-api/src/main/java/org/apache/atlas/RequestContextV1.java
index 08aa960..27fe3d2 100644
--- a/server-api/src/main/java/org/apache/atlas/RequestContextV1.java
+++ b/server-api/src/main/java/org/apache/atlas/RequestContextV1.java
@@ -19,14 +19,12 @@
package org.apache.atlas;
import org.apache.atlas.metrics.Metrics;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
import org.apache.atlas.model.instance.AtlasObjectId;
-import org.apache.atlas.typesystem.types.TypeSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.util.Collection;
-import java.util.LinkedHashSet;
-import java.util.Set;
+import java.util.*;
public class RequestContextV1 {
private static final Logger LOG = LoggerFactory.getLogger(RequestContextV1.class);
@@ -36,11 +34,11 @@ public class RequestContextV1 {
private Set<AtlasObjectId> createdEntityIds = new LinkedHashSet<>();
private Set<AtlasObjectId> updatedEntityIds = new LinkedHashSet<>();
private Set<AtlasObjectId> deletedEntityIds = new LinkedHashSet<>();
+ private Map<String, AtlasEntityWithExtInfo> entityCacheV2 = new HashMap<>();
private String user;
private final long requestTime;
- TypeSystem typeSystem = TypeSystem.getInstance();
private Metrics metrics = new Metrics();
private RequestContextV1() {
@@ -59,7 +57,16 @@ public class RequestContextV1 {
return ret;
}
+
public static void clear() {
+ RequestContextV1 instance = CURRENT_CONTEXT.get();
+
+ if (instance != null) {
+ if (instance.entityCacheV2 != null) {
+ instance.entityCacheV2.clear();
+ }
+ }
+
CURRENT_CONTEXT.remove();
}
@@ -91,6 +98,16 @@ public class RequestContextV1 {
deletedEntityIds.add(entityId);
}
+ /**
+ * Adds the specified entity instance to the cache for this request.
+ *
+ * @param entity the entity (with extended info) to cache; ignored if null or missing a guid
+ */
+ public void cache(AtlasEntityWithExtInfo entity) {
+ if (entity != null && entity.getEntity() != null && entity.getEntity().getGuid() != null) {
+ entityCacheV2.put(entity.getEntity().getGuid(), entity);
+ }
+ }
+
public Collection<AtlasObjectId> getCreatedEntityIds() {
return createdEntityIds;
}
@@ -103,6 +120,17 @@ public class RequestContextV1 {
return deletedEntityIds;
}
+ /**
+ * Checks if an instance with the given guid is in the cache for this request. Either returns the instance
+ * or null if it is not in the cache.
+ *
+ * @param guid the guid to find
+ * @return Either the instance or null if it is not in the cache.
+ */
+ public AtlasEntityWithExtInfo getInstanceV2(String guid) {
+ return entityCacheV2.get(guid);
+ }
+
public long getRequestTime() {
return requestTime;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/discovery/LineageService.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/discovery/LineageService.java b/server-api/src/main/java/org/apache/atlas/discovery/LineageService.java
deleted file mode 100644
index 5aab355..0000000
--- a/server-api/src/main/java/org/apache/atlas/discovery/LineageService.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.discovery;
-
-import org.apache.atlas.AtlasException;
-
-/**
- * Lineage service interface.
- */
-public interface LineageService {
-
- /**
- * Return the lineage outputs graph for the given datasetName.
- *
- * @param datasetName datasetName
- * @return Outputs Graph as JSON
- */
- String getOutputsGraph(String datasetName) throws AtlasException;
-
- /**
- * Return the lineage inputs graph for the given datasetName.
- *
- * @param datasetName datasetName
- * @return Inputs Graph as JSON
- */
- String getInputsGraph(String datasetName) throws AtlasException;
-
- /**
- * Return the lineage inputs graph for the given entity id.
- *
- * @param guid entity id
- * @return Inputs Graph as JSON
- */
- String getInputsGraphForEntity(String guid) throws AtlasException;
-
- /**
- * Return the lineage inputs graph for the given entity id.
- *
- * @param guid entity id
- * @return Inputs Graph as JSON
- */
- String getOutputsGraphForEntity(String guid) throws AtlasException;
-
- /**
- * Return the schema for the given datasetName.
- *
- * @param datasetName datasetName
- * @return Schema as JSON
- */
- String getSchema(String datasetName) throws AtlasException;
-
- /**
- * Return the schema for the given entity id.
- *
- * @param guid tableName
- * @return Schema as JSON
- */
- String getSchemaForEntity(String guid) throws AtlasException;
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/exception/EntityNotFoundException.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/exception/EntityNotFoundException.java b/server-api/src/main/java/org/apache/atlas/exception/EntityNotFoundException.java
new file mode 100644
index 0000000..14be3ed
--- /dev/null
+++ b/server-api/src/main/java/org/apache/atlas/exception/EntityNotFoundException.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.exception;
+
+/**
+ * A simple wrapper for 404.
+ */
+public class EntityNotFoundException extends NotFoundException {
+ public EntityNotFoundException() {
+ }
+
+ public EntityNotFoundException(String message) {
+ super(message);
+ }
+
+ public EntityNotFoundException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public EntityNotFoundException(Throwable cause) {
+ super(cause);
+ }
+
+ public EntityNotFoundException(String message, Throwable cause, boolean enableSuppression,
+ boolean writableStackTrace) {
+ super(message, cause, enableSuppression, writableStackTrace);
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/exception/NotFoundException.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/exception/NotFoundException.java b/server-api/src/main/java/org/apache/atlas/exception/NotFoundException.java
new file mode 100644
index 0000000..c24789d
--- /dev/null
+++ b/server-api/src/main/java/org/apache/atlas/exception/NotFoundException.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.exception;
+
+import org.apache.atlas.AtlasException;
+
+/**
+ * A simple wrapper for 404.
+ */
+public class NotFoundException extends AtlasException {
+ public NotFoundException() {
+ }
+
+ public NotFoundException(String message) {
+ super(message);
+ }
+
+ public NotFoundException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public NotFoundException(Throwable cause) {
+ super(cause);
+ }
+
+ public NotFoundException(String message, Throwable cause, boolean enableSuppression,
+ boolean writableStackTrace) {
+ super(message, cause, enableSuppression, writableStackTrace);
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/exception/SchemaNotFoundException.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/exception/SchemaNotFoundException.java b/server-api/src/main/java/org/apache/atlas/exception/SchemaNotFoundException.java
new file mode 100644
index 0000000..981ef2a
--- /dev/null
+++ b/server-api/src/main/java/org/apache/atlas/exception/SchemaNotFoundException.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.exception;
+
+public class SchemaNotFoundException extends NotFoundException {
+ public SchemaNotFoundException() {
+ }
+
+ public SchemaNotFoundException(String message) {
+ super(message);
+ }
+
+ public SchemaNotFoundException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public SchemaNotFoundException(Throwable cause) {
+ super(cause);
+ }
+
+ public SchemaNotFoundException(String message, Throwable cause, boolean enableSuppression,
+ boolean writableStackTrace) {
+ super(message, cause, enableSuppression, writableStackTrace);
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/exception/TraitNotFoundException.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/exception/TraitNotFoundException.java b/server-api/src/main/java/org/apache/atlas/exception/TraitNotFoundException.java
new file mode 100644
index 0000000..ba46a2e
--- /dev/null
+++ b/server-api/src/main/java/org/apache/atlas/exception/TraitNotFoundException.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.exception;
+
+/**
+ * A simple wrapper for 404.
+ * Thrown when a requested trait can not be found.
+ */
+public class TraitNotFoundException extends NotFoundException {
+ public TraitNotFoundException() {
+ }
+
+ public TraitNotFoundException(String message) {
+ super(message);
+ }
+
+ public TraitNotFoundException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public TraitNotFoundException(Throwable cause) {
+ super(cause);
+ }
+
+ public TraitNotFoundException(String message, Throwable cause, boolean enableSuppression,
+ boolean writableStackTrace) {
+ super(message, cause, enableSuppression, writableStackTrace);
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/listener/EntityChangeListener.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/listener/EntityChangeListener.java b/server-api/src/main/java/org/apache/atlas/listener/EntityChangeListener.java
index e05a775..19fae4f 100644
--- a/server-api/src/main/java/org/apache/atlas/listener/EntityChangeListener.java
+++ b/server-api/src/main/java/org/apache/atlas/listener/EntityChangeListener.java
@@ -19,8 +19,8 @@
package org.apache.atlas.listener;
import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
import java.util.Collection;
@@ -36,7 +36,7 @@ public interface EntityChangeListener {
* @param isImport
* @throws AtlasException if the listener notification fails
*/
- void onEntitiesAdded(Collection<ITypedReferenceableInstance> entities, boolean isImport) throws AtlasException;
+ void onEntitiesAdded(Collection<Referenceable> entities, boolean isImport) throws AtlasException;
/**
* This is upon updating an entity.
@@ -46,7 +46,7 @@ public interface EntityChangeListener {
* @param isImport
* @throws AtlasException if the listener notification fails
*/
- void onEntitiesUpdated(Collection<ITypedReferenceableInstance> entities, boolean isImport) throws AtlasException;
+ void onEntitiesUpdated(Collection<Referenceable> entities, boolean isImport) throws AtlasException;
/**
* This is upon adding a new trait to a typed instance.
@@ -56,7 +56,7 @@ public interface EntityChangeListener {
*
* @throws AtlasException if the listener notification fails
*/
- void onTraitsAdded(ITypedReferenceableInstance entity, Collection<? extends IStruct> traits) throws AtlasException;
+ void onTraitsAdded(Referenceable entity, Collection<? extends Struct> traits) throws AtlasException;
/**
* This is upon deleting a trait from a typed instance.
@@ -66,7 +66,7 @@ public interface EntityChangeListener {
*
* @throws AtlasException if the listener notification fails
*/
- void onTraitsDeleted(ITypedReferenceableInstance entity, Collection<String> traitNames) throws AtlasException;
+ void onTraitsDeleted(Referenceable entity, Collection<String> traitNames) throws AtlasException;
/**
* This is upon updating a trait from a typed instance.
@@ -76,7 +76,7 @@ public interface EntityChangeListener {
*
* @throws AtlasException if the listener notification fails
*/
- void onTraitsUpdated(ITypedReferenceableInstance entity, Collection<? extends IStruct> traits) throws AtlasException;
+ void onTraitsUpdated(Referenceable entity, Collection<? extends Struct> traits) throws AtlasException;
/**
* This is upon deleting entities from the repository.
@@ -85,5 +85,5 @@ public interface EntityChangeListener {
* @param isImport
* @throws AtlasException
*/
- void onEntitiesDeleted(Collection<ITypedReferenceableInstance> entities, boolean isImport) throws AtlasException;
+ void onEntitiesDeleted(Collection<Referenceable> entities, boolean isImport) throws AtlasException;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/listener/TypesChangeListener.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/listener/TypesChangeListener.java b/server-api/src/main/java/org/apache/atlas/listener/TypesChangeListener.java
index 5ff6d4a..0fcec50 100644
--- a/server-api/src/main/java/org/apache/atlas/listener/TypesChangeListener.java
+++ b/server-api/src/main/java/org/apache/atlas/listener/TypesChangeListener.java
@@ -19,7 +19,7 @@
package org.apache.atlas.listener;
import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.types.IDataType;
+import org.apache.atlas.type.AtlasType;
import java.util.Collection;
@@ -31,19 +31,19 @@ public interface TypesChangeListener {
/**
* This is upon adding new type(s) to Store.
*
- * @param dataTypes data type
+ * @param dataTypes the data types
* @throws AtlasException
*/
- void onAdd(Collection<? extends IDataType> dataTypes) throws AtlasException;
+ void onAdd(Collection<? extends AtlasType> dataTypes) throws AtlasException;
/**
* This is upon removing an existing type from the Store.
*
- * @param typeName type name
+ * @param dataTypes the data types
* @throws AtlasException
*/
// void onRemove(String typeName) throws MetadataException;
//This is upon updating an existing type to the store
- void onChange(Collection<? extends IDataType> dataTypes) throws AtlasException;
+ void onChange(Collection<? extends AtlasType> dataTypes) throws AtlasException;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/services/MetadataService.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/services/MetadataService.java b/server-api/src/main/java/org/apache/atlas/services/MetadataService.java
deleted file mode 100644
index d2aa457..0000000
--- a/server-api/src/main/java/org/apache/atlas/services/MetadataService.java
+++ /dev/null
@@ -1,317 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.services;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.CreateUpdateEntitiesResult;
-import org.apache.atlas.EntityAuditEvent;
-import org.apache.atlas.listener.EntityChangeListener;
-import org.apache.atlas.model.legacy.EntityResult;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.types.cache.TypeCache;
-import org.codehaus.jettison.json.JSONObject;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Metadata service.
- */
-@Deprecated
-public interface MetadataService {
-
- /**
- * Creates a new type based on the type system to enable adding
- * entities (instances for types).
- *
- * @param typeDefinition definition as json
- * @return a unique id for this type
- */
- JSONObject createType(String typeDefinition) throws AtlasException;
-
- /**z
- * Updates the given types in the type definition
- * @param typeDefinition
- * @return
- * @throws AtlasException
- */
- JSONObject updateType(String typeDefinition) throws AtlasException;
-
- /**
- * Return the definition for the given type.
- *
- * @param typeName name for this type, must be unique
- * @return type definition as JSON
- */
- String getTypeDefinition(String typeName) throws AtlasException;
-
- /**
- * Return the list of type names in the type system which match the specified filter.
- *
- * @return list of type names
- * @param filterMap - Map of filter for type names. Valid keys are CATEGORY, SUPERTYPE, NOT_SUPERTYPE
- * For example, CATEGORY = TRAIT && SUPERTYPE contains 'X' && SUPERTYPE !contains 'Y'
- * If there is no filter, all the types are returned
- */
- List<String> getTypeNames(Map<TypeCache.TYPE_FILTER, String> filterMap) throws AtlasException;
-
- /**
- * Creates an entity, instance of the type.
- *
- * @param entityDefinition definition
- * @return CreateUpdateEntitiesResult with the guids of the entities created
- */
- CreateUpdateEntitiesResult createEntities(String entityDefinition) throws AtlasException;
-
- /**
- * Get a typed entity instance.
- *
- * @param entity entity
- * @return typed entity instance
- *
- * @throws AtlasException if any failure occurs
- */
- ITypedReferenceableInstance getTypedReferenceableInstance(Referenceable entity) throws AtlasException;
-
- /**
- * Create entity instances.
- *
- * @param typedInstances instance to create
- * @return CreateUpdateEntitiesResult with the guids of the entities created
- *
- * @throws AtlasException if unable to create the entities
- */
- CreateUpdateEntitiesResult createEntities(ITypedReferenceableInstance[] typedInstances) throws AtlasException;
-
-
- /**
- * Return the definition for the given guid.
- *
- * @param guid guid
- * @return entity definition as JSON
- */
- String getEntityDefinitionJson(String guid) throws AtlasException;
-
- ITypedReferenceableInstance getEntityDefinition(String guid) throws AtlasException;
-
-
- /**
- * Return the definition given type and attribute. The attribute has to be unique attribute for the type
- * @param entityType - type name
- * @param attribute - attribute name
- * @param value - attribute value
- * @return
- * @throws AtlasException
- */
- ITypedReferenceableInstance getEntityDefinitionReference(String entityType, String attribute, String value) throws AtlasException;
-
- /**
- * Return the definition given type and attribute. The attribute has to be unique attribute for the type
- * @param entityType - type name
- * @param attribute - attribute name
- * @param value - attribute value
- * @return
- * @throws AtlasException
- */
- String getEntityDefinition(String entityType, String attribute, String value) throws AtlasException;
-
- /**
- * Return the list of entity names for the given type in the repository.
- *
- * @param entityType type
- * @return list of entity names for the given type in the repository
- */
- List<String> getEntityList(String entityType) throws AtlasException;
-
- /**
- * Adds the property to the given entity id(guid).
- * Currently supports updates only on PRIMITIVE, CLASS attribute types
- * @param guid entity id
- * @param attribute property name
- * @param value property value
- * @return {@link CreateUpdateEntitiesResult} with the guids of the entities that were created/updated
- */
- CreateUpdateEntitiesResult updateEntityAttributeByGuid(String guid, String attribute, String value) throws AtlasException;
-
- /**
- * Supports Partial updates of an entity. Users can update a subset of attributes for an entity identified by its guid
- * Note however that it cannot be used to set attribute values to null or delete attrbute values
- * @param guid entity id
- * @param entity
- * @return {@link CreateUpdateEntitiesResult} with the guids of the entities that were created/updated
- * @throws AtlasException
- */
- CreateUpdateEntitiesResult updateEntityPartialByGuid(String guid, Referenceable entity) throws AtlasException;
-
- /**
- * Batch API - Adds/Updates the given entity id(guid).
- *
- * @param entityJson entity json
- * @return {@link CreateUpdateEntitiesResult} with the guids of the entities that were created/updated
- */
- CreateUpdateEntitiesResult updateEntities(String entityJson) throws AtlasException;
-
-
- /**
- * Batch API - Adds/Updates the given entity id(guid).
- *
- * @param entityJson entity json
- * @return {@link CreateUpdateEntitiesResult} with the guids of the entities that were created/updated
- */
- CreateUpdateEntitiesResult updateEntities(ITypedReferenceableInstance[] iTypedReferenceableInstances) throws AtlasException;
-
- // Trait management functions
-
- /**
- * Updates entity identified by a qualified name
- *
- * @param typeName
- * @param uniqueAttributeName
- * @param attrValue
- * @param updatedEntity
- * @return Guid of updated entity
- * @throws AtlasException
- */
- CreateUpdateEntitiesResult updateEntityByUniqueAttribute(String typeName, String uniqueAttributeName,
- String attrValue,
- Referenceable updatedEntity) throws AtlasException;
-
- /**
- * Gets the list of trait names for a given entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @return a list of trait names for the given entity guid
- * @throws AtlasException
- */
- List<String> getTraitNames(String guid) throws AtlasException;
-
- /**
- * Adds a new trait to an existing entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @param traitInstanceDefinition trait instance that needs to be added to entity
- * @throws AtlasException
- */
- void addTrait(String guid, String traitInstanceDefinition) throws AtlasException;
-
- /**
- * Adds a new trait to an existing entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @param traitInstance trait instance to add *
- * @throws AtlasException if unable to add the trait instance
- */
- void addTrait(String guid, ITypedStruct traitInstance) throws AtlasException;
-
-
- /**
- * Adds a new trait to a list of existing entities represented by their respective guids
- * @param entityGuids list of guids of entities
- * @param traitInstance trait instance json that needs to be added to entities
- * @throws AtlasException
- */
- void addTrait(List<String> entityGuids, ITypedStruct traitInstance) throws AtlasException;
-
- /**
- * Create a typed trait instance.
- *
- * @param traitInstance trait instance
- * @return a typed trait instance
- * @throws AtlasException if unable to create the typed trait instance
- */
- ITypedStruct createTraitInstance(Struct traitInstance) throws AtlasException;
-
- /**
- * Return trait definition of a single trait for a given entity
- * @param guid - Guid of the entity to which the trait is tagged
- * @param traitName - Name of the trait
- * @return
- * @throws AtlasException
- */
- IStruct getTraitDefinition(String guid, String traitName) throws AtlasException;
-
- /**
- * Deletes a given trait from an existing entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @param traitNameToBeDeleted name of the trait
- * @throws AtlasException
- */
- void deleteTrait(String guid, String traitNameToBeDeleted) throws AtlasException;
-
- /**
- * Delete the specified entities from the repository
- *
- * @param guids entity guids to be deleted
- * @return List of guids for deleted entities
- * @throws AtlasException
- */
- EntityResult deleteEntities(List<String> guids) throws AtlasException;
-
- /**
- * Register a listener for entity change.
- *
- * @param listener the listener to register
- */
- void registerListener(EntityChangeListener listener);
-
- /**
- * Unregister an entity change listener.
- *
- * @param listener the listener to unregister
- */
- void unregisterListener(EntityChangeListener listener);
-
- /**
- * Delete the specified entity from the repository identified by its unique attribute (including its composite references)
- *
- * @param typeName The entity's type
- * @param uniqueAttributeName attribute name by which the entity could be identified uniquely
- * @param attrValue attribute value by which the entity could be identified uniquely
- * @return List of guids for deleted entities (including their composite references)
- * @throws AtlasException
- */
- EntityResult deleteEntityByUniqueAttribute(String typeName, String uniqueAttributeName,
- String attrValue) throws AtlasException;
-
- /**
- * Returns entity audit events for entity id in the decreasing order of timestamp
- * @param guid entity id
- * @param startKey key for the first event, used for pagination
- * @param count number of events to be returned
- * @return
- */
- List<EntityAuditEvent> getAuditEvents(String guid, String startKey, short count) throws AtlasException;
-
- /**
- * Deserializes entity instances into ITypedReferenceableInstance array.
- * @param entityInstanceDefinition
- * @return ITypedReferenceableInstance[]
- * @throws AtlasException
- */
- ITypedReferenceableInstance[] deserializeClassInstances(String entityInstanceDefinition) throws AtlasException;
-
- ITypedReferenceableInstance validateAndConvertToTypedInstance(IReferenceableInstance updatedEntity, String typeName)
- throws AtlasException;
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/typesystem/exception/EntityExistsException.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/typesystem/exception/EntityExistsException.java b/server-api/src/main/java/org/apache/atlas/typesystem/exception/EntityExistsException.java
deleted file mode 100644
index b16cfa9..0000000
--- a/server-api/src/main/java/org/apache/atlas/typesystem/exception/EntityExistsException.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.exception;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-
-public class EntityExistsException extends AtlasException {
- public EntityExistsException(IReferenceableInstance typedInstance, Exception e) {
- super("Model violation for type "+ typedInstance.getTypeName(), e);
- }
-
- public EntityExistsException(IReferenceableInstance typedInstance) {
- super("Model violation for type "+ typedInstance.getTypeName());
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/typesystem/exception/EntityNotFoundException.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/typesystem/exception/EntityNotFoundException.java b/server-api/src/main/java/org/apache/atlas/typesystem/exception/EntityNotFoundException.java
deleted file mode 100644
index 7579b80..0000000
--- a/server-api/src/main/java/org/apache/atlas/typesystem/exception/EntityNotFoundException.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.exception;
-
-/**
- * A simple wrapper for 404.
- */
-public class EntityNotFoundException extends NotFoundException {
- public EntityNotFoundException() {
- }
-
- public EntityNotFoundException(String message) {
- super(message);
- }
-
- public EntityNotFoundException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public EntityNotFoundException(Throwable cause) {
- super(cause);
- }
-
- public EntityNotFoundException(String message, Throwable cause, boolean enableSuppression,
- boolean writableStackTrace) {
- super(message, cause, enableSuppression, writableStackTrace);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/typesystem/exception/NotFoundException.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/typesystem/exception/NotFoundException.java b/server-api/src/main/java/org/apache/atlas/typesystem/exception/NotFoundException.java
deleted file mode 100644
index b45b970..0000000
--- a/server-api/src/main/java/org/apache/atlas/typesystem/exception/NotFoundException.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.exception;
-
-import org.apache.atlas.AtlasException;
-
-/**
- * A simple wrapper for 404.
- */
-public class NotFoundException extends AtlasException {
- public NotFoundException() {
- }
-
- public NotFoundException(String message) {
- super(message);
- }
-
- public NotFoundException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public NotFoundException(Throwable cause) {
- super(cause);
- }
-
- public NotFoundException(String message, Throwable cause, boolean enableSuppression,
- boolean writableStackTrace) {
- super(message, cause, enableSuppression, writableStackTrace);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/typesystem/exception/NullRequiredAttributeException.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/typesystem/exception/NullRequiredAttributeException.java b/server-api/src/main/java/org/apache/atlas/typesystem/exception/NullRequiredAttributeException.java
deleted file mode 100644
index db4b054..0000000
--- a/server-api/src/main/java/org/apache/atlas/typesystem/exception/NullRequiredAttributeException.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.typesystem.exception;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.types.Multiplicity;
-
-
-/**
- * Thrown when a repository operation attempts to
- * unset an attribute that is defined as required in the
- * type system. A required attribute has a non-zero
- * lower bound in its multiplicity.
- *
- * @see Multiplicity#REQUIRED
- * @see Multiplicity#COLLECTION
- * @see Multiplicity#SET
- *
- */
-public class NullRequiredAttributeException extends AtlasException {
-
- private static final long serialVersionUID = 4023597038462910948L;
-
- public NullRequiredAttributeException() {
- super();
- }
-
- public NullRequiredAttributeException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
- super(message, cause, enableSuppression, writableStackTrace);
- }
-
- public NullRequiredAttributeException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public NullRequiredAttributeException(String message) {
- super(message);
- }
-
- public NullRequiredAttributeException(Throwable cause) {
- super(cause);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/typesystem/exception/SchemaNotFoundException.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/typesystem/exception/SchemaNotFoundException.java b/server-api/src/main/java/org/apache/atlas/typesystem/exception/SchemaNotFoundException.java
deleted file mode 100644
index 55b4088..0000000
--- a/server-api/src/main/java/org/apache/atlas/typesystem/exception/SchemaNotFoundException.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.typesystem.exception;
-
-public class SchemaNotFoundException extends NotFoundException {
- public SchemaNotFoundException() {
- }
-
- public SchemaNotFoundException(String message) {
- super(message);
- }
-
- public SchemaNotFoundException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public SchemaNotFoundException(Throwable cause) {
- super(cause);
- }
-
- public SchemaNotFoundException(String message, Throwable cause, boolean enableSuppression,
- boolean writableStackTrace) {
- super(message, cause, enableSuppression, writableStackTrace);
- }
-}
[38/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/test/java/org/apache/atlas/TestUtilsV2.java
----------------------------------------------------------------------
diff --git a/intg/src/test/java/org/apache/atlas/TestUtilsV2.java b/intg/src/test/java/org/apache/atlas/TestUtilsV2.java
index 91fdb47..bbccf77 100755
--- a/intg/src/test/java/org/apache/atlas/TestUtilsV2.java
+++ b/intg/src/test/java/org/apache/atlas/TestUtilsV2.java
@@ -18,8 +18,6 @@
package org.apache.atlas;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
@@ -30,7 +28,6 @@ import org.apache.atlas.model.typedef.AtlasClassificationDef;
import org.apache.atlas.model.typedef.AtlasEntityDef;
import org.apache.atlas.model.typedef.AtlasEnumDef;
import org.apache.atlas.model.typedef.AtlasEnumDef.AtlasEnumElementDef;
-import org.apache.atlas.model.typedef.AtlasRelationshipDef;
import org.apache.atlas.model.typedef.AtlasStructDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality;
@@ -92,7 +89,7 @@ public final class TestUtilsV2 {
AtlasTypeUtil.createRequiredAttrDef("city", "string"));
AtlasEntityDef deptTypeDef =
- AtlasTypeUtil.createClassTypeDef(DEPARTMENT_TYPE, "Department"+_description, ImmutableSet.<String>of(),
+ AtlasTypeUtil.createClassTypeDef(DEPARTMENT_TYPE, "Department"+_description, Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
new AtlasAttributeDef("employees", String.format("array<%s>", "Employee"), true,
AtlasAttributeDef.Cardinality.SINGLE, 0, 1, false, false,
@@ -100,7 +97,7 @@ public final class TestUtilsV2 {
add(new AtlasStructDef.AtlasConstraintDef(AtlasConstraintDef.CONSTRAINT_TYPE_OWNED_REF));
}}));
- AtlasEntityDef personTypeDef = AtlasTypeUtil.createClassTypeDef("Person", "Person"+_description, ImmutableSet.<String>of(),
+ AtlasEntityDef personTypeDef = AtlasTypeUtil.createClassTypeDef("Person", "Person"+_description, Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
AtlasTypeUtil.createOptionalAttrDef("address", "Address"),
AtlasTypeUtil.createOptionalAttrDef("birthday", "date"),
@@ -113,7 +110,7 @@ public final class TestUtilsV2 {
AtlasTypeUtil.createOptionalAttrDef("approximationOfPi", "bigdecimal")
);
- AtlasEntityDef employeeTypeDef = AtlasTypeUtil.createClassTypeDef("Employee", "Employee"+_description, ImmutableSet.of("Person"),
+ AtlasEntityDef employeeTypeDef = AtlasTypeUtil.createClassTypeDef("Employee", "Employee"+_description, Collections.singleton("Person"),
AtlasTypeUtil.createOptionalAttrDef("orgLevel", "OrgLevel"),
new AtlasAttributeDef("department", "Department", false,
AtlasAttributeDef.Cardinality.SINGLE, 1, 1,
@@ -142,18 +139,18 @@ public final class TestUtilsV2 {
put(AtlasConstraintDef.CONSTRAINT_PARAM_ATTRIBUTE, "employees");
}}));
- AtlasEntityDef managerTypeDef = AtlasTypeUtil.createClassTypeDef("Manager", "Manager"+_description, ImmutableSet.of("Employee"),
+ AtlasEntityDef managerTypeDef = AtlasTypeUtil.createClassTypeDef("Manager", "Manager"+_description, Collections.singleton("Employee"),
new AtlasAttributeDef("subordinates", String.format("array<%s>", "Employee"), false, AtlasAttributeDef.Cardinality.SET,
1, 10, false, false,
Collections.<AtlasConstraintDef>emptyList()));
AtlasClassificationDef securityClearanceTypeDef =
- AtlasTypeUtil.createTraitTypeDef("SecurityClearance", "SecurityClearance"+_description, ImmutableSet.<String>of(),
+ AtlasTypeUtil.createTraitTypeDef("SecurityClearance", "SecurityClearance"+_description, Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("level", "int"));
- AtlasTypesDef ret = new AtlasTypesDef(ImmutableList.of(orgLevelEnum), ImmutableList.of(addressDetails),
- ImmutableList.of(securityClearanceTypeDef),
- ImmutableList.of(deptTypeDef, personTypeDef, employeeTypeDef, managerTypeDef));
+ AtlasTypesDef ret = new AtlasTypesDef(Collections.singletonList(orgLevelEnum), Collections.singletonList(addressDetails),
+ Collections.singletonList(securityClearanceTypeDef),
+ Arrays.asList(deptTypeDef, personTypeDef, employeeTypeDef, managerTypeDef));
populateSystemAttributes(ret);
@@ -161,7 +158,7 @@ public final class TestUtilsV2 {
}
public static AtlasTypesDef defineInverseReferenceTestTypes() {
- AtlasEntityDef aDef = AtlasTypeUtil.createClassTypeDef("A", ImmutableSet.<String>of(),
+ AtlasEntityDef aDef = AtlasTypeUtil.createClassTypeDef("A", Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
new AtlasAttributeDef("b", "B", true, Cardinality.SINGLE, 0, 1, false, false, Collections.<AtlasConstraintDef>emptyList()), // 1-1
new AtlasAttributeDef("oneB", "B", true, Cardinality.SINGLE, 0, 1, false, false, Collections.<AtlasConstraintDef>emptyList()), // 1-*
@@ -170,7 +167,7 @@ public final class TestUtilsV2 {
Collections.<AtlasConstraintDef>singletonList(new AtlasConstraintDef(
AtlasConstraintDef.CONSTRAINT_TYPE_INVERSE_REF, Collections.<String, Object>singletonMap(AtlasConstraintDef.CONSTRAINT_PARAM_ATTRIBUTE, "mappedFromA"))))); // *-*
- AtlasEntityDef bDef = AtlasTypeUtil.createClassTypeDef("B", ImmutableSet.<String>of(),
+ AtlasEntityDef bDef = AtlasTypeUtil.createClassTypeDef("B", Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
new AtlasAttributeDef("a", "A", true, Cardinality.SINGLE, 0, 1, false, false,
Collections.<AtlasConstraintDef>singletonList(new AtlasConstraintDef(
@@ -183,7 +180,7 @@ public final class TestUtilsV2 {
AtlasConstraintDef.CONSTRAINT_TYPE_INVERSE_REF, Collections.<String, Object>singletonMap(AtlasConstraintDef.CONSTRAINT_PARAM_ATTRIBUTE, "manyB")))),
new AtlasAttributeDef("mappedFromA", "A", true, Cardinality.SINGLE, 0, 1, false, false, Collections.<AtlasConstraintDef>emptyList()));
- AtlasTypesDef ret = new AtlasTypesDef(ImmutableList.<AtlasEnumDef>of(), ImmutableList.<AtlasStructDef>of(), ImmutableList.<AtlasClassificationDef>of(), ImmutableList.<AtlasEntityDef>of(aDef, bDef));
+ AtlasTypesDef ret = new AtlasTypesDef(Collections.<AtlasEnumDef>emptyList(), Collections.<AtlasStructDef>emptyList(), Collections.<AtlasClassificationDef>emptyList(), Arrays.asList(aDef, bDef));
populateSystemAttributes(ret);
@@ -207,7 +204,7 @@ public final class TestUtilsV2 {
AtlasEntityDef deptTypeDef =
AtlasTypeUtil.createClassTypeDef(DEPARTMENT_TYPE, "Department"+_description,
- ImmutableSet.<String>of(),
+ Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
AtlasTypeUtil.createOptionalAttrDef("dep-code", "string"),
new AtlasAttributeDef("employees", String.format("array<%s>", "Employee"), true,
@@ -217,7 +214,7 @@ public final class TestUtilsV2 {
}}));
AtlasEntityDef personTypeDef = AtlasTypeUtil.createClassTypeDef("Person", "Person"+_description,
- ImmutableSet.<String>of(),
+ Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
AtlasTypeUtil.createOptionalAttrDef("email", "string"),
AtlasTypeUtil.createOptionalAttrDef("address", "Address"),
@@ -232,7 +229,7 @@ public final class TestUtilsV2 {
);
AtlasEntityDef employeeTypeDef = AtlasTypeUtil.createClassTypeDef("Employee", "Employee"+_description,
- ImmutableSet.of("Person"),
+ Collections.singleton("Person"),
AtlasTypeUtil.createOptionalAttrDef("orgLevel", "OrgLevel"),
AtlasTypeUtil.createOptionalAttrDef("empCode", "string"),
new AtlasAttributeDef("department", "Department", false,
@@ -258,19 +255,19 @@ public final class TestUtilsV2 {
);
AtlasEntityDef managerTypeDef = AtlasTypeUtil.createClassTypeDef("Manager", "Manager"+_description,
- ImmutableSet.of("Employee"),
+ Collections.singleton("Employee"),
new AtlasAttributeDef("subordinates", String.format("array<%s>", "Employee"), false, AtlasAttributeDef.Cardinality.SET,
1, 10, false, false,
Collections.<AtlasConstraintDef>emptyList()));
AtlasClassificationDef securityClearanceTypeDef =
- AtlasTypeUtil.createTraitTypeDef("SecurityClearance", "SecurityClearance"+_description, ImmutableSet.<String>of(),
+ AtlasTypeUtil.createTraitTypeDef("SecurityClearance", "SecurityClearance"+_description, Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("level", "int"));
- AtlasTypesDef ret = new AtlasTypesDef(ImmutableList.of(orgLevelEnum),
- ImmutableList.of(addressDetails),
- ImmutableList.of(securityClearanceTypeDef),
- ImmutableList.of(deptTypeDef, personTypeDef, employeeTypeDef, managerTypeDef));
+ AtlasTypesDef ret = new AtlasTypesDef(Collections.singletonList(orgLevelEnum),
+ Collections.singletonList(addressDetails),
+ Collections.singletonList(securityClearanceTypeDef),
+ Arrays.asList(deptTypeDef, personTypeDef, employeeTypeDef, managerTypeDef));
populateSystemAttributes(ret);
@@ -295,7 +292,7 @@ public final class TestUtilsV2 {
AtlasTypeUtil.createRequiredAttrDef("zip", "int"));
AtlasEntityDef deptTypeDef =
- AtlasTypeUtil.createClassTypeDef(DEPARTMENT_TYPE, "Department"+_description, ImmutableSet.<String>of(),
+ AtlasTypeUtil.createClassTypeDef(DEPARTMENT_TYPE, "Department"+_description, Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("name", "string"),
AtlasTypeUtil.createRequiredAttrDef("dep-code", "string"),
new AtlasAttributeDef("employees", String.format("array<%s>", "Person"), true,
@@ -304,7 +301,7 @@ public final class TestUtilsV2 {
add(new AtlasStructDef.AtlasConstraintDef(AtlasConstraintDef.CONSTRAINT_TYPE_OWNED_REF));
}}));
- AtlasEntityDef personTypeDef = AtlasTypeUtil.createClassTypeDef("Person", "Person"+_description, ImmutableSet.<String>of(),
+ AtlasEntityDef personTypeDef = AtlasTypeUtil.createClassTypeDef("Person", "Person"+_description, Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("name", "string"),
AtlasTypeUtil.createRequiredAttrDef("emp-code", "string"),
AtlasTypeUtil.createOptionalAttrDef("orgLevel", "OrgLevel"),
@@ -338,10 +335,10 @@ public final class TestUtilsV2 {
AtlasTypeUtil.createOptionalAttrDef("approximationOfPi", "bigdecimal")
);
- AtlasTypesDef ret = new AtlasTypesDef(ImmutableList.of(orgLevelEnum),
- ImmutableList.of(addressDetails),
- ImmutableList.<AtlasClassificationDef>of(),
- ImmutableList.of(deptTypeDef, personTypeDef));
+ AtlasTypesDef ret = new AtlasTypesDef(Collections.singletonList(orgLevelEnum),
+ Collections.singletonList(addressDetails),
+ Collections.<AtlasClassificationDef>emptyList(),
+ Arrays.asList(deptTypeDef, personTypeDef));
populateSystemAttributes(ret);
@@ -390,7 +387,7 @@ public final class TestUtilsV2 {
julius.setAttribute("name", "Julius");
julius.setAttribute("department", hrDeptId);
julius.setAttribute("address", juliusAddr);
- julius.setAttribute("subordinates", ImmutableList.of());
+ julius.setAttribute("subordinates", Collections.emptyList());
/******* Employee - Max (Manager: Jane, Mentor: Julius) *******/
AtlasEntity max = new AtlasEntity(EMPLOYEE_TYPE);
@@ -430,8 +427,8 @@ public final class TestUtilsV2 {
john.setAttribute("numberOfStarsEstimate", new BigInteger("1000000000000000000000"));
john.setAttribute("approximationOfPi", new BigDecimal("3.141592653589793238462643383279502884197169399375105820974944592307816406286"));
- jane.setAttribute("subordinates", ImmutableList.of(johnId, maxId));
- hrDept.setAttribute("employees", ImmutableList.of(janeId, juliusId, maxId, johnId));
+ jane.setAttribute("subordinates", Arrays.asList(johnId, maxId));
+ hrDept.setAttribute("employees", Arrays.asList(janeId, juliusId, maxId, johnId));
entitiesWithExtInfo.addEntity(hrDept);
entitiesWithExtInfo.addEntity(jane);
@@ -487,7 +484,7 @@ public final class TestUtilsV2 {
juliusAddr.setAttribute("street", "Madison Ave");
juliusAddr.setAttribute("city", "Newtonville");
julius.setAttribute("address", juliusAddr);
- julius.setAttribute("subordinates", ImmutableList.of());
+ julius.setAttribute("subordinates", Collections.emptyList());
AtlasObjectId janeId = AtlasTypeUtil.getAtlasObjectId(jane);
AtlasObjectId johnId = AtlasTypeUtil.getAtlasObjectId(john);
@@ -516,9 +513,9 @@ public final class TestUtilsV2 {
john.setAttribute("manager", janeId);
john.setAttribute("mentor", maxId);
- hrDept.setAttribute("employees", ImmutableList.of(johnId, janeId, juliusId, maxId));
+ hrDept.setAttribute("employees", Arrays.asList(johnId, janeId, juliusId, maxId));
- jane.setAttribute("subordinates", ImmutableList.of(johnId, maxId));
+ jane.setAttribute("subordinates", Arrays.asList(johnId, maxId));
deptEmpEntities.put(jane.getGuid(), jane);
deptEmpEntities.put(john.getGuid(), john);
@@ -549,20 +546,20 @@ public final class TestUtilsV2 {
public static AtlasTypesDef simpleType(){
AtlasEntityDef superTypeDefinition =
- AtlasTypeUtil.createClassTypeDef("h_type", ImmutableSet.<String>of(),
+ AtlasTypeUtil.createClassTypeDef("h_type", Collections.<String>emptySet(),
AtlasTypeUtil.createOptionalAttrDef("attr", "string"));
AtlasStructDef structTypeDefinition = new AtlasStructDef("s_type", "structType", "1.0",
Arrays.asList(AtlasTypeUtil.createRequiredAttrDef("name", "string")));
AtlasClassificationDef traitTypeDefinition =
- AtlasTypeUtil.createTraitTypeDef("t_type", "traitType", ImmutableSet.<String>of());
+ AtlasTypeUtil.createTraitTypeDef("t_type", "traitType", Collections.<String>emptySet());
AtlasEnumDef enumTypeDefinition = new AtlasEnumDef("e_type", "enumType", "1.0",
Arrays.asList(new AtlasEnumElementDef("ONE", "Element Description", 1)));
- AtlasTypesDef ret = AtlasTypeUtil.getTypesDef(ImmutableList.of(enumTypeDefinition), ImmutableList.of(structTypeDefinition),
- ImmutableList.of(traitTypeDefinition), ImmutableList.of(superTypeDefinition));
+ AtlasTypesDef ret = AtlasTypeUtil.getTypesDef(Collections.singletonList(enumTypeDefinition), Collections.singletonList(structTypeDefinition),
+ Collections.singletonList(traitTypeDefinition), Collections.singletonList(superTypeDefinition));
populateSystemAttributes(ret);
@@ -571,23 +568,23 @@ public final class TestUtilsV2 {
public static AtlasTypesDef simpleTypeUpdated(){
AtlasEntityDef superTypeDefinition =
- AtlasTypeUtil.createClassTypeDef("h_type", ImmutableSet.<String>of(),
+ AtlasTypeUtil.createClassTypeDef("h_type", Collections.<String>emptySet(),
AtlasTypeUtil.createOptionalAttrDef("attr", "string"));
AtlasEntityDef newSuperTypeDefinition =
- AtlasTypeUtil.createClassTypeDef("new_h_type", ImmutableSet.<String>of(),
+ AtlasTypeUtil.createClassTypeDef("new_h_type", Collections.<String>emptySet(),
AtlasTypeUtil.createOptionalAttrDef("attr", "string"));
AtlasStructDef structTypeDefinition = new AtlasStructDef("s_type", "structType", "1.0",
Arrays.asList(AtlasTypeUtil.createRequiredAttrDef("name", "string")));
AtlasClassificationDef traitTypeDefinition =
- AtlasTypeUtil.createTraitTypeDef("t_type", "traitType", ImmutableSet.<String>of());
+ AtlasTypeUtil.createTraitTypeDef("t_type", "traitType", Collections.<String>emptySet());
AtlasEnumDef enumTypeDefinition = new AtlasEnumDef("e_type", "enumType",
Arrays.asList(new AtlasEnumElementDef("ONE", "Element Description", 1)));
- AtlasTypesDef ret = AtlasTypeUtil.getTypesDef(ImmutableList.of(enumTypeDefinition), ImmutableList.of(structTypeDefinition),
- ImmutableList.of(traitTypeDefinition), ImmutableList.of(superTypeDefinition, newSuperTypeDefinition));
+ AtlasTypesDef ret = AtlasTypeUtil.getTypesDef(Collections.singletonList(enumTypeDefinition), Collections.singletonList(structTypeDefinition),
+ Collections.singletonList(traitTypeDefinition), Arrays.asList(superTypeDefinition, newSuperTypeDefinition));
populateSystemAttributes(ret);
@@ -596,13 +593,13 @@ public final class TestUtilsV2 {
public static AtlasTypesDef simpleTypeUpdatedDiff() {
AtlasEntityDef newSuperTypeDefinition =
- AtlasTypeUtil.createClassTypeDef("new_h_type", ImmutableSet.<String>of(),
+ AtlasTypeUtil.createClassTypeDef("new_h_type", Collections.<String>emptySet(),
AtlasTypeUtil.createOptionalAttrDef("attr", "string"));
- AtlasTypesDef ret = AtlasTypeUtil.getTypesDef(ImmutableList.<AtlasEnumDef>of(),
- ImmutableList.<AtlasStructDef>of(),
- ImmutableList.<AtlasClassificationDef>of(),
- ImmutableList.of(newSuperTypeDefinition));
+ AtlasTypesDef ret = AtlasTypeUtil.getTypesDef(Collections.<AtlasEnumDef>emptyList(),
+ Collections.<AtlasStructDef>emptyList(),
+ Collections.<AtlasClassificationDef>emptyList(),
+ Collections.singletonList(newSuperTypeDefinition));
populateSystemAttributes(ret);
@@ -613,12 +610,12 @@ public final class TestUtilsV2 {
public static AtlasTypesDef defineHiveTypes() {
String _description = "_description";
AtlasEntityDef superTypeDefinition =
- AtlasTypeUtil.createClassTypeDef(SUPER_TYPE_NAME, "SuperType_description", ImmutableSet.<String>of(),
+ AtlasTypeUtil.createClassTypeDef(SUPER_TYPE_NAME, "SuperType_description", Collections.<String>emptySet(),
AtlasTypeUtil.createOptionalAttrDef("namespace", "string"),
AtlasTypeUtil.createOptionalAttrDef("cluster", "string"),
AtlasTypeUtil.createOptionalAttrDef("colo", "string"));
AtlasEntityDef databaseTypeDefinition =
- AtlasTypeUtil.createClassTypeDef(DATABASE_TYPE, DATABASE_TYPE + _description,ImmutableSet.of(SUPER_TYPE_NAME),
+ AtlasTypeUtil.createClassTypeDef(DATABASE_TYPE, DATABASE_TYPE + _description,Collections.singleton(SUPER_TYPE_NAME),
AtlasTypeUtil.createUniqueRequiredAttrDef(NAME, "string"),
AtlasTypeUtil.createOptionalAttrDef("isReplicated", "boolean"),
AtlasTypeUtil.createOptionalAttrDef("created", "string"),
@@ -640,7 +637,7 @@ public final class TestUtilsV2 {
AtlasEntityDef columnsDefinition =
AtlasTypeUtil.createClassTypeDef(COLUMN_TYPE, COLUMN_TYPE + "_description",
- ImmutableSet.<String>of(),
+ Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
AtlasTypeUtil.createRequiredAttrDef("type", "string"),
AtlasTypeUtil.createOptionalAttrDef("description", "string"),
@@ -684,7 +681,7 @@ public final class TestUtilsV2 {
AtlasEntityDef storageDescClsDef =
new AtlasEntityDef(STORAGE_DESC_TYPE, STORAGE_DESC_TYPE + _description, "1.0",
- Arrays.asList(attributeDefinitions), ImmutableSet.of(SUPER_TYPE_NAME));
+ Arrays.asList(attributeDefinitions), Collections.singleton(SUPER_TYPE_NAME));
AtlasAttributeDef[] partClsAttributes = new AtlasAttributeDef[]{
new AtlasAttributeDef("values", "array<string>",
@@ -720,7 +717,7 @@ public final class TestUtilsV2 {
AtlasEntityDef partClsDef =
new AtlasEntityDef("partition_class_type", "partition_class_type" + _description, "1.0",
- Arrays.asList(partClsAttributes), ImmutableSet.of(SUPER_TYPE_NAME));
+ Arrays.asList(partClsAttributes), Collections.singleton(SUPER_TYPE_NAME));
AtlasEntityDef processClsType =
new AtlasEntityDef(PROCESS_TYPE, PROCESS_TYPE + _description, "1.0",
@@ -728,10 +725,10 @@ public final class TestUtilsV2 {
AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
false, false,
Collections.<AtlasConstraintDef>emptyList())),
- ImmutableSet.<String>of());
+ Collections.<String>emptySet());
AtlasEntityDef tableTypeDefinition =
- AtlasTypeUtil.createClassTypeDef(TABLE_TYPE, TABLE_TYPE + _description, ImmutableSet.of(SUPER_TYPE_NAME),
+ AtlasTypeUtil.createClassTypeDef(TABLE_TYPE, TABLE_TYPE + _description, Collections.singleton(SUPER_TYPE_NAME),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
AtlasTypeUtil.createOptionalAttrDef("description", "string"),
AtlasTypeUtil.createRequiredAttrDef("type", "string"),
@@ -810,26 +807,25 @@ public final class TestUtilsV2 {
));
AtlasClassificationDef piiTypeDefinition =
- AtlasTypeUtil.createTraitTypeDef(PII, PII + _description, ImmutableSet.<String>of());
+ AtlasTypeUtil.createTraitTypeDef(PII, PII + _description, Collections.<String>emptySet());
AtlasClassificationDef classificationTypeDefinition =
- AtlasTypeUtil.createTraitTypeDef(CLASSIFICATION, CLASSIFICATION + _description, ImmutableSet.<String>of(),
+ AtlasTypeUtil.createTraitTypeDef(CLASSIFICATION, CLASSIFICATION + _description, Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("tag", "string"));
AtlasClassificationDef fetlClassificationTypeDefinition =
- AtlasTypeUtil.createTraitTypeDef("fetl" + CLASSIFICATION, "fetl" + CLASSIFICATION + _description, ImmutableSet.of(CLASSIFICATION),
+ AtlasTypeUtil.createTraitTypeDef("fetl" + CLASSIFICATION, "fetl" + CLASSIFICATION + _description, Collections.singleton(CLASSIFICATION),
AtlasTypeUtil.createRequiredAttrDef("tag", "string"));
- AtlasClassificationDef phiTypeDefinition = AtlasTypeUtil.createTraitTypeDef(PHI, PHI + _description, ImmutableSet.<String>of(),
+ AtlasClassificationDef phiTypeDefinition = AtlasTypeUtil.createTraitTypeDef(PHI, PHI + _description, Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("stringAttr", "string"),
AtlasTypeUtil.createRequiredAttrDef("booleanAttr", "boolean"),
AtlasTypeUtil.createRequiredAttrDef("integerAttr", "int"));
- AtlasTypesDef ret = AtlasTypeUtil.getTypesDef(ImmutableList.of(enumTypeDefinition),
- ImmutableList.of(structTypeDefinition, partitionDefinition),
- ImmutableList.of(classificationTypeDefinition, fetlClassificationTypeDefinition, piiTypeDefinition, phiTypeDefinition),
- ImmutableList.of(superTypeDefinition, databaseTypeDefinition, columnsDefinition, tableTypeDefinition,
- storageDescClsDef, partClsDef, processClsType));
+ AtlasTypesDef ret = AtlasTypeUtil.getTypesDef(Collections.singletonList(enumTypeDefinition),
+ Arrays.asList(structTypeDefinition, partitionDefinition),
+ Arrays.asList(classificationTypeDefinition, fetlClassificationTypeDefinition, piiTypeDefinition, phiTypeDefinition),
+ Arrays.asList(superTypeDefinition, databaseTypeDefinition, columnsDefinition, tableTypeDefinition, storageDescClsDef, partClsDef, processClsType));
populateSystemAttributes(ret);
@@ -840,6 +836,10 @@ public final class TestUtilsV2 {
return RandomStringUtils.randomAlphanumeric(10);
}
+ public static final String randomString(int count) {
+ return RandomStringUtils.randomAlphanumeric(count);
+ }
+
public static AtlasEntity createDBEntity() {
String dbName = RandomStringUtils.randomAlphanumeric(10);
return createDBEntity(dbName);
@@ -990,11 +990,11 @@ public final class TestUtilsV2 {
public static List<AtlasClassificationDef> getClassificationWithValidSuperType() {
AtlasClassificationDef securityClearanceTypeDef =
- AtlasTypeUtil.createTraitTypeDef("SecurityClearance1", "SecurityClearance_description", ImmutableSet.<String>of(),
+ AtlasTypeUtil.createTraitTypeDef("SecurityClearance1", "SecurityClearance_description", Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("level", "int"));
AtlasClassificationDef janitorSecurityClearanceTypeDef =
- AtlasTypeUtil.createTraitTypeDef("JanitorClearance", "JanitorClearance_description", ImmutableSet.of("SecurityClearance1"),
+ AtlasTypeUtil.createTraitTypeDef("JanitorClearance", "JanitorClearance_description", Collections.singleton("SecurityClearance1"),
AtlasTypeUtil.createRequiredAttrDef("level", "int"));
List<AtlasClassificationDef> ret = Arrays.asList(securityClearanceTypeDef, janitorSecurityClearanceTypeDef);
@@ -1006,7 +1006,7 @@ public final class TestUtilsV2 {
public static List<AtlasClassificationDef> getClassificationWithName(String name) {
AtlasClassificationDef classificationTypeDef =
- AtlasTypeUtil.createTraitTypeDef(name, "s_description", ImmutableSet.<String>of(),
+ AtlasTypeUtil.createTraitTypeDef(name, "s_description", Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("level", "int"));
@@ -1019,7 +1019,7 @@ public final class TestUtilsV2 {
public static AtlasClassificationDef getSingleClassificationWithName(String name) {
AtlasClassificationDef classificaitonTypeDef =
- AtlasTypeUtil.createTraitTypeDef(name, "s_description", ImmutableSet.<String>of(),
+ AtlasTypeUtil.createTraitTypeDef(name, "s_description", Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("level", "int"));
populateSystemAttributes(classificaitonTypeDef);
@@ -1032,7 +1032,7 @@ public final class TestUtilsV2 {
}
public static List<AtlasEntityDef> getEntityWithValidSuperType() {
- AtlasEntityDef developerTypeDef = AtlasTypeUtil.createClassTypeDef("Developer", "Developer_description", ImmutableSet.of("Employee"),
+ AtlasEntityDef developerTypeDef = AtlasTypeUtil.createClassTypeDef("Developer", "Developer_description", Collections.singleton("Employee"),
new AtlasAttributeDef("language", String.format("array<%s>", "string"), false, AtlasAttributeDef.Cardinality.SET,
1, 10, false, false,
Collections.<AtlasConstraintDef>emptyList()));
@@ -1045,7 +1045,7 @@ public final class TestUtilsV2 {
}
public static List<AtlasEntityDef> getEntityWithName(String name) {
- AtlasEntityDef developerTypeDef = AtlasTypeUtil.createClassTypeDef(name, "Developer_description", ImmutableSet.<String>of(),
+ AtlasEntityDef developerTypeDef = AtlasTypeUtil.createClassTypeDef(name, "Developer_description", Collections.<String>emptySet(),
new AtlasAttributeDef("language", String.format("array<%s>", "string"), false, AtlasAttributeDef.Cardinality.SET,
1, 10, false, false,
Collections.<AtlasConstraintDef>emptyList()));
@@ -1058,7 +1058,7 @@ public final class TestUtilsV2 {
}
public static AtlasEntityDef getSingleEntityWithName(String name) {
- AtlasEntityDef developerTypeDef = AtlasTypeUtil.createClassTypeDef(name, "Developer_description", ImmutableSet.<String>of(),
+ AtlasEntityDef developerTypeDef = AtlasTypeUtil.createClassTypeDef(name, "Developer_description", Collections.<String>emptySet(),
new AtlasAttributeDef("language", String.format("array<%s>", "string"), false, AtlasAttributeDef.Cardinality.SET,
1, 10, false, false,
Collections.<AtlasConstraintDef>emptyList()));
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/test/java/org/apache/atlas/type/TestAtlasRelationshipType.java
----------------------------------------------------------------------
diff --git a/intg/src/test/java/org/apache/atlas/type/TestAtlasRelationshipType.java b/intg/src/test/java/org/apache/atlas/type/TestAtlasRelationshipType.java
index 8f3ac5b..819dfe5 100644
--- a/intg/src/test/java/org/apache/atlas/type/TestAtlasRelationshipType.java
+++ b/intg/src/test/java/org/apache/atlas/type/TestAtlasRelationshipType.java
@@ -17,7 +17,6 @@
*/
package org.apache.atlas.type;
-import com.google.common.collect.ImmutableSet;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
@@ -33,10 +32,7 @@ import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinali
import org.testng.Assert;
import org.testng.annotations.Test;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
import static org.testng.Assert.fail;
@@ -195,22 +191,22 @@ public class TestAtlasRelationshipType {
}
private void createEmployeeTypes() throws AtlasBaseException {
- AtlasEntityDef phoneDef = AtlasTypeUtil.createClassTypeDef(PHONE_TYPE, getDescription(PHONE_TYPE), ImmutableSet.<String>of(),
+ AtlasEntityDef phoneDef = AtlasTypeUtil.createClassTypeDef(PHONE_TYPE, getDescription(PHONE_TYPE), Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("phone_number", "int"),
AtlasTypeUtil.createOptionalAttrDef("area_code", "int"),
AtlasTypeUtil.createOptionalAttrDef("owner", EMPLOYEE_TYPE));
- AtlasEntityDef employeeDef = AtlasTypeUtil.createClassTypeDef(EMPLOYEE_TYPE, getDescription(EMPLOYEE_TYPE), ImmutableSet.<String>of(),
+ AtlasEntityDef employeeDef = AtlasTypeUtil.createClassTypeDef(EMPLOYEE_TYPE, getDescription(EMPLOYEE_TYPE), Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("name", "string"),
AtlasTypeUtil.createOptionalAttrDef("dob", "date"),
AtlasTypeUtil.createOptionalAttrDef("age", "int"),
AtlasTypeUtil.createRequiredAttrDef("phone_no", PHONE_TYPE));
- AtlasEntityDef departmentDef = AtlasTypeUtil.createClassTypeDef(DEPARTMENT_TYPE, getDescription(DEPARTMENT_TYPE), ImmutableSet.<String>of(),
+ AtlasEntityDef departmentDef = AtlasTypeUtil.createClassTypeDef(DEPARTMENT_TYPE, getDescription(DEPARTMENT_TYPE), Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("name", "string"),
AtlasTypeUtil.createOptionalAttrDef("count", "int"));
- AtlasEntityDef addressDef = AtlasTypeUtil.createClassTypeDef(ADDRESS_TYPE, getDescription(ADDRESS_TYPE), ImmutableSet.<String>of(),
+ AtlasEntityDef addressDef = AtlasTypeUtil.createClassTypeDef(ADDRESS_TYPE, getDescription(ADDRESS_TYPE), Collections.<String>emptySet(),
AtlasTypeUtil.createOptionalAttrDef("street", "string"),
AtlasTypeUtil.createRequiredAttrDef("city", "string"),
AtlasTypeUtil.createRequiredAttrDef("state", "string"),
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/test/resources/atlas-application.properties
----------------------------------------------------------------------
diff --git a/intg/src/test/resources/atlas-application.properties b/intg/src/test/resources/atlas-application.properties
new file mode 100644
index 0000000..b937c33
--- /dev/null
+++ b/intg/src/test/resources/atlas-application.properties
@@ -0,0 +1,145 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#system property
+atlas.data=${sys:user.dir}/target/data
+
+
+
+#re-use existing property
+atlas.graph.data=${atlas.data}/graph
+
+#plain property
+atlas.service=atlas
+
+#invalid system property
+atlas.db=${atlasdb}
+
+atlas.TypeSystem.impl=org.apache.atlas.typesystem.types.TypeSystem
+
+
+
+######### Atlas Server Configs #########
+atlas.rest.address=http://localhost:31000
+
+######### Graph Database Configs #########
+
+
+# Graph database implementation. Value inserted by maven.
+atlas.graphdb.backend=${graphdb.backend.impl}
+
+# Graph Storage
+atlas.graph.storage.backend=${graph.storage.backend}
+
+# Entity repository implementation
+atlas.EntityAuditRepository.impl=${entity.repository.impl}
+
+# Graph Search Index Backend
+atlas.graph.index.search.backend=${graph.index.backend}
+
+#Berkeley storage directory
+atlas.graph.storage.directory=${sys:atlas.data}/berkley
+
+#hbase
+#For standalone mode, specify localhost
+#For distributed mode, specify the zookeeper quorum here - For more information refer http://s3.thinkaurelius.com/docs/titan/current/hbase.html#_remote_server_mode_2
+
+atlas.graph.storage.hostname=${graph.storage.hostname}
+atlas.graph.storage.hbase.regions-per-server=1
+atlas.graph.storage.lock.wait-time=10000
+
+#ElasticSearch
+atlas.graph.index.search.directory=${sys:atlas.data}/es
+atlas.graph.index.search.elasticsearch.client-only=false
+atlas.graph.index.search.elasticsearch.local-mode=true
+atlas.graph.index.search.elasticsearch.create.sleep=2000
+
+# Solr cloud mode properties
+atlas.graph.index.search.solr.mode=cloud
+atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address}
+atlas.graph.index.search.max-result-set-size=150
+
+######### Hive Lineage Configs #########
+## Schema
+atlas.lineage.schema.query.hive_table=hive_table where __guid='%s'\, columns
+atlas.lineage.schema.query.hive_table_v1=hive_table_v1 where __guid='%s'\, columns
+
+######### Notification Configs #########
+atlas.notification.embedded=true
+
+atlas.kafka.zookeeper.connect=localhost:19026
+atlas.kafka.bootstrap.servers=localhost:19027
+atlas.kafka.data=${sys:atlas.data}/kafka
+atlas.kafka.zookeeper.session.timeout.ms=4000
+atlas.kafka.zookeeper.sync.time.ms=20
+atlas.kafka.consumer.timeout.ms=4000
+atlas.kafka.auto.commit.interval.ms=100
+atlas.kafka.hook.group.id=atlas
+atlas.kafka.entities.group.id=atlas_entities
+#atlas.kafka.auto.commit.enable=false
+
+atlas.kafka.enable.auto.commit=false
+atlas.kafka.auto.offset.reset=earliest
+atlas.kafka.session.timeout.ms=30000
+
+
+
+######### Entity Audit Configs #########
+atlas.audit.hbase.tablename=ATLAS_ENTITY_AUDIT_EVENTS
+atlas.audit.zookeeper.session.timeout.ms=1000
+atlas.audit.hbase.zookeeper.quorum=localhost
+atlas.audit.hbase.zookeeper.property.clientPort=19026
+
+######### Security Properties #########
+
+# SSL config
+atlas.enableTLS=false
+atlas.server.https.port=31443
+
+######### Security Properties #########
+
+hbase.security.authentication=simple
+
+atlas.hook.falcon.synchronous=true
+
+######### JAAS Configuration ########
+
+atlas.jaas.KafkaClient.loginModuleName = com.sun.security.auth.module.Krb5LoginModule
+atlas.jaas.KafkaClient.loginModuleControlFlag = required
+atlas.jaas.KafkaClient.option.useKeyTab = true
+atlas.jaas.KafkaClient.option.storeKey = true
+atlas.jaas.KafkaClient.option.serviceName = kafka
+atlas.jaas.KafkaClient.option.keyTab = /etc/security/keytabs/atlas.service.keytab
+atlas.jaas.KafkaClient.option.principal = atlas/_HOST@EXAMPLE.COM
+
+######### High Availability Configuration ########
+atlas.server.ha.enabled=false
+#atlas.server.ids=id1
+#atlas.server.address.id1=localhost:21000
+
+#########POLICY FILE PATH #########
+# atlas.auth.policy.file=policy-store.txt
+
+atlas.authentication.method.file=true
+atlas.authentication.method.ldap.type=none
+# atlas.authentication.method.file.filename=users-credentials.properties
+atlas.authentication.method.kerberos=false
+
+######### Gremlin Search Configuration #########
+# Set to false to disable gremlin search.
+atlas.search.gremlin.enable=true
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/test/resources/policy-store.txt
----------------------------------------------------------------------
diff --git a/intg/src/test/resources/policy-store.txt b/intg/src/test/resources/policy-store.txt
new file mode 100644
index 0000000..048affe
--- /dev/null
+++ b/intg/src/test/resources/policy-store.txt
@@ -0,0 +1,9 @@
+##Policy Format
+##r-READ, w-WRITE, u-UPDATE, d-DELETE
+##Policy_Name;;User_Name1:Operations_Allowed,User_Name2:Operations_Allowed;;Group_Name1:Operations_Allowed,Group_Name2:Operations_Allowed;;Resource_Type1:Resource_Name,Resource_Type2:Resource_Name
+##
+adminPolicy;;admin:rwud;;ROLE_ADMIN:rwud;;type:*,entity:*,operation:*
+dataScientistPolicy;;;;DATA_SCIENTIST:r;;type:*,entity:*
+dataStewardPolicy;;;;DATA_STEWARD:rwu;;type:*,entity:*
+hadoopPolicy;;;;hadoop:rwud;;type:*,entity:*,operation:*
+rangerTagSyncPolicy;;;;RANGER_TAG_SYNC:r;;type:*,entity:*
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/test/resources/sampleInstance.json
----------------------------------------------------------------------
diff --git a/intg/src/test/resources/sampleInstance.json b/intg/src/test/resources/sampleInstance.json
new file mode 100755
index 0000000..efcc48d
--- /dev/null
+++ b/intg/src/test/resources/sampleInstance.json
@@ -0,0 +1,72 @@
+{
+ "$typeName$": "Department",
+ "$id$": {
+ "id": -1420494283853484000,
+ "$typeName$": "Department",
+ "version": 0
+ },
+ "employees": [
+ {
+ "$typeName$": "Person",
+ "$id$": {
+ "id": -1420494283853508000,
+ "$typeName$": "Person",
+ "version": 0
+ },
+ "manager": {
+ "id": -1420494283853511000,
+ "$typeName$": "Manager",
+ "version": 0
+ },
+ "department": {
+ "id": -1420494283853484000,
+ "$typeName$": "Department",
+ "version": 0
+ },
+ "name": "John"
+ },
+ {
+ "$typeName$": "Manager",
+ "$id$": {
+ "id": -1420494283853511000,
+ "$typeName$": "Manager",
+ "version": 0
+ },
+ "manager": null,
+ "subordinates": [
+ {
+ "$typeName$": "Person",
+ "$id$": {
+ "id": -1420494283853508000,
+ "$typeName$": "Person",
+ "version": 0
+ },
+ "manager": {
+ "id": -1420494283853511000,
+ "$typeName$": "Manager",
+ "version": 0
+ },
+ "department": {
+ "id": -1420494283853484000,
+ "$typeName$": "Department",
+ "version": 0
+ },
+ "name": "John"
+ }
+ ],
+ "department": {
+ "id": -1420494283853484000,
+ "$typeName$": "Department",
+ "version": 0
+ },
+ "name": "Jane",
+ "$traits$": {
+ "SecurityClearance": {
+ "$typeName$": "SecurityClearance",
+ "level": 1
+ }
+ }
+ }
+ ],
+ "name": "hr"
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/test/resources/sampleTypes.json
----------------------------------------------------------------------
diff --git a/intg/src/test/resources/sampleTypes.json b/intg/src/test/resources/sampleTypes.json
new file mode 100755
index 0000000..d0ac311
--- /dev/null
+++ b/intg/src/test/resources/sampleTypes.json
@@ -0,0 +1,633 @@
+{
+ "enumTypes": [
+ {
+ "name": "HiveObjectType",
+ "enumValues": [
+ {
+ "value": "GLOBAL",
+ "ordinal": 1
+ },
+ {
+ "value": "DATABASE",
+ "ordinal": 2
+ },
+ {
+ "value": "TABLE",
+ "ordinal": 3
+ },
+ {
+ "value": "PARTITION",
+ "ordinal": 4
+ },
+ {
+ "value": "COLUMN",
+ "ordinal": 5
+ }
+ ]
+ },
+ {
+ "name": "LockLevel",
+ "enumValues": [
+ {
+ "value": "DB",
+ "ordinal": 1
+ },
+ {
+ "value": "TABLE",
+ "ordinal": 2
+ },
+ {
+ "value": "PARTITION",
+ "ordinal": 3
+ }
+ ]
+ },
+ {
+ "name": "TxnState",
+ "enumValues": [
+ {
+ "value": "COMMITTED",
+ "ordinal": 1
+ },
+ {
+ "value": "ABORTED",
+ "ordinal": 2
+ },
+ {
+ "value": "OPEN",
+ "ordinal": 3
+ }
+ ]
+ },
+ {
+ "name": "PrincipalType",
+ "enumValues": [
+ {
+ "value": "USER",
+ "ordinal": 1
+ },
+ {
+ "value": "ROLE",
+ "ordinal": 2
+ },
+ {
+ "value": "GROUP",
+ "ordinal": 3
+ }
+ ]
+ }
+ ],
+ "structTypes": [
+ {
+ "typeName": "t2",
+ "attributeDefinitions": [
+ {
+ "name": "a",
+ "dataTypeName": "int",
+ "multiplicity": "required",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "s",
+ "dataTypeName": "t2",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ }
+ ]
+ },
+ {
+ "typeName": "t1",
+ "attributeDefinitions": [
+ {
+ "name": "a",
+ "dataTypeName": "int",
+ "multiplicity": "required",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "b",
+ "dataTypeName": "boolean",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "c",
+ "dataTypeName": "byte",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "d",
+ "dataTypeName": "short",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "e",
+ "dataTypeName": "int",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "f",
+ "dataTypeName": "int",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "g",
+ "dataTypeName": "long",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "h",
+ "dataTypeName": "float",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "i",
+ "dataTypeName": "double",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "j",
+ "dataTypeName": "biginteger",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "k",
+ "dataTypeName": "bigdecimal",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "l",
+ "dataTypeName": "date",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "m",
+ "dataTypeName": "array<int>",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "n",
+ "dataTypeName": "array<bigdecimal>",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "o",
+ "dataTypeName": "map<string,double>",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ }
+ ]
+ },
+ {
+ "typeName": "ts1",
+ "attributeDefinitions": [
+ {
+ "name": "a",
+ "dataTypeName": "int",
+ "multiplicity": "required",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "b",
+ "dataTypeName": "boolean",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "c",
+ "dataTypeName": "byte",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "d",
+ "dataTypeName": "short",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "e",
+ "dataTypeName": "int",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "f",
+ "dataTypeName": "int",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "g",
+ "dataTypeName": "long",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "h",
+ "dataTypeName": "float",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "i",
+ "dataTypeName": "double",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "j",
+ "dataTypeName": "biginteger",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "k",
+ "dataTypeName": "bigdecimal",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "l",
+ "dataTypeName": "date",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "m",
+ "dataTypeName": "array<int>",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "n",
+ "dataTypeName": "array<bigdecimal>",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "o",
+ "dataTypeName": "map<string,double>",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ }
+ ]
+ }
+ ],
+ "traitTypes": [
+ {
+ "superTypes": [
+ "B",
+ "C"
+ ],
+ "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
+ "typeName": "D",
+ "attributeDefinitions": [
+ {
+ "name": "d",
+ "dataTypeName": "short",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ }
+ ]
+ },
+ {
+ "superTypes": [
+ ],
+ "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
+ "typeName": "A",
+ "attributeDefinitions": [
+ {
+ "name": "a",
+ "dataTypeName": "int",
+ "multiplicity": "required",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "b",
+ "dataTypeName": "boolean",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "c",
+ "dataTypeName": "byte",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "d",
+ "dataTypeName": "short",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ }
+ ]
+ },
+ {
+ "superTypes": [
+ "A"
+ ],
+ "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
+ "typeName": "B",
+ "attributeDefinitions": [
+ {
+ "name": "b",
+ "dataTypeName": "boolean",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ }
+ ]
+ },
+ {
+ "superTypes": [
+ "A"
+ ],
+ "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
+ "typeName": "C",
+ "attributeDefinitions": [
+ {
+ "name": "c",
+ "dataTypeName": "byte",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ }
+ ]
+ },
+ {
+ "superTypes": [
+ ],
+ "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
+ "typeName": "SecurityClearance",
+ "attributeDefinitions": [
+ {
+ "name": "level",
+ "dataTypeName": "int",
+ "multiplicity": "required",
+ "isComposite": false,
+ "reverseAttributeName": null
+ }
+ ]
+ }
+ ],
+ "classTypes": [
+ {
+ "superTypes": [
+ "Person"
+ ],
+ "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
+ "typeName": "Manager",
+ "attributeDefinitions": [
+ {
+ "name": "subordinates",
+ "dataTypeName": "array<Person>",
+ "multiplicity": "collection",
+ "isComposite": false,
+ "reverseAttributeName": "manager"
+ }
+ ]
+ },
+ {
+ "superTypes": [
+ ],
+ "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
+ "typeName": "Department",
+ "attributeDefinitions": [
+ {
+ "name": "name",
+ "dataTypeName": "string",
+ "multiplicity": "required",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "employees",
+ "dataTypeName": "array<Person>",
+ "multiplicity": "collection",
+ "isComposite": true,
+ "reverseAttributeName": "department"
+ }
+ ]
+ },
+ {
+ "superTypes": [
+ ],
+ "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
+ "typeName": "t4",
+ "attributeDefinitions": [
+ {
+ "name": "a",
+ "dataTypeName": "int",
+ "multiplicity": "required",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "b",
+ "dataTypeName": "boolean",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "c",
+ "dataTypeName": "byte",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "d",
+ "dataTypeName": "short",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "enum1",
+ "dataTypeName": "HiveObjectType",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "e",
+ "dataTypeName": "int",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "f",
+ "dataTypeName": "int",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "g",
+ "dataTypeName": "long",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "enum2",
+ "dataTypeName": "PrincipalType",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "h",
+ "dataTypeName": "float",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "i",
+ "dataTypeName": "double",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "j",
+ "dataTypeName": "biginteger",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "k",
+ "dataTypeName": "bigdecimal",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "enum3",
+ "dataTypeName": "TxnState",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "l",
+ "dataTypeName": "date",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "m",
+ "dataTypeName": "array<int>",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "n",
+ "dataTypeName": "array<bigdecimal>",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "o",
+ "dataTypeName": "map<string,double>",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "enum4",
+ "dataTypeName": "LockLevel",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": null
+ }
+ ]
+ },
+ {
+ "superTypes": [
+ ],
+ "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
+ "typeName": "Person",
+ "attributeDefinitions": [
+ {
+ "name": "name",
+ "dataTypeName": "string",
+ "multiplicity": "required",
+ "isComposite": false,
+ "reverseAttributeName": null
+ },
+ {
+ "name": "department",
+ "dataTypeName": "Department",
+ "multiplicity": "required",
+ "isComposite": false,
+ "reverseAttributeName": "employees"
+ },
+ {
+ "name": "manager",
+ "dataTypeName": "Manager",
+ "multiplicity": "optional",
+ "isComposite": false,
+ "reverseAttributeName": "subordinates"
+ }
+ ]
+ }
+ ]
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/test/resources/users-credentials.properties
----------------------------------------------------------------------
diff --git a/intg/src/test/resources/users-credentials.properties b/intg/src/test/resources/users-credentials.properties
new file mode 100644
index 0000000..3fc3bb1
--- /dev/null
+++ b/intg/src/test/resources/users-credentials.properties
@@ -0,0 +1,3 @@
+#username=group::sha256-password
+admin=ADMIN::8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918
+rangertagsync=RANGER_TAG_SYNC::e3f67240f5117d1753c940dae9eea772d36ed5fe9bd9c94a300e40413f1afb9d
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/pom.xml
----------------------------------------------------------------------
diff --git a/notification/pom.xml b/notification/pom.xml
index 9b36940..079d424 100644
--- a/notification/pom.xml
+++ b/notification/pom.xml
@@ -42,12 +42,12 @@
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
+ <artifactId>atlas-server-api</artifactId>
</dependency>
<dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-server-api</artifactId>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
@@ -57,7 +57,7 @@
<dependency>
<groupId>org.apache.kafka</groupId>
- <artifactId>kafka_${scala.binary.version}</artifactId>
+ <artifactId>kafka_${kafka.scala.binary.version}</artifactId>
</dependency>
<dependency>
@@ -77,11 +77,10 @@
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
+ <artifactId>atlas-intg</artifactId>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
-
</dependencies>
<build>
@@ -159,7 +158,7 @@
</artifactItem>
<artifactItem>
<groupId>org.apache.kafka</groupId>
- <artifactId>kafka_${scala.binary.version}</artifactId>
+ <artifactId>kafka_${kafka.scala.binary.version}</artifactId>
<version>${kafka.version}</version>
</artifactItem>
<artifactItem>
@@ -168,26 +167,6 @@
<version>${kafka.version}</version>
</artifactItem>
<artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>com.101tec</groupId>
<artifactId>zkclient</artifactId>
<version>${zkclient.version}</version>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java b/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java
index 38f3208..bf6a36c 100644
--- a/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java
+++ b/notification/src/main/java/org/apache/atlas/hook/AtlasHook.java
@@ -21,22 +21,20 @@ package org.apache.atlas.hook;
import com.google.common.annotations.VisibleForTesting;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.kafka.NotificationProvider;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.atlas.notification.NotificationException;
import org.apache.atlas.notification.NotificationInterface;
-import org.apache.atlas.notification.hook.HookNotification;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest;
import org.apache.atlas.security.InMemoryJAASConfiguration;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.security.UserGroupInformation;
-import org.codehaus.jettison.json.JSONArray;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.List;
@@ -101,18 +99,10 @@ public abstract class AtlasHook {
protected abstract String getNumberOfRetriesPropertyKey();
- protected void notifyEntities(String user, Collection<Referenceable> entities) {
- JSONArray entitiesArray = new JSONArray();
-
- for (Referenceable entity : entities) {
- LOG.info("Adding entity for type: {}", entity.getTypeName());
- final String entityJson = InstanceSerialization.toJson(entity, true);
- entitiesArray.put(entityJson);
- }
-
- List<HookNotification.HookNotificationMessage> hookNotificationMessages = new ArrayList<>();
- hookNotificationMessages.add(new HookNotification.EntityCreateRequest(user, entitiesArray));
- notifyEntities(hookNotificationMessages);
+ protected void notifyEntities(String user, List<Referenceable> entities) {
+ List<HookNotification> hookNotifications = new ArrayList<>();
+ hookNotifications.add(new EntityCreateRequest(user, entities));
+ notifyEntities(hookNotifications);
}
/**
@@ -124,12 +114,12 @@ public abstract class AtlasHook {
* @param messages hook notification messages
* @param maxRetries maximum number of retries while sending message to messaging system
*/
- public static void notifyEntities(List<HookNotification.HookNotificationMessage> messages, int maxRetries) {
+ public static void notifyEntities(List<HookNotification> messages, int maxRetries) {
notifyEntitiesInternal(messages, maxRetries, notificationInterface, logFailedMessages, failedMessagesLogger);
}
@VisibleForTesting
- static void notifyEntitiesInternal(List<HookNotification.HookNotificationMessage> messages, int maxRetries,
+ static void notifyEntitiesInternal(List<HookNotification> messages, int maxRetries,
NotificationInterface notificationInterface,
boolean shouldLogFailedMessages, FailedMessagesLogger logger) {
if (messages == null || messages.isEmpty()) {
@@ -176,7 +166,7 @@ public abstract class AtlasHook {
*
* @param messages hook notification messages
*/
- protected void notifyEntities(List<HookNotification.HookNotificationMessage> messages) {
+ protected void notifyEntities(List<HookNotification> messages) {
final int maxRetries = atlasProperties.getInt(getNumberOfRetriesPropertyKey(), 3);
notifyEntities(messages, maxRetries);
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/kafka/AtlasKafkaConsumer.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/kafka/AtlasKafkaConsumer.java b/notification/src/main/java/org/apache/atlas/kafka/AtlasKafkaConsumer.java
index e3bb71c..fd0c4e4 100644
--- a/notification/src/main/java/org/apache/atlas/kafka/AtlasKafkaConsumer.java
+++ b/notification/src/main/java/org/apache/atlas/kafka/AtlasKafkaConsumer.java
@@ -18,7 +18,8 @@
package org.apache.atlas.kafka;
import org.apache.atlas.notification.AbstractNotificationConsumer;
-import org.apache.atlas.notification.MessageDeserializer;
+import org.apache.atlas.notification.AtlasNotificationMessageDeserializer;
+import org.apache.atlas.notification.NotificationInterface;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -41,13 +42,18 @@ public class AtlasKafkaConsumer<T> extends AbstractNotificationConsumer<T> {
private static final Logger LOG = LoggerFactory.getLogger(AtlasKafkaConsumer.class);
private final KafkaConsumer kafkaConsumer;
- private final boolean autoCommitEnabled;
- private long pollTimeoutMilliSeconds = 1000L;
+ private final boolean autoCommitEnabled;
+ private long pollTimeoutMilliSeconds = 1000L;
- public AtlasKafkaConsumer(MessageDeserializer<T> deserializer, KafkaConsumer kafkaConsumer, boolean autoCommitEnabled, long pollTimeoutMilliSeconds) {
+ public AtlasKafkaConsumer(NotificationInterface.NotificationType notificationType, KafkaConsumer kafkaConsumer, boolean autoCommitEnabled, long pollTimeoutMilliSeconds) {
+ this(notificationType.getDeserializer(), kafkaConsumer, autoCommitEnabled, pollTimeoutMilliSeconds);
+ }
+
+ public AtlasKafkaConsumer(AtlasNotificationMessageDeserializer<T> deserializer, KafkaConsumer kafkaConsumer, boolean autoCommitEnabled, long pollTimeoutMilliSeconds) {
super(deserializer);
- this.kafkaConsumer = kafkaConsumer;
- this.autoCommitEnabled = autoCommitEnabled;
+
+ this.autoCommitEnabled = autoCommitEnabled;
+ this.kafkaConsumer = kafkaConsumer;
this.pollTimeoutMilliSeconds = pollTimeoutMilliSeconds;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/kafka/KafkaNotification.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/kafka/KafkaNotification.java b/notification/src/main/java/org/apache/atlas/kafka/KafkaNotification.java
index 6bb8d73..4d6b444 100644
--- a/notification/src/main/java/org/apache/atlas/kafka/KafkaNotification.java
+++ b/notification/src/main/java/org/apache/atlas/kafka/KafkaNotification.java
@@ -23,6 +23,7 @@ import kafka.server.KafkaServer;
import kafka.utils.Time;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasException;
+import org.apache.atlas.model.notification.AtlasNotificationMessage;
import org.apache.atlas.notification.AbstractNotification;
import org.apache.atlas.notification.NotificationConsumer;
import org.apache.atlas.notification.NotificationException;
@@ -40,6 +41,7 @@ import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.zookeeper.server.NIOServerCnxnFactory;
import org.apache.zookeeper.server.ServerCnxnFactory;
import org.apache.zookeeper.server.ZooKeeperServer;
+import org.codehaus.jackson.type.TypeReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.Order;
@@ -185,7 +187,8 @@ public class KafkaNotification extends AbstractNotification implements Service {
Properties consumerProperties = getConsumerProperties(notificationType);
List<NotificationConsumer<T>> consumers = new ArrayList<>();
- AtlasKafkaConsumer kafkaConsumer = new AtlasKafkaConsumer(notificationType.getDeserializer(), getKafkaConsumer(consumerProperties,notificationType, autoCommitEnabled), autoCommitEnabled, pollTimeOutMs );
+ AtlasKafkaConsumer kafkaConsumer =new AtlasKafkaConsumer(notificationType, getKafkaConsumer(consumerProperties, notificationType, autoCommitEnabled), autoCommitEnabled, pollTimeOutMs);
+
consumers.add(kafkaConsumer);
return consumers;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/AbstractMessageDeserializer.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/AbstractMessageDeserializer.java b/notification/src/main/java/org/apache/atlas/notification/AbstractMessageDeserializer.java
index 37a57d1..a787141 100644
--- a/notification/src/main/java/org/apache/atlas/notification/AbstractMessageDeserializer.java
+++ b/notification/src/main/java/org/apache/atlas/notification/AbstractMessageDeserializer.java
@@ -18,148 +18,30 @@
package org.apache.atlas.notification;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.JsonDeserializationContext;
-import com.google.gson.JsonDeserializer;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonParseException;
-import com.google.gson.reflect.TypeToken;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
+import org.apache.atlas.model.notification.AtlasNotificationMessage;
+import org.apache.atlas.model.notification.MessageVersion;
+import org.codehaus.jackson.type.TypeReference;
import org.slf4j.Logger;
-import java.lang.reflect.Type;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
/**
* Base notification message deserializer.
*/
public abstract class AbstractMessageDeserializer<T> extends AtlasNotificationMessageDeserializer<T> {
- private static final Map<Type, JsonDeserializer> DESERIALIZER_MAP = new HashMap<>();
-
- static {
- DESERIALIZER_MAP.put(ImmutableList.class, new ImmutableListDeserializer());
- DESERIALIZER_MAP.put(ImmutableMap.class, new ImmutableMapDeserializer());
- DESERIALIZER_MAP.put(JSONArray.class, new JSONArrayDeserializer());
- DESERIALIZER_MAP.put(IStruct.class, new StructDeserializer());
- DESERIALIZER_MAP.put(IReferenceableInstance.class, new ReferenceableDeserializer());
- DESERIALIZER_MAP.put(Referenceable.class, new ReferenceableDeserializer());
- }
-
-
// ----- Constructors ----------------------------------------------------
/**
* Create a deserializer.
*
- * @param notificationMessageType the type of the notification message
* @param expectedVersion the expected message version
- * @param deserializerMap map of individual deserializers used to define this message deserializer
* @param notificationLogger logger for message version mismatch
*/
- public AbstractMessageDeserializer(Type notificationMessageType,
- MessageVersion expectedVersion,
- Map<Type, JsonDeserializer> deserializerMap,
- Logger notificationLogger) {
- super(notificationMessageType, expectedVersion, getDeserializer(deserializerMap), notificationLogger);
+ public AbstractMessageDeserializer(TypeReference<T> messageType,
+ TypeReference<AtlasNotificationMessage<T>> notificationMessageType,
+ MessageVersion expectedVersion, Logger notificationLogger) {
+ super(messageType, notificationMessageType, expectedVersion, notificationLogger);
}
// ----- helper methods --------------------------------------------------
-
- private static Gson getDeserializer(Map<Type, JsonDeserializer> deserializerMap) {
- GsonBuilder builder = new GsonBuilder();
-
- for (Map.Entry<Type, JsonDeserializer> entry : DESERIALIZER_MAP.entrySet()) {
- builder.registerTypeAdapter(entry.getKey(), entry.getValue());
- }
-
- for (Map.Entry<Type, JsonDeserializer> entry : deserializerMap.entrySet()) {
- builder.registerTypeAdapter(entry.getKey(), entry.getValue());
- }
- return builder.create();
- }
-
-
- // ----- deserializer classes --------------------------------------------
-
- /**
- * Deserializer for ImmutableList.
- */
- protected static class ImmutableListDeserializer implements JsonDeserializer<ImmutableList<?>> {
- public static final Type LIST_TYPE = new TypeToken<List<?>>() {
- }.getType();
-
- @Override
- public ImmutableList<?> deserialize(JsonElement json, Type type,
- JsonDeserializationContext context) {
- final List<?> list = context.deserialize(json, LIST_TYPE);
- return ImmutableList.copyOf(list);
- }
- }
-
- /**
- * Deserializer for ImmutableMap.
- */
- protected static class ImmutableMapDeserializer implements JsonDeserializer<ImmutableMap<?, ?>> {
-
- public static final Type MAP_TYPE = new TypeToken<Map<?, ?>>() {
- }.getType();
-
- @Override
- public ImmutableMap<?, ?> deserialize(JsonElement json, Type type,
- JsonDeserializationContext context) {
- final Map<?, ?> map = context.deserialize(json, MAP_TYPE);
- return ImmutableMap.copyOf(map);
- }
- }
-
- /**
- * Deserializer for JSONArray.
- */
- public static final class JSONArrayDeserializer implements JsonDeserializer<JSONArray> {
- @Override
- public JSONArray deserialize(final JsonElement json, final Type type,
- final JsonDeserializationContext context) {
- try {
- return new JSONArray(json.toString());
- } catch (JSONException e) {
- throw new JsonParseException(e.getMessage(), e);
- }
- }
- }
-
- /**
- * Deserializer for Struct.
- */
- protected static final class StructDeserializer implements JsonDeserializer<IStruct> {
- @Override
- public IStruct deserialize(final JsonElement json, final Type type,
- final JsonDeserializationContext context) {
- return context.deserialize(json, Struct.class);
- }
- }
-
- /**
- * Deserializer for Referenceable.
- */
- protected static final class ReferenceableDeserializer implements JsonDeserializer<IReferenceableInstance> {
- @Override
- public IReferenceableInstance deserialize(final JsonElement json, final Type type,
- final JsonDeserializationContext context) {
-
- return InstanceSerialization.fromJsonReferenceable(json.toString(), true);
- }
- }
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/AbstractNotification.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/AbstractNotification.java b/notification/src/main/java/org/apache/atlas/notification/AbstractNotification.java
index 4f56bd8..8bc7cb4 100644
--- a/notification/src/main/java/org/apache/atlas/notification/AbstractNotification.java
+++ b/notification/src/main/java/org/apache/atlas/notification/AbstractNotification.java
@@ -18,18 +18,19 @@
package org.apache.atlas.notification;
import com.google.common.annotations.VisibleForTesting;
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
import org.apache.atlas.AtlasException;
import org.apache.atlas.ha.HAConfiguration;
-import org.apache.atlas.notification.AtlasNotificationBaseMessage.CompressionKind;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
+import org.apache.atlas.model.notification.AtlasNotificationBaseMessage;
+import org.apache.atlas.model.notification.AtlasNotificationMessage;
+import org.apache.atlas.model.notification.AtlasNotificationStringMessage;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.model.notification.AtlasNotificationBaseMessage.CompressionKind;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.model.notification.MessageVersion;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.StringUtils;
import org.codehaus.jettison.json.JSONArray;
@@ -45,8 +46,8 @@ import java.util.List;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
-import static org.apache.atlas.notification.AtlasNotificationBaseMessage.MESSAGE_COMPRESSION_ENABLED;
-import static org.apache.atlas.notification.AtlasNotificationBaseMessage.MESSAGE_MAX_LENGTH_BYTES;
+import static org.apache.atlas.model.notification.AtlasNotificationBaseMessage.MESSAGE_COMPRESSION_ENABLED;
+import static org.apache.atlas.model.notification.AtlasNotificationBaseMessage.MESSAGE_MAX_LENGTH_BYTES;
/**
* Abstract notification interface implementation.
@@ -79,15 +80,6 @@ public abstract class AbstractNotification implements NotificationInterface {
private final boolean embedded;
private final boolean isHAEnabled;
- /**
- * Used for message serialization.
- */
- public static final Gson GSON = new GsonBuilder().
- registerTypeAdapter(IReferenceableInstance.class, new ReferenceableSerializer()).
- registerTypeAdapter(Referenceable.class, new ReferenceableSerializer()).
- registerTypeAdapter(JSONArray.class, new JSONArraySerializer()).
- create();
-
// ----- Constructors ----------------------------------------------------
public AbstractNotification(Configuration applicationProperties) throws AtlasException {
@@ -160,7 +152,7 @@ public abstract class AbstractNotification implements NotificationInterface {
public static String getMessageJson(Object message) {
AtlasNotificationMessage<?> notificationMsg = new AtlasNotificationMessage<>(CURRENT_MESSAGE_VERSION, message);
- return GSON.toJson(notificationMsg);
+ return AtlasType.toV1Json(notificationMsg);
}
private static String getHostAddress() {
@@ -190,7 +182,7 @@ public abstract class AbstractNotification implements NotificationInterface {
*/
public static void createNotificationMessages(Object message, List<String> msgJsonList) {
AtlasNotificationMessage<?> notificationMsg = new AtlasNotificationMessage<>(CURRENT_MESSAGE_VERSION, message, getHostAddress(), getCurrentUser());
- String msgJson = GSON.toJson(notificationMsg);
+ String msgJson = AtlasType.toV1Json(notificationMsg);
boolean msgLengthExceedsLimit = (msgJson.length() * MAX_BYTES_PER_CHAR) > MESSAGE_MAX_LENGTH_BYTES;
@@ -215,7 +207,7 @@ public abstract class AbstractNotification implements NotificationInterface {
if (!msgLengthExceedsLimit) { // no need to split
AtlasNotificationStringMessage compressedMsg = new AtlasNotificationStringMessage(encodedBytes, msgId, compressionKind);
- msgJson = GSON.toJson(compressedMsg); // msgJson will not have multi-byte characters here, due to use of encodeBase64() above
+ msgJson = AtlasType.toV1Json(compressedMsg); // msgJson will not have multi-byte characters here, due to use of encodeBase64() above
msgBytes = null; // not used after this point
} else { // encodedBytes will be split
msgJson = null; // not used after this point
@@ -241,7 +233,7 @@ public abstract class AbstractNotification implements NotificationInterface {
AtlasNotificationStringMessage splitMsg = new AtlasNotificationStringMessage(encodedBytes, offset, length, msgId, compressionKind, i, splitCount);
- String splitMsgJson = GSON.toJson(splitMsg);
+ String splitMsgJson = AtlasType.toV1Json(splitMsg);
msgJsonList.add(splitMsgJson);
@@ -264,10 +256,10 @@ public abstract class AbstractNotification implements NotificationInterface {
/**
* Serializer for Referenceable.
*/
- public static final class ReferenceableSerializer implements JsonSerializer<IReferenceableInstance> {
+ public static final class ReferenceableSerializer implements JsonSerializer<Referenceable> {
@Override
- public JsonElement serialize(IReferenceableInstance src, Type typeOfSrc, JsonSerializationContext context) {
- String instanceJson = InstanceSerialization.toJson(src, true);
+ public JsonElement serialize(Referenceable src, Type typeOfSrc, JsonSerializationContext context) {
+ String instanceJson = AtlasType.toV1Json(src);
return new JsonParser().parse(instanceJson).getAsJsonObject();
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/AbstractNotificationConsumer.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/AbstractNotificationConsumer.java b/notification/src/main/java/org/apache/atlas/notification/AbstractNotificationConsumer.java
index 8cf1e8e..c3940ce 100644
--- a/notification/src/main/java/org/apache/atlas/notification/AbstractNotificationConsumer.java
+++ b/notification/src/main/java/org/apache/atlas/notification/AbstractNotificationConsumer.java
@@ -16,30 +16,19 @@
* limitations under the License.
*/
package org.apache.atlas.notification;
+
import org.apache.kafka.common.TopicPartition;
+
/**
* Abstract notification consumer.
*/
public abstract class AbstractNotificationConsumer<T> implements NotificationConsumer<T> {
+ protected final AtlasNotificationMessageDeserializer<T> deserializer;
- /**
- * Deserializer used to deserialize notification messages for this consumer.
- */
- protected final MessageDeserializer<T> deserializer;
-
-
-
- /**
- * Construct an AbstractNotificationConsumer.
- *
- * @param deserializer the message deserializer used by this consumer
- */
- public AbstractNotificationConsumer(MessageDeserializer<T> deserializer) {
+ protected AbstractNotificationConsumer(AtlasNotificationMessageDeserializer<T> deserializer) {
this.deserializer = deserializer;
}
-
-
public abstract void commit(TopicPartition partition, long offset);
}
[15/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/service/DefaultMetadataServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/service/DefaultMetadataServiceTest.java b/repository/src/test/java/org/apache/atlas/service/DefaultMetadataServiceTest.java
deleted file mode 100644
index e885b8c..0000000
--- a/repository/src/test/java/org/apache/atlas/service/DefaultMetadataServiceTest.java
+++ /dev/null
@@ -1,1381 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.service;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import com.google.inject.Inject;
-import org.apache.atlas.AtlasClient;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.CreateUpdateEntitiesResult;
-import org.apache.atlas.EntityAuditEvent;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
-import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.listener.ChangedTypeDefs;
-import org.apache.atlas.listener.EntityChangeListener;
-import org.apache.atlas.listener.TypeDefChangeListener;
-import org.apache.atlas.model.legacy.EntityResult;
-import org.apache.atlas.query.QueryParams;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.audit.EntityAuditRepository;
-import org.apache.atlas.repository.audit.HBaseBasedAuditRepository;
-import org.apache.atlas.repository.audit.HBaseTestUtils;
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.services.DefaultMetadataService;
-import org.apache.atlas.services.MetadataService;
-import org.apache.atlas.type.AtlasTypeUtil;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.TypeNotFoundException;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.EnumValue;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.ValueConversionException;
-import org.apache.atlas.typesystem.types.cache.TypeCache;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.apache.atlas.utils.ParamChecker;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.testng.Assert;
-import org.testng.annotations.AfterTest;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-
-import static org.apache.atlas.TestUtils.*;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
-import static org.testng.Assert.*;
-
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class DefaultMetadataServiceTest {
- @Inject
- private MetadataService metadataService;
-
- private TypeDefChangeListener typeDefChangeListener;
-
- @Inject
- private EntityAuditRepository auditRepository;
-
- @Inject
- private GraphBackedDiscoveryService discoveryService;
-
- private Referenceable db = createDBEntity();
-
- private Referenceable table;
-
- private Id tableId;
-
- private final String NAME = "name";
-
-
- @BeforeTest
- public void setUp() throws Exception {
-
- typeDefChangeListener = (DefaultMetadataService)metadataService;
- metadataService = TestUtils.addSessionCleanupWrapper(metadataService);
-
- if (auditRepository instanceof HBaseBasedAuditRepository) {
- HBaseTestUtils.startCluster();
- ((HBaseBasedAuditRepository) auditRepository).start();
- }
- TestUtils.resetRequestContext();
- RequestContext.get().setUser("testuser");
-
- TypesDef typesDef = TestUtils.defineHiveTypes();
- try {
- metadataService.getTypeDefinition(TestUtils.TABLE_TYPE);
- } catch (TypeNotFoundException e) {
- metadataService.createType(TypesSerialization.toJson(typesDef));
- }
-
- String dbGUid = TestUtils.createInstance(metadataService, db);
- table = createTableEntity(dbGUid);
- String tableGuid = TestUtils.createInstance(metadataService, table);
- String tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- table = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
- tableId = new Id(tableGuid, 0, TestUtils.TABLE_TYPE);
- }
-
- @AfterTest
- public void shutdown() throws Exception {
- try {
- TypeSystem.getInstance().reset();
-
- if (auditRepository instanceof HBaseBasedAuditRepository) {
- ((HBaseBasedAuditRepository) auditRepository).stop();
- HBaseTestUtils.stopCluster();
- }
- }
- finally {
- AtlasGraphProvider.cleanup();
- }
- }
- private EntityResult updateInstance(Referenceable entity) throws Exception {
- RequestContext.createContext();
- ParamChecker.notNull(entity, "Entity");
- ParamChecker.notNull(entity.getId(), "Entity");
- String entityjson = InstanceSerialization.toJson(entity, true);
- JSONArray entitiesJson = new JSONArray();
- entitiesJson.put(entityjson);
- return metadataService.updateEntities(entitiesJson.toString()).getEntityResult();
- }
-
- @Test(expectedExceptions = TypeNotFoundException.class)
- public void testCreateEntityWithUnknownDatatype() throws Exception {
- Referenceable entity = new Referenceable("Unknown datatype");
- String dbName = TestUtils.randomString(10);
- entity.set(NAME, dbName);
- entity.set("description", "us db");
- TestUtils.createInstance(metadataService, entity);
- Assert.fail(TypeNotFoundException.class.getSimpleName() + " was expected but none thrown.");
- }
-
- @Test
- public void testCreateEntityWithUniqueAttribute() throws Exception {
- //name is the unique attribute
- Referenceable entity = createDBEntity();
- String id = TestUtils.createInstance(metadataService, entity);
- assertAuditEvents(id, EntityAuditEvent.EntityAuditAction.ENTITY_CREATE);
-
- //using the same name should succeed, but not create another entity
- String newId = TestUtils.createInstance(metadataService, entity);
- assertNull(newId);
-
- //Same entity, but different qualified name should succeed
- entity.set(NAME, TestUtils.randomString());
- newId = TestUtils.createInstance(metadataService, entity);
- Assert.assertNotEquals(newId, id);
- }
-
- @Test
- //Titan doesn't allow some reserved chars in property keys. Verify that atlas encodes these
- //See GraphHelper.encodePropertyKey()
- public void testSpecialCharacters() throws Exception {
- //Verify that type can be created with reserved characters in typename, attribute name
- String strAttrName = randomStrWithReservedChars();
- String arrayAttrName = randomStrWithReservedChars();
- String mapAttrName = randomStrWithReservedChars();
- HierarchicalTypeDefinition<ClassType> typeDefinition =
- createClassTypeDef("test_type_"+ TestUtils.randomString(10), ImmutableSet.<String>of(),
- createOptionalAttrDef(strAttrName, DataTypes.STRING_TYPE),
- new AttributeDefinition(arrayAttrName, DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
- Multiplicity.OPTIONAL, false, null),
- new AttributeDefinition(mapAttrName,
- DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(), DataTypes.STRING_TYPE.getName()),
- Multiplicity.OPTIONAL, false, null));
- metadataService.createType(TypesSerialization.toJson(typeDefinition, false));
-
- //verify that entity can be created with reserved characters in string value, array value and map key and value
- Referenceable entity = new Referenceable(typeDefinition.typeName);
- entity.set(strAttrName, randomStrWithReservedChars());
- entity.set(arrayAttrName, new ArrayList<String>() {{ add(randomStrWithReservedChars()); }});
- entity.set(mapAttrName, new HashMap<String, String>() {{
- put(randomStrWithReservedChars(), randomStrWithReservedChars());
- }});
- String id = createInstance(metadataService, entity);
-
- //Verify that get entity definition returns actual values with reserved characters
- Referenceable instance =
- InstanceSerialization.fromJsonReferenceable(metadataService.getEntityDefinitionJson(id), true);
- assertReferenceableEquals(instance, entity);
-
- //Verify that search with reserved characters works - for string attribute
- String query =
- String.format("`%s` where `%s` = '%s'", typeDefinition.typeName, strAttrName, entity.get(strAttrName));
- String responseJson = discoveryService.searchByDSL(query, new QueryParams(1, 0));
- JSONObject response = new JSONObject(responseJson);
- assertEquals(response.getJSONArray("rows").length(), 1);
- }
-
- //equals excluding the id
- private void assertReferenceableEquals(Referenceable actual, Referenceable expected) {
- List<String> traits = actual.getTraits();
- Map<String, IStruct> traitsMap = new HashMap<>();
- for (String trait : traits) {
- traitsMap.put(trait, actual.getTrait(trait));
- }
-
- Referenceable newActual = new Referenceable(expected.getId(), actual.getTypeName(), actual.getValuesMap(),
- traits, traitsMap);
- assertEquals(newActual, expected);
- }
-
- private String randomStrWithReservedChars() {
- return randomString() + "\"${}%";
- }
-
- @Test
- public void testAddDeleteTrait() throws Exception {
- Referenceable entity = createDBEntity();
- String id = TestUtils.createInstance(metadataService, entity);
-
- //add trait
- Struct tag = new Struct(TestUtils.PII);
- metadataService.addTrait(id, InstanceSerialization.toJson(tag, true));
-
- List<String> traits = metadataService.getTraitNames(id);
- assertEquals(traits.size(), 1);
- assertEquals(traits.get(0), PII);
-
- //getTrait
- IStruct traitDefinition = metadataService.getTraitDefinition(id, PII);
- Assert.assertNotNull(traitDefinition);
- assertEquals(traitDefinition.getValuesMap().size(), 0);
-
- //delete trait
- metadataService.deleteTrait(id, PII);
- traits = metadataService.getTraitNames(id);
- assertEquals(traits.size(), 0);
-
- //add trait again
- metadataService.addTrait(id, InstanceSerialization.toJson(tag, true));
-
- traits = metadataService.getTraitNames(id);
- assertEquals(traits.size(), 1);
- assertEquals(traits.get(0), PII);
- }
-
- @Test
- public void testEntityAudit() throws Exception {
- //create entity
- Referenceable entity = createDBEntity();
- String id = TestUtils.createInstance(metadataService, entity);
- assertAuditEvents(id, EntityAuditEvent.EntityAuditAction.ENTITY_CREATE);
-
- Struct tag = new Struct(TestUtils.PII);
- metadataService.addTrait(id, InstanceSerialization.toJson(tag, true));
- assertAuditEvents(id, EntityAuditEvent.EntityAuditAction.TAG_ADD);
-
- metadataService.deleteTrait(id, TestUtils.PII);
- assertAuditEvents(id, EntityAuditEvent.EntityAuditAction.TAG_DELETE);
-
- metadataService.updateEntityAttributeByGuid(id, "description", "new description");
- assertAuditEvents(id, EntityAuditEvent.EntityAuditAction.ENTITY_UPDATE);
-
- metadataService.deleteEntities(Arrays.asList(id));
- assertAuditEvents(id, EntityAuditEvent.EntityAuditAction.ENTITY_DELETE);
- }
-
- private EntityResult deleteEntities(String... guids) throws AtlasException {
- RequestContext.createContext();
- return metadataService.deleteEntities(Arrays.asList(guids));
- }
-
- private void assertAuditEvents(String id, EntityAuditEvent.EntityAuditAction expectedAction) throws Exception {
- List<EntityAuditEvent> events =
- auditRepository.listEvents(id, null, (short) 10);
- for (EntityAuditEvent event : events) {
- if (event.getAction() == expectedAction) {
- return;
- }
- }
- fail("Expected audit action " + expectedAction);
- }
-
- private void assertAuditEvents(String entityId, int numEvents) throws Exception {
- List<EntityAuditEvent> events = metadataService.getAuditEvents(entityId, null, (short) numEvents);
- assertNotNull(events);
- assertEquals(events.size(), numEvents);
- }
-
- @Test
- public void testCreateEntityWithUniqueAttributeWithReference() throws Exception {
- Referenceable db = createDBEntity();
- String dbId = TestUtils.createInstance(metadataService, db);
-
- //Assert that there is just 1 audit events and thats for entity create
- assertAuditEvents(dbId, 1);
- assertAuditEvents(dbId, EntityAuditEvent.EntityAuditAction.ENTITY_CREATE);
-
- Referenceable table = new Referenceable(TestUtils.TABLE_TYPE);
- table.set(NAME, TestUtils.randomString());
- table.set("description", "random table");
- table.set("type", "type");
- table.set("tableType", "MANAGED");
- table.set("database", new Id(dbId, 0, TestUtils.DATABASE_TYPE));
- table.set("databaseComposite", db);
- TestUtils.createInstance(metadataService, table);
-
- //table create should re-use the db instance created earlier
- String tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- Referenceable tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
- Referenceable actualDb = (Referenceable) tableDefinition.get("databaseComposite");
- assertEquals(actualDb.getId().id, dbId);
-
- //Assert that as part table create, db is not created and audit event is not added to db
- assertAuditEvents(dbId, 1);
- }
-
- @Test
- public void testUpdateEntityByUniqueAttribute() throws Exception {
- final List<String> colNameList = ImmutableList.of("col1", "col2");
- Referenceable tableUpdated = new Referenceable(TestUtils.TABLE_TYPE, new HashMap<String, Object>() {{
- put("columnNames", colNameList);
- }});
- metadataService.updateEntityByUniqueAttribute(table.getTypeName(), NAME, (String) table.get(NAME),
- tableUpdated);
-
- String tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- Referenceable tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
- List<String> actualColumns = (List) tableDefinition.get("columnNames");
- assertEquals(actualColumns, colNameList);
- }
-
- @Test
- public void testUpdateEntityWithMap() throws Exception {
- final Map<String, Struct> partsMap = new HashMap<>();
- partsMap.put("part0", new Struct(TestUtils.PARTITION_STRUCT_TYPE,
- new HashMap<String, Object>() {{
- put(NAME, "test");
- }}));
-
- table.set("partitionsMap", partsMap);
-
- updateInstance(table);
- String tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- Referenceable tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- Assert.assertTrue(partsMap.get("part0").equalsContents(((Map<String, Struct>)tableDefinition.get("partitionsMap")).get("part0")));
-
- //update map - add a map key
- partsMap.put("part1", new Struct(TestUtils.PARTITION_STRUCT_TYPE,
- new HashMap<String, Object>() {{
- put(NAME, "test1");
- }}));
- table.set("partitionsMap", partsMap);
-
- updateInstance(table);
- tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- assertEquals(((Map<String, Struct>)tableDefinition.get("partitionsMap")).size(), 2);
- Assert.assertTrue(partsMap.get("part1").equalsContents(((Map<String, Struct>)tableDefinition.get("partitionsMap")).get("part1")));
-
- //update map - remove a key and add another key
- partsMap.remove("part0");
- partsMap.put("part2", new Struct(TestUtils.PARTITION_STRUCT_TYPE,
- new HashMap<String, Object>() {{
- put(NAME, "test2");
- }}));
- table.set("partitionsMap", partsMap);
-
- updateInstance(table);
- tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- assertEquals(((Map<String, Struct>)tableDefinition.get("partitionsMap")).size(), 2);
- Assert.assertNull(((Map<String, Struct>)tableDefinition.get("partitionsMap")).get("part0"));
- Assert.assertTrue(partsMap.get("part2").equalsContents(((Map<String, Struct>)tableDefinition.get("partitionsMap")).get("part2")));
-
- //update struct value for existing map key
- Struct partition2 = partsMap.get("part2");
- partition2.set(NAME, "test2Updated");
- updateInstance(table);
- tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- assertEquals(((Map<String, Struct>)tableDefinition.get("partitionsMap")).size(), 2);
- Assert.assertNull(((Map<String, Struct>)tableDefinition.get("partitionsMap")).get("part0"));
- Assert.assertTrue(partsMap.get("part2").equalsContents(((Map<String, Struct>)tableDefinition.get("partitionsMap")).get("part2")));
-
- //Test map pointing to a class
- final Map<String, Referenceable> columnsMap = new HashMap<>();
- Referenceable col0Type = new Referenceable(TestUtils.COLUMN_TYPE,
- new HashMap<String, Object>() {{
- put(NAME, "test1");
- put("type", "string");
- }});
-
- columnsMap.put("col0", col0Type);
-
- Referenceable col1Type = new Referenceable(TestUtils.COLUMN_TYPE,
- new HashMap<String, Object>() {{
- put(NAME, "test2");
- put("type", "string");
- }});
-
- columnsMap.put("col1", col1Type);
- table.set(TestUtils.COLUMNS_MAP, columnsMap);
- updateInstance(table);
- verifyMapUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), columnsMap, TestUtils.COLUMNS_MAP);
-
- //Swap elements
- columnsMap.clear();
- columnsMap.put("col0", col1Type);
- columnsMap.put("col1", col0Type);
-
- table.set(TestUtils.COLUMNS_MAP, columnsMap);
- updateInstance(table);
- verifyMapUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), columnsMap, TestUtils.COLUMNS_MAP);
-
- //Drop the first key and change the class type as well to col0
- columnsMap.clear();
- columnsMap.put("col0", col0Type);
-
- table.set(TestUtils.COLUMNS_MAP, columnsMap);
- updateInstance(table);
- verifyMapUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), columnsMap, TestUtils.COLUMNS_MAP);
-
- //Clear state
- table.setNull(TestUtils.COLUMNS_MAP);
- updateInstance(table);
- verifyMapUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), null, TestUtils.COLUMNS_MAP);
- }
-
- private void verifyMapUpdates(String typeName, String uniqAttrName, String uniqAttrValue,
- Map<String, Referenceable> expectedMap, String mapAttrName) throws AtlasException {
- String json =
- metadataService.getEntityDefinition(typeName, uniqAttrName, uniqAttrValue);
- Referenceable tableDefinition = InstanceSerialization.fromJsonReferenceable(json, true);
- Map<String, Referenceable> actualMap = (Map<String, Referenceable>) tableDefinition.get(mapAttrName);
-
- if (expectedMap == null && actualMap != null) {
- //all are marked as deleted in case of soft delete
- for (String key : actualMap.keySet()) {
- assertEquals(actualMap.get(key).getId().state, Id.EntityState.DELETED);
- }
- } else if(expectedMap == null) {
- //hard delete case
- assertNull(actualMap);
- } else {
- assertTrue(actualMap.size() >= expectedMap.size());
-
- for (String key : expectedMap.keySet()) {
- assertTrue(actualMap.get(key).equalsContents(expectedMap.get(key)));
- }
-
- //rest of the keys are marked as deleted
- List<String> extraKeys = new ArrayList<>(actualMap.keySet());
- extraKeys.removeAll(expectedMap.keySet());
- for (String key : extraKeys) {
- assertEquals(actualMap.get(key).getId().getState(), Id.EntityState.DELETED);
- }
- }
- }
-
- @Test
- public void testUpdateEntityAddAndUpdateArrayAttr() throws Exception {
- //Update entity, add new array attribute
- //add array of primitives
- final List<String> colNameList = ImmutableList.of("col1", "col2");
- Referenceable tableUpdated = new Referenceable(TestUtils.TABLE_TYPE, new HashMap<String, Object>() {{
- put("columnNames", colNameList);
- }});
- metadataService.updateEntityPartialByGuid(tableId._getId(), tableUpdated);
-
- String tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- Referenceable tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
- List<String> actualColumns = (List) tableDefinition.get("columnNames");
- assertEquals(actualColumns, colNameList);
-
- //update array of primitives
- final List<String> updatedColNameList = ImmutableList.of("col2", "col3");
- tableUpdated = new Referenceable(TestUtils.TABLE_TYPE, new HashMap<String, Object>() {{
- put("columnNames", updatedColNameList);
- }});
- metadataService.updateEntityPartialByGuid(tableId.getId()._getId(), tableUpdated);
-
- tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
- actualColumns = (List) tableDefinition.get("columnNames");
- assertEquals(actualColumns, updatedColNameList);
- }
-
- private EntityResult updateEntityPartial(String guid, Referenceable entity) throws AtlasException {
- RequestContext.createContext();
- return metadataService.updateEntityPartialByGuid(guid, entity).getEntityResult();
- }
-
- @Test
- public void testUpdateEntityArrayOfClass() throws Exception {
- //test array of class with id
- final List<Referenceable> columns = new ArrayList<>();
- Map<String, Object> values = new HashMap<>();
- values.put(NAME, "col1");
- values.put("type", "type");
- Referenceable col1 = new Referenceable(TestUtils.COLUMN_TYPE, values);
- columns.add(col1);
- Referenceable tableUpdated = new Referenceable(TestUtils.TABLE_TYPE, new HashMap<String, Object>() {{
- put(COLUMNS_ATTR_NAME, columns);
- }});
-
- EntityResult entityResult = updateEntityPartial(tableId._getId(), tableUpdated);
- assertEquals(entityResult.getCreatedEntities().size(), 1); //col1 created
- assertEquals(entityResult.getUpdateEntities().size(), 1); //table updated
- assertEquals(entityResult.getUpdateEntities().get(0), tableId._getId());
- verifyArrayUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), columns, COLUMNS_ATTR_NAME);
-
- //Partial update. Add col2 But also update col1
- Map<String, Object> valuesCol5 = new HashMap<>();
- valuesCol5.put(NAME, "col2");
- valuesCol5.put("type", "type");
- Referenceable col2 = new Referenceable(TestUtils.COLUMN_TYPE, valuesCol5);
- //update col1
- col1.set("type", "type1");
- columns.add(col2);
-
- tableUpdated = new Referenceable(TestUtils.TABLE_TYPE, new HashMap<String, Object>() {{
- put(COLUMNS_ATTR_NAME, columns);
- }});
- entityResult = updateEntityPartial(tableId._getId(), tableUpdated);
- assertEquals(entityResult.getCreatedEntities().size(), 1); //col2 created
- assertEquals(entityResult.getUpdateEntities().size(), 2); //table, col1 updated
-
- verifyArrayUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), columns, COLUMNS_ATTR_NAME);
-
- //Complete update. Add array elements - col3,col4
- Map<String, Object> values1 = new HashMap<>();
- values1.put(NAME, "col3");
- values1.put("type", "type");
- Referenceable col3 = new Referenceable(TestUtils.COLUMN_TYPE, values1);
- columns.add(col3);
-
- Map<String, Object> values2 = new HashMap<>();
- values2.put(NAME, "col4");
- values2.put("type", "type");
- Referenceable col4 = new Referenceable(TestUtils.COLUMN_TYPE, values2);
- columns.add(col4);
-
- table.set(COLUMNS_ATTR_NAME, columns);
- entityResult = updateInstance(table);
- assertEquals(entityResult.getCreatedEntities().size(), 2); //col3, col4 created
-
- verifyArrayUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), columns, COLUMNS_ATTR_NAME);
-
- //Swap elements
- columns.clear();
- columns.add(col4);
- columns.add(col3);
-
- table.set(COLUMNS_ATTR_NAME, columns);
- entityResult = updateInstance(table);
- assertEquals(entityResult.getDeletedEntities().size(), 2); //col1, col2 are deleted
- verifyArrayUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), columns, COLUMNS_ATTR_NAME);
-
- //drop a single column
- columns.clear();
- columns.add(col3);
-
- table.set(COLUMNS_ATTR_NAME, columns);
- entityResult = updateInstance(table);
- assertEquals(entityResult.getDeletedEntities().size(), 1); //col4 deleted
- verifyArrayUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), columns, COLUMNS_ATTR_NAME);
-
- //Remove a class reference/Id and insert another reference
- //Also covers isComposite case since columns is a composite
- values.clear();
- columns.clear();
-
- values.put(NAME, "col5");
- values.put("type", "type");
- Referenceable col5 = new Referenceable(TestUtils.COLUMN_TYPE, values);
- columns.add(col5);
- table.set(COLUMNS_ATTR_NAME, columns);
- entityResult = updateInstance(table);
- assertEquals(entityResult.getCreatedEntities().size(), 1); //col5 created
- assertEquals(entityResult.getDeletedEntities().size(), 1); //col3 deleted
-
- verifyArrayUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), columns, COLUMNS_ATTR_NAME);
-
- //Update array column to null
- table.setNull(COLUMNS_ATTR_NAME);
- entityResult = updateInstance(table);
- assertEquals(entityResult.getDeletedEntities().size(), 1);
- verifyArrayUpdates(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME), null, COLUMNS_ATTR_NAME);
- }
-
- private void verifyArrayUpdates(String typeName, String uniqAttrName, String uniqAttrValue,
- List<Referenceable> expectedArray, String arrAttrName) throws AtlasException {
- String json = metadataService.getEntityDefinition(typeName, uniqAttrName, uniqAttrValue);
- Referenceable entityDefinition = InstanceSerialization.fromJsonReferenceable(json, true);
- List<Referenceable> actualArray = (List<Referenceable>) entityDefinition.get(arrAttrName);
- if (expectedArray == null && actualArray != null) {
- //all are marked as deleted in case of soft delete
- for (Referenceable referenceable : actualArray) {
- assertEquals(referenceable.getId().state, Id.EntityState.DELETED);
- }
- } else if(expectedArray == null) {
- //hard delete case
- assertNull(actualArray);
- } else {
- int index;
- for (index = 0; index < expectedArray.size(); index++) {
- Assert.assertTrue(actualArray.get(index).equalsContents(expectedArray.get(index)));
- }
-
- //Rest of the entities in the list are marked as deleted
- for (; index < actualArray.size(); index++) {
- assertEquals(actualArray.get(index).getId().state, Id.EntityState.DELETED);
- }
- }
- }
-
- @Test
- public void testStructs() throws Exception {
- Struct serdeInstance = new Struct(TestUtils.SERDE_TYPE);
- serdeInstance.set(NAME, "serde1Name");
- serdeInstance.set("serde", "test");
- serdeInstance.set("description", "testDesc");
- table.set("serde1", serdeInstance);
-
- String newtableId = updateInstance(table).getUpdateEntities().get(0);
- assertEquals(newtableId, tableId._getId());
-
- String tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- Referenceable tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
- Assert.assertNotNull(tableDefinition.get("serde1"));
- Assert.assertTrue(serdeInstance.equalsContents(tableDefinition.get("serde1")));
-
- //update struct attribute
- serdeInstance.set("serde", "testUpdated");
- updateInstance(table);
- tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- Assert.assertTrue(serdeInstance.equalsContents(tableDefinition.get("serde1")));
-
- //set to null
- serdeInstance.setNull("description");
- updateInstance(table);
- tableDefinitionJson =
- metadataService.getEntityDefinitionJson(tableId._getId());
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
- Assert.assertNull(((Struct) tableDefinition.get("serde1")).get("description"));
- }
-
-
- @Test
- public void testCreateEntityWithReferenceableHavingIdNoValue() throws Exception {
- //ATLAS-383 Test
- Referenceable sdReferenceable = new Referenceable(TestUtils.STORAGE_DESC_TYPE);
- sdReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, TestUtils.randomString());
- sdReferenceable.set("compressed", "false");
- sdReferenceable.set("location", "hdfs://tmp/hive-user");
- String sdGuid = TestUtils.createInstance(metadataService, sdReferenceable);
-
- Referenceable sdRef2 = new Referenceable(sdGuid, TestUtils.STORAGE_DESC_TYPE, null);
-
- Referenceable partRef = new Referenceable(TestUtils.PARTITION_CLASS_TYPE);
- partRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "part-unique");
- partRef.set("values", ImmutableList.of("2014-10-01"));
- partRef.set("table", table);
- partRef.set("sd", sdRef2);
-
- String partGuid = TestUtils.createInstance(metadataService, partRef);
- Assert.assertNotNull(partGuid);
- }
-
- @Test
- public void testClassUpdate() throws Exception {
- //Create new db instance
- final Referenceable databaseInstance = new Referenceable(TestUtils.DATABASE_TYPE);
- databaseInstance.set(NAME, TestUtils.randomString());
- databaseInstance.set("description", "new database");
-
- String dbId = TestUtils.createInstance(metadataService, databaseInstance);
-
- /*Update reference property with Id */
- metadataService.updateEntityAttributeByGuid(tableId._getId(), "database", dbId);
-
- String tableDefinitionJson =
- metadataService.getEntityDefinitionJson(tableId._getId());
- Referenceable tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- assertEquals(dbId, (((Id) tableDefinition.get("database"))._getId()));
-
- /* Update with referenceable - TODO - Fails . Need to fix this */
- /*final String dbName = TestUtils.randomString();
- final Referenceable databaseInstance2 = new Referenceable(TestUtils.DATABASE_TYPE);
- databaseInstance2.set(NAME, dbName);
- databaseInstance2.set("description", "new database 2");
-
- Referenceable updateTable = new Referenceable(TestUtils.TABLE_TYPE, new HashMap<String, Object>() {{
- put("database", databaseInstance2);
- }});
- metadataService.updateEntityAttributeByGuid(tableId._getId(), updateTable);
-
- tableDefinitionJson =
- metadataService.getEntityDefinition(tableId._getId());
- Referenceable tableDefinitionActual = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- String dbDefJson = metadataService.getEntityDefinition(TestUtils.DATABASE_TYPE, NAME, dbName);
- Referenceable dbDef = InstanceSerialization.fromJsonReferenceable(dbDefJson, true);
-
- Assert.assertNotEquals(dbId, (((Id) tableDefinitionActual.get("database"))._getId()));
- Assert.assertEquals(dbDef.getObjectId()._getId(), (((Id) tableDefinitionActual.get("database"))._getId())); */
-
- }
-
- @Test
- public void testArrayOfStructs() throws Exception {
- //Add array of structs
- TestUtils.dumpGraph(TestUtils.getGraph());
-
- final Struct partition1 = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
- partition1.set(NAME, "part1");
-
- final Struct partition2 = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
- partition2.set(NAME, "part2");
-
- List<Struct> partitions = new ArrayList<Struct>(){{ add(partition1); add(partition2); }};
- table.set("partitions", partitions);
-
- String newtableId = updateInstance(table).getUpdateEntities().get(0);
- assertEquals(newtableId, tableId._getId());
-
- String tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- Referenceable tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- Assert.assertNotNull(tableDefinition.get("partitions"));
- List<Struct> partitionsActual = (List<Struct>) tableDefinition.get("partitions");
- assertPartitions(partitionsActual, partitions);
-
- //add a new element to array of struct
- final Struct partition3 = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
- partition3.set(NAME, "part3");
- partitions.add(partition3);
- table.set("partitions", partitions);
- newtableId = updateInstance(table).getUpdateEntities().get(0);
- assertEquals(newtableId, tableId._getId());
-
- tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- Assert.assertNotNull(tableDefinition.get("partitions"));
- partitionsActual = (List<Struct>) tableDefinition.get("partitions");
- assertPartitions(partitionsActual, partitions);
-
- //remove one of the struct values
- partitions.remove(1);
- table.set("partitions", partitions);
- newtableId = updateInstance(table).getUpdateEntities().get(0);
- assertEquals(newtableId, tableId._getId());
-
- tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- Assert.assertNotNull(tableDefinition.get("partitions"));
- partitionsActual = (List<Struct>) tableDefinition.get("partitions");
- assertPartitions(partitionsActual, partitions);
-
- //Update struct value within array of struct
- partitions.get(0).set(NAME, "part4");
- newtableId = updateInstance(table).getUpdateEntities().get(0);
- assertEquals(newtableId, tableId._getId());
-
- tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- Assert.assertNotNull(tableDefinition.get("partitions"));
- partitionsActual = (List<Struct>) tableDefinition.get("partitions");
- assertPartitions(partitionsActual, partitions);
-
- //add a repeated element to array of struct
- final Struct partition4 = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
- partition4.set(NAME, "part4");
- partitions.add(partition4);
- table.set("partitions", partitions);
- newtableId = updateInstance(table).getUpdateEntities().get(0);
- assertEquals(newtableId, tableId._getId());
-
- tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- Assert.assertNotNull(tableDefinition.get("partitions"));
- partitionsActual = (List<Struct>) tableDefinition.get("partitions");
- assertPartitions(partitionsActual, partitions);
-
-
- // Remove all elements. Should set array attribute to null
- partitions.clear();
- newtableId = updateInstance(table).getUpdateEntities().get(0);
- assertEquals(newtableId, tableId._getId());
-
- tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- Assert.assertNull(tableDefinition.get("partitions"));
- }
-
- private void assertPartitions(List<Struct> partitionsActual, List<Struct> partitions) {
- assertEquals(partitionsActual.size(), partitions.size());
- for (int index = 0; index < partitions.size(); index++) {
- assertTrue(partitionsActual.get(index).equalsContents(partitions.get(index)));
- }
- }
-
- @Test(expectedExceptions = ValueConversionException.class)
- public void testCreateRequiredAttrNull() throws Exception {
- //Update required attribute
-
- Referenceable tableEntity = new Referenceable(TABLE_TYPE);
- tableEntity.set(NAME, "table_" + TestUtils.randomString());
-
- TestUtils.createInstance(metadataService, tableEntity);
- Assert.fail("Expected exception while creating with required attribute null");
- }
-
- @Test(expectedExceptions = ValueConversionException.class)
- public void testUpdateRequiredAttrToNull() throws Exception {
- //Update required attribute
- String tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- Referenceable tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- assertEquals(tableDefinition.get("description"), "random table");
- table.setNull("description");
-
- updateInstance(table);
- Assert.fail("Expected exception while updating required attribute to null");
- }
-
- @Test
- public void testCheckOptionalAttrValueRetention() throws Exception {
-
- Referenceable entity = createDBEntity();
-
- String dbId = TestUtils.createInstance(metadataService, entity);
-
- entity = getEntity(dbId);
-
- //The optional boolean attribute should have a non-null value
- final String isReplicatedAttr = "isReplicated";
- final String paramsAttr = "parameters";
- Assert.assertNotNull(entity.get(isReplicatedAttr));
- Assert.assertEquals(entity.get(isReplicatedAttr), Boolean.FALSE);
- Assert.assertNull(entity.get(paramsAttr));
-
- //Update to true
- entity.set(isReplicatedAttr, Boolean.TRUE);
- //Update array
- final HashMap<String, String> params = new HashMap<String, String>() {{ put("param1", "val1"); put("param2", "val2"); }};
- entity.set(paramsAttr, params);
- //Complete update
- updateInstance(entity);
-
- entity = getEntity(dbId);
-
- Assert.assertNotNull(entity.get(isReplicatedAttr));
- Assert.assertEquals(entity.get(isReplicatedAttr), Boolean.TRUE);
- Assert.assertEquals(entity.get(paramsAttr), params);
-
- //Complete update without setting the attribute
- Referenceable newEntity = createDBEntity();
- //Reset name to the current DB name
- newEntity.set(NAME, entity.get(NAME));
- updateInstance(newEntity);
-
- entity = getEntity(dbId);
- Assert.assertNotNull(entity.get(isReplicatedAttr));
- Assert.assertEquals(entity.get(isReplicatedAttr), Boolean.TRUE);
- Assert.assertEquals(entity.get(paramsAttr), params);
- }
-
- private Referenceable getEntity(String guid) throws AtlasException {
- String entityJson = metadataService.getEntityDefinitionJson(guid);
- Assert.assertNotNull(entityJson);
- return InstanceSerialization.fromJsonReferenceable(entityJson, true);
- }
-
- @Test
- public void testUpdateOptionalAttrToNull() throws Exception {
- String tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- Referenceable tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
-
- //Update optional Attribute
- Assert.assertNotNull(tableDefinition.get("created"));
- //Update optional attribute
- table.setNull("created");
-
- String newtableId = updateInstance(table).getUpdateEntities().get(0);
- assertEquals(newtableId, tableId._getId());
-
- tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
- Assert.assertNull(tableDefinition.get("created"));
- }
-
- @Test
- public void testCreateEntityWithEnum ()throws Exception {
- String tableDefinitionJson =
- metadataService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, (String) table.get(NAME));
- Referenceable tableDefinition = InstanceSerialization.fromJsonReferenceable(tableDefinitionJson, true);
- EnumValue tableType = (EnumValue) tableDefinition.get("tableType");
-
- assertEquals(tableType, new EnumValue("MANAGED", 1));
- }
-
- @Test
- public void testGetEntityByUniqueAttribute() throws Exception {
- Referenceable entity = createDBEntity();
- TestUtils.createInstance(metadataService, entity);
-
- //get entity by valid qualified name
- String entityJson = metadataService.getEntityDefinition(TestUtils.DATABASE_TYPE, NAME,
- (String) entity.get(NAME));
- Assert.assertNotNull(entityJson);
- Referenceable referenceable = InstanceSerialization.fromJsonReferenceable(entityJson, true);
- assertEquals(referenceable.get(NAME), entity.get(NAME));
-
- //get entity by invalid qualified name
- try {
- metadataService.getEntityDefinition(TestUtils.DATABASE_TYPE, NAME, "random");
- Assert.fail("Expected EntityNotFoundException");
- } catch (EntityNotFoundException e) {
- //expected
- }
-
- //get entity by non-unique attribute
- try {
- metadataService.getEntityDefinition(TestUtils.DATABASE_TYPE, "description",
- (String) entity.get("description"));
- Assert.fail("Expected IllegalArgumentException");
- } catch (IllegalArgumentException e) {
- //expected
- }
- }
-
- @Test
- public void testDeleteEntities() throws Exception {
- // Create a table entity, with 3 composite column entities
- Referenceable dbEntity = createDBEntity();
- String dbGuid = TestUtils.createInstance(metadataService, dbEntity);
- Referenceable table1Entity = createTableEntity(dbGuid);
- Referenceable col1 = createColumnEntity();
- Referenceable col2 = createColumnEntity();
- Referenceable col3 = createColumnEntity();
- table1Entity.set(COLUMNS_ATTR_NAME, ImmutableList.of(col1, col2, col3));
- TestUtils.createInstance(metadataService, table1Entity);
-
- // Retrieve the table entities from the repository,
- // to get their guids and the composite column guids.
- String entityJson = metadataService.getEntityDefinition(TestUtils.TABLE_TYPE,
- NAME, (String)table1Entity.get(NAME));
- Assert.assertNotNull(entityJson);
- table1Entity = InstanceSerialization.fromJsonReferenceable(entityJson, true);
- List<IReferenceableInstance> table1Columns = (List<IReferenceableInstance>) table1Entity.get(COLUMNS_ATTR_NAME);
-
- // Register an EntityChangeListener to verify the notification mechanism
- // is working for deleteEntities().
- EntitiesChangeListener listener = new EntitiesChangeListener();
- metadataService.registerListener(listener);
-
- //Delete one column
- String columnId = table1Columns.get(0).getId()._getId();
- EntityResult entityResult = deleteEntities(columnId);
- //column is deleted and table is updated
- assertEquals(entityResult.getDeletedEntities().get(0), columnId);
- assertEquals(entityResult.getUpdateEntities().get(0), table1Entity.getId()._getId());
-
- //verify listener was called for updates and deletes
- assertEquals(entityResult.getDeletedEntities(), listener.getDeletedEntities());
- assertEquals(entityResult.getUpdateEntities(), listener.getUpdatedEntities());
-
- // Delete the table entities. The deletion should cascade
- // to their composite columns.
- entityResult = deleteEntities(table1Entity.getId()._getId());
-
- // Verify that deleteEntities() response has guids for tables and their composite columns.
- Assert.assertTrue(entityResult.getDeletedEntities().contains(table1Entity.getId()._getId()));
- Assert.assertTrue(entityResult.getDeletedEntities().contains(table1Columns.get(1).getId()._getId()));
- Assert.assertTrue(entityResult.getDeletedEntities().contains(table1Columns.get(2).getId()._getId()));
-
- // Verify that tables and their composite columns have been deleted from the repository.
- assertEntityDeleted(TABLE_TYPE, NAME, table1Entity.get(NAME));
- assertEntityDeleted(COLUMN_TYPE, NAME, col2.get(NAME));
- assertEntityDeleted(COLUMN_TYPE, NAME, col3.get(NAME));
-
- // Verify that the listener was notified about the deleted entities.
- List<String> deletedEntitiesFromListener = listener.getDeletedEntities();
- Assert.assertNotNull(deletedEntitiesFromListener);
- assertEquals(deletedEntitiesFromListener.size(), entityResult.getDeletedEntities().size());
- Assert.assertTrue(deletedEntitiesFromListener.containsAll(entityResult.getDeletedEntities()));
- }
-
- private void assertEntityDeleted(String typeName, String attributeName, Object attributeValue)
- throws AtlasException {
- try {
- metadataService.getEntityDefinition(typeName, attributeName, (String) attributeValue);
- fail("Expected EntityNotFoundException");
- } catch(EntityNotFoundException e) {
- //expected
- }
- }
-
- @Test
- public void testDeleteEntityByUniqueAttribute() throws Exception {
- // Create a table entity, with 3 composite column entities
- Referenceable dbEntity = createDBEntity();
- String dbGuid = TestUtils.createInstance(metadataService, dbEntity);
- Referenceable table1Entity = createTableEntity(dbGuid);
- Referenceable col1 = createColumnEntity();
- Referenceable col2 = createColumnEntity();
- Referenceable col3 = createColumnEntity();
- table1Entity.set(COLUMNS_ATTR_NAME, ImmutableList.of(col1, col2, col3));
- TestUtils.createInstance(metadataService, table1Entity);
-
- // to get their guids and the composite column guids.
- String entityJson = metadataService.getEntityDefinition(TestUtils.TABLE_TYPE,
- NAME, (String) table1Entity.get(NAME));
- Assert.assertNotNull(entityJson);
- table1Entity = InstanceSerialization.fromJsonReferenceable(entityJson, true);
- List<IReferenceableInstance> table1Columns = (List<IReferenceableInstance>) table1Entity.get(COLUMNS_ATTR_NAME);
-
- // Register an EntityChangeListener to verify the notification mechanism
- // is working for deleteEntityByUniqueAttribute().
- EntitiesChangeListener listener = new EntitiesChangeListener();
- metadataService.registerListener(listener);
-
- // Delete the table entities. The deletion should cascade
- // to their composite columns.
- List<String> deletedGuids = metadataService.deleteEntityByUniqueAttribute(TestUtils.TABLE_TYPE, NAME,
- (String) table1Entity.get(NAME)).getDeletedEntities();
-
- // Verify that deleteEntities() response has guids for tables and their composite columns.
- Assert.assertTrue(deletedGuids.contains(table1Entity.getId()._getId()));
- for (IReferenceableInstance column : table1Columns) {
- Assert.assertTrue(deletedGuids.contains(column.getId()._getId()));
- }
-
- // Verify that tables and their composite columns have been deleted from the repository.
- // Verify that tables and their composite columns have been deleted from the repository.
- assertEntityDeleted(TABLE_TYPE, NAME, table1Entity.get(NAME));
- assertEntityDeleted(COLUMN_TYPE, NAME, col1.get(NAME));
- assertEntityDeleted(COLUMN_TYPE, NAME, col2.get(NAME));
- assertEntityDeleted(COLUMN_TYPE, NAME, col3.get(NAME));
-
- // Verify that the listener was notified about the deleted entities.
- List<String> deletedEntitiesFromListener = listener.getDeletedEntities();
- Assert.assertNotNull(deletedEntitiesFromListener);
- assertEquals(deletedEntitiesFromListener.size(), deletedGuids.size());
- Assert.assertTrue(deletedEntitiesFromListener.containsAll(deletedGuids));
- }
-
- @Test
- public void testTypeUpdateFailureShouldRollBack() throws AtlasException, JSONException {
- String typeName = TestUtils.randomString(10);
- HierarchicalTypeDefinition<ClassType> typeDef = TypesUtil.createClassTypeDef(
- typeName, ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef("test_type_attribute", DataTypes.STRING_TYPE));
- TypesDef typesDef = new TypesDef(typeDef, false);
- JSONObject type = metadataService.createType(TypesSerialization.toJson(typesDef));
- Assert.assertNotNull(type.get(AtlasClient.TYPES));
-
- HierarchicalTypeDefinition<ClassType> updatedTypeDef = TypesUtil.createClassTypeDef(
- typeName, ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef("test_type_attribute", DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef("test_type_invalid_attribute$", DataTypes.STRING_TYPE));
- TypesDef updatedTypesDef = new TypesDef(updatedTypeDef, false);
-
- try {
- metadataService.updateType(TypesSerialization.toJson(updatedTypesDef));
- fail("Expected AtlasException");
- } catch (AtlasException e) {
- //expected
- }
-
- //type definition should reflect old type
- String typeDefinition = metadataService.getTypeDefinition(typeName);
- typesDef = TypesSerialization.fromJson(typeDefinition);
- assertEquals(typesDef.classTypes().head().attributeDefinitions.length, 1);
- }
-
- @Test
- public void testTypeWithDotsCreationShouldNotBeCreated() throws AtlasException, JSONException {
- String typeName = "test_.v1_type_XXXX";
- HierarchicalTypeDefinition<ClassType> typeDef = TypesUtil.createClassTypeDef(
- typeName, ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef("test_type_attribute", DataTypes.STRING_TYPE));
- TypesDef typesDef = new TypesDef(typeDef, false);
-
- try {
- metadataService.createType(TypesSerialization.toJson(typesDef));
- fail("Expected IllegalArgumentException");
- } catch (IllegalArgumentException e) {
- assertTrue (e.getCause().getMessage().contains(AtlasTypeUtil.getInvalidTypeNameErrorMessage()), e.getCause().getMessage());
- }
- }
-
- @Test
- public void testAuditEventsInvalidParams() throws Exception {
- //entity id can't be null
- try {
- metadataService.getAuditEvents(null, "key", (short) 10);
- fail("expected IllegalArgumentException");
- } catch(IllegalArgumentException e) {
- //expected IllegalArgumentException
- assertEquals(e.getMessage(), "entity id cannot be null");
- }
-
- //entity id can't be empty
- try {
- metadataService.getAuditEvents("", "key", (short) 10);
- fail("expected IllegalArgumentException");
- } catch(IllegalArgumentException e) {
- //expected IllegalArgumentException
- assertEquals(e.getMessage(), "entity id cannot be empty");
- }
-
- //start key can be null
- metadataService.getAuditEvents("id", null, (short) 10);
-
- //start key can't be emoty
- try {
- metadataService.getAuditEvents("id", "", (short) 10);
- fail("expected IllegalArgumentException");
- } catch(IllegalArgumentException e) {
- //expected IllegalArgumentException
- assertEquals(e.getMessage(), "start key cannot be empty");
- }
-
- //number of results can't be > max value
- try {
- metadataService.getAuditEvents("id", "key", (short) 10000);
- fail("expected IllegalArgumentException");
- } catch(IllegalArgumentException e) {
- //expected IllegalArgumentException
- assertEquals(e.getMessage(), "count should be <= 1000, current value 10000");
- }
-
- //number of results can't be <= 0
- try {
- metadataService.getAuditEvents("id", "key", (short) -1);
- fail("expected IllegalArgumentException");
- } catch(IllegalArgumentException e) {
- //expected IllegalArgumentException
- assertEquals(e.getMessage(), "count should be > 0, current value -1");
- }
- }
-
- @Test
- public void testOnChangeRefresh() {
- try {
- List<String> beforeChangeTypeNames = new ArrayList<>();
- beforeChangeTypeNames.addAll(metadataService.getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>()));
-
- typeDefChangeListener.onChange(new ChangedTypeDefs());
-
- List<String> afterChangeTypeNames = new ArrayList<>();
- afterChangeTypeNames.addAll(metadataService.getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>()));
-
- Collections.sort(beforeChangeTypeNames);
- Collections.sort(afterChangeTypeNames);
- assertEquals(afterChangeTypeNames, beforeChangeTypeNames);
- } catch (AtlasBaseException e) {
- fail("Should've succeeded", e);
- } catch (AtlasException e) {
- fail("getTypeNames should've succeeded", e);
- }
- }
-
- @Test
- // ATLAS-2092: Concurrent edge label creation leads to inconsistency
- // This test tries entity creation in parallel and ensures that the edges with the same label actually get created
- public void testConcurrentCalls() {
- final HierarchicalTypeDefinition<ClassType> refType =
- createClassTypeDef(randomString(), ImmutableSet.<String>of());
- HierarchicalTypeDefinition<ClassType> type =
- createClassTypeDef(randomString(), ImmutableSet.<String>of(),
- new AttributeDefinition("ref", refType.typeName, Multiplicity.OPTIONAL, true, null));
- try {
- metadataService.createType(TypesSerialization.toJson(refType, false));
- metadataService.createType(TypesSerialization.toJson(type, false));
-
- String refId1 = createBasicEntity(refType);
- String refId2 = createBasicEntity(refType);
-
- // Add referenced entity for edge creation
- final Referenceable instance1 = new Referenceable(type.typeName);
- instance1.set("ref", new Referenceable(refId1, refType.typeName, null));
-
- // Add referenced entity for edge creation
- final Referenceable instance2 = new Referenceable(type.typeName);
- instance2.set("ref", new Referenceable(refId2, refType.typeName, null));
-
- ExecutorService executor = Executors.newFixedThreadPool(3);
- List<Future<Object>> futures = new ArrayList<>();
- // Try parallel creation of both the entities
- futures.add(executor.submit(new Callable<Object>() {
- @Override
- public Object call() throws Exception {
- return createEntity(instance1);
- }
- }));
- futures.add(executor.submit(new Callable<Object>() {
- @Override
- public Object call() throws Exception {
- return createEntity(instance2);
- }
- }));
- futures.add(executor.submit(new Callable<Object>() {
- @Override
- public Object call() throws Exception {
- return discoveryService.searchByDSL(TestUtils.TABLE_TYPE, new QueryParams(10, 0));
- }
- }));
-
- try {
- String id1 = (String) futures.get(0).get();
- String id2 = (String) futures.get(1).get();
- futures.get(2).get();
- executor.shutdown();
-
- assertNotNull(id1);
- assertNotNull(id2);
-
- boolean validated1 = assertEdge(id1, type.typeName);
- boolean validated2 = assertEdge(id2, type.typeName);
- assertTrue(validated1 && validated2);
- } catch (InterruptedException | ExecutionException e) {
- fail("Parallel entity creation failed", e);
- }
- } catch (AtlasException e) {
- fail("Type/Entity creation failed", e);
- }
- }
-
- private String createBasicEntity(final HierarchicalTypeDefinition<ClassType> refType) throws AtlasException {
- String json = InstanceSerialization.toJson(new Referenceable(refType.typeName), false);
- CreateUpdateEntitiesResult entities = metadataService.createEntities("[" + json + "]");
- return entities.getCreatedEntities().get(0);
- }
-
- private String createEntity(final Referenceable referenceable) throws AtlasException {
- String json = InstanceSerialization.toJson(referenceable, false);
- CreateUpdateEntitiesResult entities = metadataService.createEntities("[" + json + "]");
- return entities.getCreatedEntities().get(0);
- }
-
- private boolean assertEdge(String id, String typeName) throws AtlasException {
- AtlasGraph graph = TestUtils.getGraph();
- Iterable<AtlasVertex> vertices = graph.query().has(Constants.GUID_PROPERTY_KEY, id).vertices();
- AtlasVertex AtlasVertex = vertices.iterator().next();
- Iterable<AtlasEdge> edges = AtlasVertex.getEdges(AtlasEdgeDirection.OUT, Constants.INTERNAL_PROPERTY_KEY_PREFIX + typeName + ".ref");
- if (edges.iterator().hasNext()) {
- ITypedReferenceableInstance entity = metadataService.getEntityDefinition(id);
- assertNotNull(entity.get("ref"));
- return true;
- }
- return false;
- }
-
- private static class EntitiesChangeListener implements EntityChangeListener {
- private List<String> deletedEntities = new ArrayList<>();
- private List<String> updatedEntities = new ArrayList<>();
-
- @Override
- public void onEntitiesAdded(Collection<ITypedReferenceableInstance> entities, boolean isImport)
- throws AtlasException {
- }
-
- @Override
- public void onEntitiesUpdated(Collection<ITypedReferenceableInstance> entities, boolean isImport)
- throws AtlasException {
- updatedEntities.clear();
- for (ITypedReferenceableInstance entity : entities) {
- updatedEntities.add(entity.getId()._getId());
- }
- }
-
- @Override
- public void onTraitsAdded(ITypedReferenceableInstance entity, Collection<? extends IStruct> traits)
- throws AtlasException {
- }
-
- @Override
- public void onTraitsDeleted(ITypedReferenceableInstance entity, Collection<String> traitNames)
- throws AtlasException {
- }
-
- @Override
- public void onTraitsUpdated(ITypedReferenceableInstance entity, Collection<? extends IStruct> traits)
- throws AtlasException {
- }
-
- @Override
- public void onEntitiesDeleted(Collection<ITypedReferenceableInstance> entities, boolean isImport)
- throws AtlasException {
- deletedEntities.clear();
- for (ITypedReferenceableInstance entity : entities) {
- deletedEntities.add(entity.getId()._getId());
- }
- }
-
- public List<String> getDeletedEntities() {
- return deletedEntities;
- }
-
- public List<String> getUpdatedEntities() {
- return updatedEntities;
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/service/StoreBackedTypeCacheMetadataServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/service/StoreBackedTypeCacheMetadataServiceTest.java b/repository/src/test/java/org/apache/atlas/service/StoreBackedTypeCacheMetadataServiceTest.java
deleted file mode 100644
index 5c37b9e..0000000
--- a/repository/src/test/java/org/apache/atlas/service/StoreBackedTypeCacheMetadataServiceTest.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.service;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
-import org.apache.atlas.repository.typestore.ITypeStore;
-import org.apache.atlas.repository.typestore.StoreBackedTypeCache;
-import org.apache.atlas.services.MetadataService;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.TypeUpdateException;
-import org.apache.atlas.typesystem.types.cache.TypeCache;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-
-
-/**
- * Verify MetadataService type operations trigger StoreBackedTypeCache to load non-cached types from the store.
- * StoreBackedTypeCacheTestModule Guice module sets Atlas configuration
- * to use {@link StoreBackedTypeCache} as the TypeCache implementation class.
- */
-@Guice(modules = TestModules.StoreBackedTypeCacheTestModule.class)
-public class StoreBackedTypeCacheMetadataServiceTest
-{
- @Inject
- private MetadataService metadataService;
-
- @Inject
- private ITypeStore typeStore;
-
- @Inject
- TypeCache typeCache;
-
- private StoreBackedTypeCache storeBackedTypeCache;
-
- private TypeSystem ts;
-
- @BeforeClass
- public void oneTimeSetup() throws Exception {
- Assert.assertTrue(typeCache instanceof StoreBackedTypeCache);
- storeBackedTypeCache = (StoreBackedTypeCache) typeCache;
-
- ts = TypeSystem.getInstance();
- ts.reset();
-
- // Populate the type store for testing.
- TestUtils.defineDeptEmployeeTypes(ts);
- TestUtils.createHiveTypes(ts);
- ImmutableList<String> typeNames = ts.getTypeNames();
- typeStore.store(ts, typeNames);
- }
-
- @BeforeMethod
- public void setUp() throws Exception {
- ts.reset();
- }
-
- @AfterClass
- public void tearDown() throws Exception {
- TypeSystem.getInstance().reset();
- AtlasGraphProvider.cleanup();
- }
-
- @Test
- public void testGetTypeDefinition() throws Exception {
- // Cache should be empty
- Assert.assertFalse(storeBackedTypeCache.isCachedInMemory("Manager"));
-
- // Type lookup on MetadataService should cause Manager type to be loaded from the type store
- // and cached.
- Assert.assertNotNull(metadataService.getTypeDefinition("Manager"));
- Assert.assertTrue(storeBackedTypeCache.isCachedInMemory("Manager"));
- }
-
- @Test
- public void testValidUpdateType() throws Exception {
- // Cache should be empty
- Assert.assertFalse(storeBackedTypeCache.isCachedInMemory(TestUtils.TABLE_TYPE));
-
- TypesDef typesDef = TestUtils.defineHiveTypes();
- String json = TypesSerialization.toJson(typesDef);
-
- // Update types with same definition, which should succeed.
- metadataService.updateType(json);
-
- // hive_table type should now be cached.
- Assert.assertTrue(storeBackedTypeCache.isCachedInMemory(TestUtils.TABLE_TYPE));
- }
-
- @Test
- public void testInvalidUpdateType() throws Exception {
- // Cache should be empty
- Assert.assertFalse(storeBackedTypeCache.isCachedInMemory(TestUtils.TABLE_TYPE));
-
- HierarchicalTypeDefinition<ClassType> classTypeDef = TypesUtil.createClassTypeDef(TestUtils.TABLE_TYPE, ImmutableSet.<String>of(),
- new AttributeDefinition("attr1", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null));
- String json = TypesSerialization.toJson(classTypeDef, false);
-
- // Try to update the type with disallowed changes. Should fail with TypeUpdateException.
- try {
- metadataService.updateType(json);
- Assert.fail(TypeUpdateException.class.getSimpleName() + " was expected but none thrown");
- }
- catch(TypeUpdateException e) {
- // good
- }
-
- // hive_table type should now be cached.
- Assert.assertTrue(storeBackedTypeCache.isCachedInMemory(TestUtils.TABLE_TYPE));
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/util/CompiledQueryCacheKeyTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/util/CompiledQueryCacheKeyTest.java b/repository/src/test/java/org/apache/atlas/util/CompiledQueryCacheKeyTest.java
deleted file mode 100644
index 725aa43..0000000
--- a/repository/src/test/java/org/apache/atlas/util/CompiledQueryCacheKeyTest.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.util;
-
-import org.apache.atlas.query.QueryParams;
-import org.testng.annotations.Test;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotSame;
-
-/**
- * Tests hashcode/equals behavior of CompiledQueryCacheKey
- *
- *
- */
-public class CompiledQueryCacheKeyTest {
-
- @Test
- public void testNoQueryParams() {
-
-
- CompiledQueryCacheKey e1 = new CompiledQueryCacheKey("query 1");
- CompiledQueryCacheKey e2 = new CompiledQueryCacheKey("query 1");
- CompiledQueryCacheKey e3 = new CompiledQueryCacheKey("query 2");
-
- assertKeysEqual(e1, e2);
- assertKeysDifferent(e2, e3);
- }
-
-
- @Test
- public void testWithQueryParams() {
-
- CompiledQueryCacheKey e1 = new CompiledQueryCacheKey("query 1", new QueryParams(10,10));
- CompiledQueryCacheKey e2 = new CompiledQueryCacheKey("query 1", new QueryParams(10,10));
- CompiledQueryCacheKey e3 = new CompiledQueryCacheKey("query 2", new QueryParams(10,10));
-
- assertKeysEqual(e1, e2);
- assertKeysDifferent(e2, e3);
- }
-
- @Test
- public void testOnlyQueryParamsDifferent() {
-
-
- CompiledQueryCacheKey e1 = new CompiledQueryCacheKey("query 1", new QueryParams(10,10));
- CompiledQueryCacheKey e2 = new CompiledQueryCacheKey("query 1", new QueryParams(20,10));
-
- assertKeysDifferent(e1, e2);
- }
-
- @Test
- public void testOnlyDslDifferent() {
-
-
- CompiledQueryCacheKey e1 = new CompiledQueryCacheKey("query 1", new QueryParams(10,10));
- CompiledQueryCacheKey e2 = new CompiledQueryCacheKey("query 2", new QueryParams(10,10));
-
- assertKeysDifferent(e1, e2);
- }
-
-
- @Test
- public void testMixOfQueryParamsAndNone() {
-
-
- CompiledQueryCacheKey e1 = new CompiledQueryCacheKey("query 1", new QueryParams(10,10));
- CompiledQueryCacheKey e2 = new CompiledQueryCacheKey("query 1");
-
- assertKeysDifferent(e1, e2);
- }
-
-
- private void assertKeysEqual(CompiledQueryCacheKey e1, CompiledQueryCacheKey e2) {
-
- assertEquals(e1.hashCode(), e2.hashCode());
- assertEquals(e1, e2);
- assertEquals(e2, e1);
- }
-
- private void assertKeysDifferent(CompiledQueryCacheKey e1, CompiledQueryCacheKey e2) {
-
- assertNotSame(e1.hashCode(), e2.hashCode());
- assertNotSame(e1, e2);
- assertNotSame(e2, e1);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/utils/HiveModel.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/utils/HiveModel.java b/repository/src/test/java/org/apache/atlas/utils/HiveModel.java
deleted file mode 100644
index 4fc0473..0000000
--- a/repository/src/test/java/org/apache/atlas/utils/HiveModel.java
+++ /dev/null
@@ -1,303 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.utils;
-
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.TypeSystem;
-
-import java.lang.reflect.Field;
-import java.lang.reflect.Modifier;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Date;
-import java.util.List;
-
-/**
- * Allows easy creation of entities for classes in the hive test model.
- *
- */
-public class HiveModel {
-
- public static class StructInstance {
-
- public String getTypeName() {
- return getClass().getSimpleName();
- }
-
- public Struct toStruct() throws Exception {
-
- Struct entity = new Struct(getTypeName());
- addDeclaredFields(getClass(), entity);
- return entity;
- }
-
- protected void addDeclaredFields(Class clazz, Struct r) throws Exception {
-
- for (Field f : clazz.getDeclaredFields()) {
-
- if (Modifier.isTransient(f.getModifiers())) {
- continue;
- }
- String fieldName = f.getName();
-
- f.setAccessible(true);
- Object value = f.get(this);
-
- if (value instanceof List) {
-
- List listValue = (List) value;
- List toSet = new ArrayList(listValue.size());
- for (Object listItem : listValue) {
- Object toAdd = null;
- toAdd = convertValue(listItem);
- toSet.add(toAdd);
- }
- r.set(fieldName, toSet);
- } else {
-
- Object converted = convertValue(value);
- r.set(fieldName, converted);
- }
- }
-
- if (clazz != StructInstance.class) {
- addDeclaredFields(clazz.getSuperclass(), r);
- }
- }
-
- private Object convertValue(Object toConvert) throws Exception {
-
- if (toConvert instanceof ClassInstance) {
- return ((ClassInstance) toConvert).toReferenceable();
- }
- if (toConvert instanceof StructInstance) {
- return ((StructInstance) toConvert).toStruct();
- } else {
- return toConvert;
- }
- }
- }
-
- public static class ClassInstance<T> extends StructInstance {
-
- private transient final Id guid;
- private transient List<String> traits = new ArrayList();
-
- public T withTrait(String name) {
- traits.add(name);
- return getInstance();
- }
-
- public T withTraits(List<String> names) {
- traits.addAll(names);
- return getInstance();
- }
-
- public T getInstance() {
- return (T) this;
- }
-
- public ClassInstance() {
- guid = new Id(getTypeName());
- }
-
- public Referenceable toReferenceable() throws Exception {
-
- String[] traitArray = new String[traits.size()];
- traitArray = traits.toArray(traitArray);
- Referenceable entity = new Referenceable(getTypeName(), traitArray);
- entity.replaceWithNewId(guid);
- addDeclaredFields(getClass(), entity);
-
- return entity;
- }
-
- public List<ITypedReferenceableInstance> getTypedReferencebles() throws Exception {
-
- List<ITypedReferenceableInstance> result = new ArrayList();
- for (ClassInstance containedInstance : getAllInstances()) {
- Referenceable entity = containedInstance.toReferenceable();
- ClassType type = TypeSystem.getInstance().getDataType(ClassType.class, entity.getTypeName());
- ITypedReferenceableInstance converted = type.convert(entity, Multiplicity.REQUIRED);
- result.add(converted);
- }
- return result;
- }
-
- protected List<ClassInstance> getAllInstances() {
-
- return (List) Collections.singletonList(this);
- }
-
- public Id getId() {
- return guid;
- }
- }
-
- public static class NamedInstance<T> extends ClassInstance<T> {
-
- private final String name;
-
- public NamedInstance(String name) {
- super();
- this.name = name;
- }
- }
-
- public static class HiveOrder extends StructInstance {
-
- private String col;
- private int order;
-
- public HiveOrder(String col, int order) {
- super();
- this.col = col;
- this.order = order;
- }
-
- }
-
- public static class DB extends NamedInstance<DB> {
-
- private String owner;
- private int createTime;
- private String clusterName;
-
- public DB(String name, String owner, int createTime, String clusterName) {
- super(name);
- this.owner = owner;
- this.createTime = createTime;
- this.clusterName = clusterName;
- }
- }
-
- public static class StorageDescriptor extends ClassInstance<StorageDescriptor> {
-
- private String inputFormat;
- private String outputFormat;
- private List<HiveOrder> sortCols;
-
- public StorageDescriptor(String inputFormat, String outputFormat, List<HiveOrder> sortCols) {
- super();
- this.inputFormat = inputFormat;
- this.outputFormat = outputFormat;
- this.sortCols = sortCols;
- }
- }
-
- public static class Column extends NamedInstance<Column> {
-
- private String type;
- private StorageDescriptor sd;
-
- public Column(String name, String type) {
- super(name);
- this.type = type;
- }
-
- public void setStorageDescriptor(StorageDescriptor sd) {
- this.sd = sd;
- }
- }
-
- public static class Table extends NamedInstance<Table> {
-
- private DB db;
- private Date created;
- private StorageDescriptor sd;
- private transient List<Column> colDefs;
-
- public Table(String name, DB db, StorageDescriptor sd, List<Column> colDefs) {
- this(name, db, sd, new Date(TestUtils.TEST_DATE_IN_LONG), colDefs);
- }
-
- public Table(String name, DB db, StorageDescriptor sd, Date created, List<Column> colDefs) {
-
- super(name);
- this.colDefs = colDefs;
- this.db = db;
- this.sd = sd;
- this.created = created;
- for (Column col : colDefs) {
- col.setStorageDescriptor(sd);
- }
- }
-
- public List<Column> getColumns() {
- return colDefs;
- }
-
- @Override
- protected List<ClassInstance> getAllInstances() {
-
- List<ClassInstance> result = new ArrayList(colDefs.size() + 2);
- result.add(sd);
- result.addAll(colDefs);
- result.add(this);
- return result;
- }
- }
-
- public static class Partition extends ClassInstance<Partition> {
-
- private List<String> values;
- private Table table;
-
- public Partition(List<String> values, Table table) {
-
- super();
- this.values = values;
- this.table = table;
- }
-
- }
-
- public static class LoadProcess extends NamedInstance<LoadProcess> {
-
- private List<Table> inputTables;
- private Table outputTable;
-
- public LoadProcess(String name, List<Table> inputTables, Table outputTable) {
- super(name);
- this.inputTables = inputTables;
- this.outputTable = outputTable;
- }
-
- }
-
- public static class View extends NamedInstance<View> {
-
- private DB db;
- private List<Table> inputTables;
-
- public View(String name, DB db, List<Table> inputTables) {
- super(name);
- this.db = db;
- this.inputTables = inputTables;
- }
-
- }
-
-}
[11/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplication of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/Id.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/Id.java b/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/Id.java
deleted file mode 100755
index ba05a45..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/Id.java
+++ /dev/null
@@ -1,307 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.persistence;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.utils.ParamChecker;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.types.FieldMapping;
-import org.apache.atlas.utils.SHA256Utils;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.nio.charset.Charset;
-import java.security.MessageDigest;
-import java.util.Date;
-import java.util.Map;
-import java.util.Objects;
-import java.util.UUID;
-import java.util.concurrent.atomic.AtomicLong;
-
-public class Id implements ITypedReferenceableInstance {
- public enum EntityState {
- ACTIVE, DELETED
- }
-
- public final String id;
- public final String typeName;
- public final int version;
- public EntityState state;
- private static AtomicLong s_nextId = new AtomicLong(System.nanoTime());
- public final AtlasSystemAttributes systemAttributes;
-
- public Id(String id, int version, String typeName, String state) {
- id = ParamChecker.notEmpty(id, "id");
- typeName = ParamChecker.notEmpty(typeName, "typeName");
- state = ParamChecker.notEmptyIfNotNull(state, "state");
- this.id = id;
- this.typeName = typeName;
- this.version = version;
- if (state == null) {
- this.state = EntityState.ACTIVE;
- } else {
- this.state = EntityState.valueOf(state.toUpperCase());
- }
- this.systemAttributes = new AtlasSystemAttributes();
- }
-
- public Id(String id, int version, String typeName) {
- this(id, version, typeName, null);
- }
-
- public Id(long id, int version, String typeName) {
- this("" + id, version, typeName);
- }
-
- public Id(long id, int version, String typeName, String state) {
- this("" + id, version, typeName, state);
- }
-
- public Id(String typeName) {
- this("" + Id.nextNegativeLong(), 0, typeName);
- }
-
- public boolean isUnassigned() {
- try {
- long l = Long.parseLong(id);
- return l < 0;
- } catch (NumberFormatException ne) {
- return false;
- }
- }
-
- public boolean isAssigned() {
- try {
- UUID.fromString(id);
- } catch (IllegalArgumentException e) {
- return false;
- }
-
- return true;
- }
-
- @Override
- public String toString() {
- return String.format("(type: %s, id: %s)", typeName, isUnassigned() ? "<unassigned>" : "" + id);
- }
-
- @Override
- public String toShortString() {
- return String.format("id[type=%s guid=%s state=%s]", typeName, id, state);
- }
-
- @Override
- public AtlasSystemAttributes getSystemAttributes(){
- return systemAttributes;
- }
-
- public String getClassName() {
- return typeName;
- }
-
- public int getVersion() {
- return version;
- }
-
- public String _getId() {
- return id;
- }
-
- public EntityState getState() {
- return state;
- }
-
- public String getStateAsString() {
- return state == null ? null : state.name();
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- Id id1 = (Id) o;
- return version == id1.version &&
- Objects.equals(id, id1.id) &&
- Objects.equals(typeName, id1.typeName) &&
- state == id1.state;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(id, typeName, version, state);
- }
-
- @Override
- public ImmutableList<String> getTraits() {
- return null;
- }
-
- @Override
- public Id getId() {
- return this;
- }
-
- @Override
- public IStruct getTrait(String typeName) {
- return null;
- }
-
- @Override
- public String getTypeName() {
- return typeName;
- }
-
- @Override
- public Object get(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- @Override
- public void set(String attrName, Object val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- @Override
- public FieldMapping fieldMapping() {
- return null;
- }
-
- @Override
- public Map<String, Object> getValuesMap() throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setNull(String attrName) throws AtlasException {
- set(attrName, null);
- }
-
- public boolean getBoolean(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public byte getByte(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public short getShort(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public int getInt(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public long getLong(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public float getFloat(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public double getDouble(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public BigInteger getBigInt(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public BigDecimal getBigDecimal(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public Date getDate(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public String getString(String attrName) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setBoolean(String attrName, boolean val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setByte(String attrName, byte val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setShort(String attrName, short val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setInt(String attrName, int val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setLong(String attrName, long val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setFloat(String attrName, float val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setDouble(String attrName, double val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setBigInt(String attrName, BigInteger val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setBigDecimal(String attrName, BigDecimal val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setDate(String attrName, Date val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public void setString(String attrName, String val) throws AtlasException {
- throw new AtlasException("Get/Set not supported on an Id object");
- }
-
- public boolean isValueSet(String attrName) throws AtlasException {
- throw new AtlasException("Attributes not set on an Id object");
- }
-
- @Override
- public String getSignatureHash(MessageDigest digester) throws AtlasException {
- digester.update(id.getBytes(Charset.forName("UTF-8")));
- digester.update(typeName.getBytes(Charset.forName("UTF-8")));
- byte[] digest = digester.digest();
- return SHA256Utils.toString(digest);
- }
-
- private static long nextNegativeLong() {
- long ret = s_nextId.getAndDecrement();
-
- if (ret > 0) {
- ret *= -1;
- } else if (ret == 0) {
- ret = Long.MIN_VALUE;
- }
-
- return ret;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/MapIds.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/MapIds.java b/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/MapIds.java
deleted file mode 100755
index e62f29d..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/MapIds.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.persistence;
-
-import com.google.common.collect.ImmutableCollection;
-import com.google.common.collect.ImmutableMap;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.ObjectGraphWalker;
-
-import java.util.Map;
-
-public class MapIds implements ObjectGraphWalker.NodeProcessor {
-
- final Map<Id, Id> idToNewIdMap;
-
- public MapIds(Map<Id, Id> idToNewIdMap) {
- this.idToNewIdMap = idToNewIdMap;
- }
-
- @Override
- public void processNode(ObjectGraphWalker.Node nd) throws AtlasException {
-
- IReferenceableInstance ref = null;
- Id id = null;
-
- if (nd.attributeName == null) {
- ref = (IReferenceableInstance) nd.instance;
- Id newId = idToNewIdMap.get(ref.getId());
- if (newId != null) {
- ((ReferenceableInstance) ref).replaceWithNewId(newId);
- }
- } else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- if (nd.value != null && nd.value instanceof IReferenceableInstance) {
- Id oldId = ((IReferenceableInstance) nd.value).getId();
- Id newId = idToNewIdMap.get(oldId);
- /*
- * Replace Instances with Ids, irrespective of whether they map to newIds or not.
- */
- newId = newId == null ? oldId : newId;
- nd.instance.set(nd.attributeName, newId);
- }
- } else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
- DataTypes.ArrayType aT = (DataTypes.ArrayType) nd.aInfo.dataType();
- Object v = aT.mapIds((ImmutableCollection) nd.value, nd.aInfo.multiplicity, idToNewIdMap);
- nd.instance.set(nd.attributeName, v);
- } else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
- DataTypes.MapType mT = (DataTypes.MapType) nd.aInfo.dataType();
- Object v = mT.mapIds((ImmutableMap) nd.value, nd.aInfo.multiplicity, idToNewIdMap);
- nd.instance.set(nd.attributeName, v);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/ReferenceableInstance.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/ReferenceableInstance.java b/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/ReferenceableInstance.java
deleted file mode 100755
index be2634d..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/ReferenceableInstance.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.persistence;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.FieldMapping;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.utils.SHA256Utils;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.security.MessageDigest;
-import java.util.Date;
-import java.util.HashSet;
-
-/*
- * @todo handle names prefixed by traitName.
- */
-public class ReferenceableInstance extends StructInstance implements ITypedReferenceableInstance {
-
- private final ImmutableMap<String, ITypedStruct> traits;
- private final ImmutableList<String> traitNames;
- private Id id;
- private AtlasSystemAttributes systemAttributes;
-
-
- public ReferenceableInstance(Id id, String dataTypeName, AtlasSystemAttributes systemAttributes, FieldMapping fieldMapping, boolean[] nullFlags,
- boolean[] explicitSets, boolean[] bools, byte[] bytes, short[] shorts, int[] ints, long[] longs, float[] floats, double[] doubles,
- BigDecimal[] bigDecimals, BigInteger[] bigIntegers, Date[] dates, String[] strings,
- ImmutableList<Object>[] arrays, ImmutableMap<Object, Object>[] maps, StructInstance[] structs,
- ReferenceableInstance[] referenceableInstances, Id[] ids, ImmutableMap<String, ITypedStruct> traits) {
- super(dataTypeName, fieldMapping, nullFlags, explicitSets, bools, bytes, shorts, ints, longs, floats, doubles, bigDecimals,
- bigIntegers, dates, strings, arrays, maps, structs, referenceableInstances, ids);
- this.id = id;
- this.traits = traits;
- ImmutableList.Builder<String> b = new ImmutableList.Builder<>();
- for (String t : traits.keySet()) {
- b.add(t);
- }
- this.traitNames = b.build();
- if (systemAttributes == null){
- this.systemAttributes = new AtlasSystemAttributes();
- }
- else {
- this.systemAttributes = systemAttributes;
- }
- }
-
- @Override
- public ImmutableList<String> getTraits() {
- return traitNames;
- }
-
- @Override
- public Id getId() {
- return id;
- }
-
- @Override
- public IStruct getTrait(String typeName) {
- return traits.get(typeName);
- }
-
- @Override
- public AtlasSystemAttributes getSystemAttributes(){
- return systemAttributes;
- }
-
- /**
- * @nopub
- * @param id
- */
- public void replaceWithNewId(Id id) {
- this.id = id;
- }
-
- @Override
- public String toString() {
- try {
- StringBuilder buf = new StringBuilder();
- String prefix = "";
-
- fieldMapping.output(this, buf, prefix, new HashSet<IReferenceableInstance>());
- return buf.toString();
-
- } catch (AtlasException me) {
- throw new RuntimeException(me);
- }
- }
-
- @Override
- public String toShortString() {
- String name = null;
- if (fieldMapping().fields.containsKey("name")) {
- try {
- name = getString("name");
- } catch (AtlasException e) {
- //ignore if there is no field name
- }
- }
- return String.format("entity[type=%s guid=%s name=%s]", getTypeName(), getId()._getId(), name);
- }
-
- @Override
- public String getSignatureHash(MessageDigest digester) throws AtlasException {
- ClassType classType = TypeSystem.getInstance().getDataType(ClassType.class, getTypeName());
- classType.updateSignatureHash(digester, this);
- byte[] digest = digester.digest();
- return SHA256Utils.toString(digest);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/StructInstance.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/StructInstance.java b/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/StructInstance.java
deleted file mode 100755
index 766d2d0..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/StructInstance.java
+++ /dev/null
@@ -1,790 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.persistence;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.EnumType;
-import org.apache.atlas.typesystem.types.EnumValue;
-import org.apache.atlas.typesystem.types.FieldMapping;
-import org.apache.atlas.typesystem.types.StructType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.ValueConversionException;
-import org.apache.atlas.utils.SHA256Utils;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.security.MessageDigest;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-
-public class StructInstance implements ITypedStruct {
- public final String dataTypeName;
- public final FieldMapping fieldMapping;
- public final boolean nullFlags[];
- public final boolean explicitSets[];
- public final boolean[] bools;
- public final byte[] bytes;
- public final short[] shorts;
- public final int[] ints;
- public final long[] longs;
- public final float[] floats;
- public final double[] doubles;
- public final BigDecimal[] bigDecimals;
- public final BigInteger[] bigIntegers;
- public final Date[] dates;
- public final String[] strings;
- public final ImmutableList<Object>[] arrays;
- public final ImmutableMap<Object, Object>[] maps;
- public final StructInstance[] structs;
- public final ReferenceableInstance[] referenceables;
- public final Id[] ids;
-
- public StructInstance(String dataTypeName, FieldMapping fieldMapping, boolean[] nullFlags, boolean[] explicitSets, boolean[] bools,
- byte[] bytes, short[] shorts, int[] ints, long[] longs, float[] floats, double[] doubles,
- BigDecimal[] bigDecimals, BigInteger[] bigIntegers, Date[] dates, String[] strings,
- ImmutableList<Object>[] arrays, ImmutableMap<Object, Object>[] maps, StructInstance[] structs,
- ReferenceableInstance[] referenceables, Id[] ids) {
- assert dataTypeName != null;
- this.dataTypeName = dataTypeName;
- this.fieldMapping = fieldMapping;
- this.nullFlags = nullFlags;
- this.explicitSets = explicitSets;
- this.bools = bools;
- this.bytes = bytes;
- this.shorts = shorts;
- this.ints = ints;
- this.longs = longs;
- this.floats = floats;
- this.doubles = doubles;
- this.bigDecimals = bigDecimals;
- this.bigIntegers = bigIntegers;
- this.dates = dates;
- this.strings = strings;
- this.arrays = arrays;
- this.maps = maps;
- this.structs = structs;
- this.referenceables = referenceables;
- this.ids = ids;
-
- for (int i = 0; i < nullFlags.length; i++) {
- nullFlags[i] = true;
- }
-
- for (int i = 0; i < explicitSets.length; i++) {
- explicitSets[i] = false;
- }
- }
-
- @Override
- public String getTypeName() {
- return dataTypeName;
- }
-
- @Override
- public FieldMapping fieldMapping() {
- return fieldMapping;
- }
-
- public void set(String attrName, Object val) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new ValueConversionException(getTypeName(), val, "Unknown field " + attrName);
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
- Object cVal = null;
-
- explicitSets[nullPos] = true;
-
- if (val != null && val instanceof Id) {
- ClassType clsType = TypeSystem.getInstance().getDataType(ClassType.class, i.dataType().getName());
- clsType.validateId((Id) val);
- cVal = val;
- } else {
- try {
- cVal = i.dataType().convert(val, i.multiplicity);
- } catch(ValueConversionException.NullConversionException e) {
- throw new ValueConversionException.NullConversionException("For field '" + attrName + "'", e);
- }
- }
- if (cVal == null) {
- nullFlags[nullPos] = true;
- return;
- }
- nullFlags[nullPos] = false;
- if (i.dataType() == DataTypes.BOOLEAN_TYPE) {
- bools[pos] = (Boolean) cVal;
- } else if (i.dataType() == DataTypes.BYTE_TYPE) {
- bytes[pos] = (Byte) cVal;
- } else if (i.dataType() == DataTypes.SHORT_TYPE) {
- shorts[pos] = (Short) cVal;
- } else if (i.dataType() == DataTypes.INT_TYPE) {
- ints[pos] = (Integer) cVal;
- } else if (i.dataType() == DataTypes.LONG_TYPE) {
- longs[pos] = (Long) cVal;
- } else if (i.dataType() == DataTypes.FLOAT_TYPE) {
- floats[pos] = (Float) cVal;
- } else if (i.dataType() == DataTypes.DOUBLE_TYPE) {
- doubles[pos] = (Double) cVal;
- } else if (i.dataType() == DataTypes.BIGINTEGER_TYPE) {
- bigIntegers[pos] = (BigInteger) cVal;
- } else if (i.dataType() == DataTypes.BIGDECIMAL_TYPE) {
- bigDecimals[pos] = (BigDecimal) cVal;
- } else if (i.dataType() == DataTypes.DATE_TYPE) {
- dates[pos] = (Date) cVal;
- } else if (i.dataType() == DataTypes.STRING_TYPE) {
- strings[pos] = (String) cVal;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.ENUM) {
- ints[pos] = ((EnumValue) cVal).ordinal;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
- arrays[pos] = (ImmutableList) cVal;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
- maps[pos] = (ImmutableMap) cVal;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT
- || i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
- structs[pos] = (StructInstance) cVal;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- if (cVal instanceof Id) {
- ids[pos] = (Id) cVal;
- } else {
- referenceables[pos] = (ReferenceableInstance) cVal;
- }
- } else {
- throw new AtlasException(String.format("Unknown datatype %s", i.dataType()));
- }
- }
-
- public Object get(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- if ( i.dataType().getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
- return ((DataTypes.PrimitiveType) i.dataType()).nullValue();
- } else {
- return null;
- }
- }
-
- if (i.dataType() == DataTypes.BOOLEAN_TYPE) {
- return bools[pos];
- } else if (i.dataType() == DataTypes.BYTE_TYPE) {
- return bytes[pos];
- } else if (i.dataType() == DataTypes.SHORT_TYPE) {
- return shorts[pos];
- } else if (i.dataType() == DataTypes.INT_TYPE) {
- return ints[pos];
- } else if (i.dataType() == DataTypes.LONG_TYPE) {
- return longs[pos];
- } else if (i.dataType() == DataTypes.FLOAT_TYPE) {
- return floats[pos];
- } else if (i.dataType() == DataTypes.DOUBLE_TYPE) {
- return doubles[pos];
- } else if (i.dataType() == DataTypes.BIGINTEGER_TYPE) {
- return bigIntegers[pos];
- } else if (i.dataType() == DataTypes.BIGDECIMAL_TYPE) {
- return bigDecimals[pos];
- } else if (i.dataType() == DataTypes.DATE_TYPE) {
- return dates[pos];
- } else if (i.dataType() == DataTypes.STRING_TYPE) {
- return strings[pos];
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.ENUM) {
- return ((EnumType) i.dataType()).fromOrdinal(ints[pos]);
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
- return arrays[pos];
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
- return maps[pos];
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT
- || i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
- return structs[pos];
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- if (ids[pos] != null) {
- return ids[pos];
- } else {
- return referenceables[pos];
- }
- } else {
- throw new AtlasException(String.format("Unknown datatype %s", i.dataType()));
- }
- }
-
- public void setNull(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
- nullFlags[nullPos] = true;
- explicitSets[nullPos] = true;
-
- int pos = fieldMapping.fieldPos.get(attrName);
-
- if (i.dataType() == DataTypes.BIGINTEGER_TYPE) {
- bigIntegers[pos] = null;
- } else if (i.dataType() == DataTypes.BIGDECIMAL_TYPE) {
- bigDecimals[pos] = null;
- } else if (i.dataType() == DataTypes.DATE_TYPE) {
- dates[pos] = null;
- } else if (i.dataType() == DataTypes.INT_TYPE) {
- ints[pos] = 0;
- } else if (i.dataType() == DataTypes.BOOLEAN_TYPE) {
- bools[pos] = false;
- } else if (i.dataType() == DataTypes.STRING_TYPE) {
- strings[pos] = null;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
- arrays[pos] = null;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
- maps[pos] = null;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT
- || i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
- structs[pos] = null;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- ids[pos] = null;
- referenceables[pos] = null;
- } else {
- throw new AtlasException(String.format("Unknown datatype %s", i.dataType()));
- }
- }
-
- /*
- * Use only for json serialization
- * @nonpublic
- */
- @Override
- public Map<String, Object> getValuesMap() throws AtlasException {
- Map<String, Object> m = new HashMap<>();
- for (String attr : fieldMapping.fields.keySet()) {
-// int pos = fieldMapping.fieldNullPos.get(attr);
-// if ( explicitSets[pos] ) {
- m.put(attr, get(attr));
-// }
- }
- return m;
- }
-
- public boolean getBoolean(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.BOOLEAN_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
- getTypeName(), DataTypes.BOOLEAN_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- return DataTypes.BOOLEAN_TYPE.nullValue();
- }
-
- return bools[pos];
- }
-
- public byte getByte(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.BYTE_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
- getTypeName(), DataTypes.BYTE_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- return DataTypes.BYTE_TYPE.nullValue();
- }
-
- return bytes[pos];
- }
-
- public short getShort(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.SHORT_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
- getTypeName(), DataTypes.SHORT_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- return DataTypes.SHORT_TYPE.nullValue();
- }
-
- return shorts[pos];
- }
-
- public int getInt(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
-
- if (i.dataType() != DataTypes.INT_TYPE && !(i.dataType() instanceof EnumType)) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
- getTypeName(), DataTypes.INT_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- return DataTypes.INT_TYPE.nullValue();
- }
-
- return ints[pos];
- }
-
- public long getLong(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.LONG_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
- getTypeName(), DataTypes.LONG_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- return DataTypes.LONG_TYPE.nullValue();
- }
-
- return longs[pos];
- }
-
- public float getFloat(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.FLOAT_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
- getTypeName(), DataTypes.FLOAT_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- return DataTypes.FLOAT_TYPE.nullValue();
- }
-
- return floats[pos];
- }
-
- public double getDouble(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.DOUBLE_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
- getTypeName(), DataTypes.DOUBLE_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- return DataTypes.DOUBLE_TYPE.nullValue();
- }
-
- return doubles[pos];
- }
-
- public BigInteger getBigInt(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.BIGINTEGER_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
- getTypeName(), DataTypes.BIGINTEGER_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- return DataTypes.BIGINTEGER_TYPE.nullValue();
- }
-
- return bigIntegers[pos];
- }
-
- public BigDecimal getBigDecimal(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.BIGDECIMAL_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
- getTypeName(), DataTypes.BIGDECIMAL_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- return DataTypes.BIGDECIMAL_TYPE.nullValue();
- }
-
- return bigDecimals[pos];
- }
-
- public Date getDate(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.DATE_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
- getTypeName(), DataTypes.DATE_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- return DataTypes.DATE_TYPE.nullValue();
- }
-
- return dates[pos];
- }
-
- public String getString(String attrName) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.STRING_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic get method", attrName,
- getTypeName(), DataTypes.STRING_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- if (nullFlags[nullPos]) {
- return DataTypes.STRING_TYPE.nullValue();
- }
-
- return strings[pos];
- }
-
- public void setBoolean(String attrName, boolean val) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.BOOLEAN_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
- getTypeName(), DataTypes.BOOLEAN_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- nullFlags[nullPos] = false;
- bools[pos] = val;
- explicitSets[nullPos] = true;
- }
-
- public void setByte(String attrName, byte val) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.BYTE_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
- getTypeName(), DataTypes.BYTE_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- nullFlags[nullPos] = false;
- bytes[pos] = val;
- explicitSets[nullPos] = true;
- }
-
- public void setShort(String attrName, short val) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.SHORT_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
- getTypeName(), DataTypes.SHORT_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- nullFlags[nullPos] = false;
- shorts[pos] = val;
- explicitSets[nullPos] = true;
- }
-
- public void setInt(String attrName, int val) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.INT_TYPE && !(i.dataType() instanceof EnumType)) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
- getTypeName(), DataTypes.INT_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- nullFlags[nullPos] = false;
- ints[pos] = val;
- explicitSets[nullPos] = true;
- }
-
- public void setLong(String attrName, long val) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.LONG_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
- getTypeName(), DataTypes.LONG_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- nullFlags[nullPos] = false;
- longs[pos] = val;
- explicitSets[nullPos] = true;
- }
-
- public void setFloat(String attrName, float val) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.FLOAT_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
- getTypeName(), DataTypes.FLOAT_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- nullFlags[nullPos] = false;
- floats[pos] = val;
- explicitSets[nullPos] = true;
- }
-
- public void setDouble(String attrName, double val) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.DOUBLE_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
- getTypeName(), DataTypes.DOUBLE_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- nullFlags[nullPos] = false;
- doubles[pos] = val;
- explicitSets[nullPos] = true;
- }
-
- public void setBigInt(String attrName, BigInteger val) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.BIGINTEGER_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
- getTypeName(), DataTypes.BIGINTEGER_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- nullFlags[nullPos] = val == null;
- bigIntegers[pos] = val;
- explicitSets[nullPos] = true;
- }
-
- public void setBigDecimal(String attrName, BigDecimal val) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.BIGDECIMAL_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
- getTypeName(), DataTypes.BIGDECIMAL_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- nullFlags[nullPos] = val == null;
- bigDecimals[pos] = val;
- explicitSets[nullPos] = true;
- }
-
- public void setDate(String attrName, Date val) throws AtlasException {
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.DATE_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
- getTypeName(), DataTypes.DATE_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- nullFlags[nullPos] = val == null;
- dates[pos] = val;
- explicitSets[nullPos] = true;
- }
-
- public void setString(String attrName, String val) throws AtlasException {
-
- AttributeInfo i = fieldMapping.fields.get(attrName);
- if (i == null) {
- throw new AtlasException(String.format("Unknown field %s for Struct %s", attrName, getTypeName()));
- }
-
- if (i.dataType() != DataTypes.STRING_TYPE) {
- throw new AtlasException(
- String.format("Field %s for Struct %s is not a %s, call generic set method", attrName,
- getTypeName(), DataTypes.STRING_TYPE.getName()));
- }
-
- int pos = fieldMapping.fieldPos.get(attrName);
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
-
- nullFlags[nullPos] = val == null;
- strings[pos] = val;
- explicitSets[nullPos] = true;
- }
-
- @Override
- public String toString() {
- try {
- StringBuilder buf = new StringBuilder();
- String prefix = "";
-
- fieldMapping.output(this, buf, prefix, null);
- return buf.toString();
-
- } catch (AtlasException me) {
- throw new RuntimeException(me);
- }
- }
-
- @Override
- public String getSignatureHash(MessageDigest digester) throws AtlasException {
- StructType structType = TypeSystem.getInstance().getDataType(StructType.class, getTypeName());
- structType.updateSignatureHash(digester, this);
- byte[] digest = digester.digest();
- return SHA256Utils.toString(digest);
- }
-
- @Override
- public boolean isValueSet(final String attrName) throws AtlasException {
- int nullPos = fieldMapping.fieldNullPos.get(attrName);
- return explicitSets[nullPos];
- }
-
- @Override
- public String toShortString() {
- return String.format("struct[type=%s]", dataTypeName);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/AbstractDataType.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/AbstractDataType.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/AbstractDataType.java
deleted file mode 100755
index 874138b..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/AbstractDataType.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableSortedMap;
-
-import org.apache.atlas.AtlasConstants;
-import org.apache.atlas.AtlasException;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Set;
-
-abstract class AbstractDataType<T> implements IDataType<T> {
-
- public final String name;
- public final String description;
- public final String version;
-
- public AbstractDataType(String name, String description) {
-
- super();
- this.name = name;
- this.description = description;
- this.version = AtlasConstants.DEFAULT_TYPE_VERSION;
- }
-
- public AbstractDataType(String name, String description, String version) {
-
- super();
- this.name = name;
- this.description = description;
- this.version = version;
- }
-
- protected T convertNull(Multiplicity m) throws AtlasException {
- if (!m.nullAllowed()) {
- throw new ValueConversionException.NullConversionException(m);
- }
- return null;
- }
-
- @Override
- public void output(T val, Appendable buf, String prefix, Set<T> inProcess) throws AtlasException {
- final String strValue;
-
- if (val == null) {
- strValue = "<null>";
- } else if (val instanceof Map) {
- ImmutableSortedMap immutableSortedMap = ImmutableSortedMap.copyOf((Map) val);
- strValue = immutableSortedMap.toString();
- } else {
- strValue = val.toString();
- }
-
- TypeUtils.outputVal(strValue, buf, prefix);
- }
-
- @Override
- public void output(Appendable buf, Set<String> typesInProcess) throws AtlasException {
-
- try {
- buf.append(toString());
- } catch (IOException e) {
- throw new AtlasException(e);
- }
- }
-
- /* (non-Javadoc)
- * @see java.lang.Object#toString()
- */
- @Override
- public String toString() {
- return "{name=" + name + ", description=" + description + "}";
- }
-
- /**
- * Validate that current definition can be updated with the new definition
- * @param newType
- */
- @Override
- public void validateUpdate(IDataType newType) throws TypeUpdateException {
- if (!getName().equals(newType.getName()) || !getClass().getName().equals(newType.getClass().getName())) {
- throw new TypeUpdateException(newType);
- }
- }
-
- @Override
- public String getName() {
- return name;
- }
-
- @Override
- public String getDescription() {
- return description;
- }
-
- @Override
- public String getVersion() {
- return version;
- }
-}
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/AttributeDefinition.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/AttributeDefinition.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/AttributeDefinition.java
deleted file mode 100755
index 5561f0b..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/AttributeDefinition.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.apache.atlas.utils.ParamChecker;
-
-import java.util.Objects;
-
-public final class AttributeDefinition {
-
- public final String name;
- public final String dataTypeName;
- public final Multiplicity multiplicity;
- //A composite is the one whose lifecycle is dependent on the enclosing type and is not just a reference
- public final boolean isComposite;
- public final boolean isUnique;
- public final boolean isIndexable;
-
- /**
- * If this is a reference attribute, then the name of the attribute on the Class
- * that this refers to.
- */
- public final String reverseAttributeName;
-
- public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity, boolean isComposite,
- String reverseAttributeName) {
- this(name, dataTypeName, multiplicity, isComposite, false, false, reverseAttributeName);
-
- }
-
- public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity, boolean isComposite,
- boolean isUnique, boolean isIndexable, String reverseAttributeName) {
- this.name = ParamChecker.notEmpty(name, "Attribute name");
- this.dataTypeName = ParamChecker.notEmpty(dataTypeName, "Attribute type");
- this.multiplicity = multiplicity;
- this.isComposite = isComposite;
- this.isUnique = isUnique;
- this.isIndexable = isIndexable;
- this.reverseAttributeName = ParamChecker.notEmptyIfNotNull(reverseAttributeName, "Reverse attribute name");
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- AttributeDefinition that = (AttributeDefinition) o;
- return isComposite == that.isComposite &&
- isUnique == that.isUnique &&
- isIndexable == that.isIndexable &&
- Objects.equals(name, that.name) &&
- Objects.equals(dataTypeName, that.dataTypeName) &&
- Objects.equals(multiplicity, that.multiplicity) &&
- Objects.equals(reverseAttributeName, that.reverseAttributeName);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(name, dataTypeName, multiplicity, isComposite, isUnique, isIndexable, reverseAttributeName);
- }
-
- @Override
- public String toString() {
- return name;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/AttributeInfo.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/AttributeInfo.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/AttributeInfo.java
deleted file mode 100755
index c24a55f..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/AttributeInfo.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.apache.atlas.AtlasException;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-
-import java.io.IOException;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-
-public class AttributeInfo {
- public final String name;
- public final Multiplicity multiplicity;
- //A composite is the one whose lifecycle is dependent on the enclosing type and is not just a reference
- public final boolean isComposite;
- public final boolean isUnique;
- public final boolean isIndexable;
- /**
- * If this is a reference attribute, then the name of the attribute on the Class
- * that this refers to.
- */
- public final String reverseAttributeName;
- private IDataType dataType;
-
- public AttributeInfo(TypeSystem t, AttributeDefinition def, Map<String, IDataType> tempTypes) throws AtlasException {
- this.name = def.name;
- this.dataType =
- (tempTypes != null && tempTypes.containsKey(def.dataTypeName)) ? tempTypes.get(def.dataTypeName) :
- t.getDataType(IDataType.class, def.dataTypeName);
- this.multiplicity = def.multiplicity;
- this.isComposite = def.isComposite;
- this.isUnique = def.isUnique;
- this.isIndexable = def.isIndexable;
- this.reverseAttributeName = def.reverseAttributeName;
- }
-
- public IDataType dataType() {
- return dataType;
- }
-
- void setDataType(IDataType dT) {
- dataType = dT;
- }
-
- @Override
- public String toString() {
- StringBuilder buf = new StringBuilder();
- try {
- output(buf, new HashSet<String>());
- } catch (AtlasException e) {
- throw new RuntimeException(e);
- }
- return buf.toString();
- }
-
- public void output(Appendable buf, Set<String> typesInProcess) throws AtlasException {
- try {
- buf.append("{name=").append(name);
- buf.append(", dataType=");
- dataType.output(buf, typesInProcess);
- buf.append(", multiplicity=").append(multiplicity.toString());
- buf.append(", isComposite=").append(Boolean.toString(isComposite));
- buf.append(", isUnique=").append(Boolean.toString(isUnique));
- buf.append(", isIndexable=").append(Boolean.toString(isIndexable));
- buf.append(", reverseAttributeName=").append(reverseAttributeName);
- buf.append('}');
- }
- catch(IOException e) {
- throw new AtlasException(e);
- }
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(name, multiplicity, isComposite, isUnique, isIndexable, reverseAttributeName, dataType);
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- AttributeInfo that = (AttributeInfo) o;
- return isComposite == that.isComposite &&
- isUnique == that.isUnique &&
- isIndexable == that.isIndexable &&
- Objects.equals(name, that.name) &&
- Objects.equals(multiplicity, that.multiplicity) &&
- Objects.equals(reverseAttributeName, that.reverseAttributeName) &&
- dataType == null ? that.dataType == null : Objects.equals(dataType.getName(), that.dataType.getName());
- }
-
- public String toJson() throws JSONException {
- JSONObject json = new JSONObject();
- json.put("name", name);
- json.put("multiplicity", multiplicity.toJson());
- json.put("isComposite", isComposite);
- json.put("isUnique", isUnique);
- json.put("isIndexable", isIndexable);
- json.put("dataType", dataType.getName());
- json.put("reverseAttributeName", reverseAttributeName);
- return json.toString();
- }
-
- public static AttributeDefinition fromJson(String jsonStr) throws JSONException {
- JSONObject json = new JSONObject(jsonStr);
- String reverseAttr = null;
- if (json.has("reverseAttributeName")) {
- reverseAttr = json.getString("reverseAttributeName");
- }
- return new AttributeDefinition(json.getString("name"), json.getString("dataType"),
- Multiplicity.fromJson(json.getString("multiplicity")), json.getBoolean("isComposite"),
- json.getBoolean("isUnique"), json.getBoolean("isIndexable"), reverseAttr);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/ClassType.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ClassType.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/ClassType.java
deleted file mode 100755
index 2f2b090..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ClassType.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableBiMap;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-
-import org.apache.atlas.AtlasConstants;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.persistence.AtlasSystemAttributes;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.StructInstance;
-import scala.tools.cmd.gen.AnyVals;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.nio.charset.Charset;
-import java.security.MessageDigest;
-import java.util.*;
-
-public class ClassType extends HierarchicalType<ClassType, IReferenceableInstance>
- implements IConstructableType<IReferenceableInstance, ITypedReferenceableInstance> {
-
- public static final String TRAIT_NAME_SEP = "::";
-
- public final Map<AttributeInfo, List<String>> infoToNameMap;
-
- ClassType(TypeSystem typeSystem, String name, String description, ImmutableSet<String> superTypes, int numFields) {
- this(typeSystem, name, description, AtlasConstants.DEFAULT_TYPE_VERSION, superTypes, numFields);
- }
-
- ClassType(TypeSystem typeSystem, String name, String description, String version, ImmutableSet<String> superTypes, int numFields) {
- super(typeSystem, ClassType.class, name, description, version, superTypes, numFields);
- infoToNameMap = null;
- }
-
- ClassType(TypeSystem typeSystem, String name, String description, ImmutableSet<String> superTypes, AttributeInfo... fields)
- throws AtlasException {
- this(typeSystem, name, description, AtlasConstants.DEFAULT_TYPE_VERSION, superTypes, fields);
- }
-
- ClassType(TypeSystem typeSystem, String name, String description, String version, ImmutableSet<String> superTypes, AttributeInfo... fields)
- throws AtlasException {
- super(typeSystem, ClassType.class, name, description, version, superTypes, fields);
- infoToNameMap = TypeUtils.buildAttrInfoToNameMap(fieldMapping);
- }
-
- @Override
- public DataTypes.TypeCategory getTypeCategory() {
- return DataTypes.TypeCategory.CLASS;
- }
-
- public void validateId(Id id) throws AtlasException {
- if (id != null) {
- ClassType cType = typeSystem.getDataType(ClassType.class, id.typeName);
- if (isSubType(cType.getName())) {
- return;
- }
- throw new AtlasException(String.format("Id %s is not valid for class %s", id, getName()));
- }
- }
-
- protected Id getId(Object val) throws AtlasException {
- if (val instanceof Referenceable) {
- return ((Referenceable) val).getId();
- }
- throw new AtlasException(String.format("Cannot get id from class %s", val.getClass()));
- }
-
- @Override
- public ITypedReferenceableInstance convert(Object val, Multiplicity m) throws AtlasException {
-
- if (val != null) {
- if (val instanceof ITypedReferenceableInstance) {
- ITypedReferenceableInstance tr = (ITypedReferenceableInstance) val;
- if (!tr.getTypeName().equals(getName())) {
- /*
- * If val is a subType instance; invoke convert on it.
- */
- ClassType valType = typeSystem.getDataType(superTypeClass, tr.getTypeName());
- if (valType.superTypePaths.containsKey(name)) {
- return valType.convert(val, m);
- }
- throw new ValueConversionException(this, val);
- }
- return tr;
- } else if (val instanceof Struct) {
- Struct s = (Struct) val;
- Referenceable r = null;
- Id id = null;
-
- if (!s.typeName.equals(getName())) {
- /*
- * If val is a subType instance; invoke convert on it.
- */
- ClassType valType = typeSystem.getDataType(superTypeClass, s.typeName);
- if (valType.superTypePaths.containsKey(name)) {
- return valType.convert(s, m);
- }
- throw new ValueConversionException(this, val);
- }
-
- if (val instanceof Referenceable) {
- r = (Referenceable) val;
- id = r.getId();
- }
-
- ITypedReferenceableInstance tr =
- r != null ? createInstanceWithTraits(id, null, r, r.getTraits().toArray(new String[0])) :
- createInstance(id);
-
- if (id != null && id.isAssigned()) {
- return tr;
- }
-
- for (Map.Entry<String, AttributeInfo> e : fieldMapping.fields.entrySet()) {
- String attrKey = e.getKey();
- AttributeInfo i = e.getValue();
- Object aVal = s.get(attrKey);
- if (aVal != null && i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- if (!i.isComposite) {
- aVal = ((IReferenceableInstance) aVal).getId();
- }
- }
-
- if(!i.multiplicity.nullAllowed() && !s.getValuesMap().containsKey(attrKey)){
- throw new ValueConversionException.NullConversionException(i.multiplicity,
- String.format(" Value expected for required attribute %s", i.name));
- } else {
- try {
- if (s.getValuesMap().containsKey(attrKey)) {
- tr.set(attrKey, aVal);
- }
- } catch (ValueConversionException ve) {
- throw new ValueConversionException(this, val, ve);
- }
- }
- }
-
- return tr;
- } else if (val instanceof ReferenceableInstance) {
- validateId(((ReferenceableInstance) val).getId());
- return (ReferenceableInstance) val;
- } else {
- throw new ValueConversionException(this, val, "value's class is " + val.getClass().getName());
- }
- }
- if (!m.nullAllowed()) {
- throw new ValueConversionException.NullConversionException(m);
- }
- return null;
- }
-
- @Override
- public ITypedReferenceableInstance createInstance() throws AtlasException {
- return createInstance((String[]) null);
- }
-
- public ITypedReferenceableInstance createInstance(String... traitNames) throws AtlasException {
- return createInstance(null, traitNames);
- }
-
- public ITypedReferenceableInstance createInstance(Id id, String... traitNames) throws AtlasException {
- return createInstanceWithTraits(id, null, null, traitNames);
- }
-
- public ITypedReferenceableInstance createInstance(Id id, AtlasSystemAttributes systemAttributes, String... traitNames) throws AtlasException{
- return createInstanceWithTraits(id, systemAttributes, null, traitNames);
- }
-
- public ITypedReferenceableInstance createInstanceWithTraits(Id id, AtlasSystemAttributes systemAttributes, Referenceable r, String... traitNames)
- throws AtlasException {
-
- ImmutableMap.Builder<String, ITypedStruct> b = new ImmutableBiMap.Builder<>();
- if (traitNames != null) {
- for (String t : traitNames) {
- TraitType tType = typeSystem.getDataType(TraitType.class, t);
- IStruct iTraitObject = r == null ? null : r.getTrait(t);
- ITypedStruct trait = iTraitObject == null ? tType.createInstance() :
- tType.convert(iTraitObject, Multiplicity.REQUIRED);
- b.put(t, trait);
- }
- }
-
- return new ReferenceableInstance(id == null ? new Id(getName()) : id, getName(), systemAttributes, fieldMapping,
- new boolean[fieldMapping.fields.size()], new boolean[fieldMapping.fields.size()],
- fieldMapping.numBools == 0 ? null : new boolean[fieldMapping.numBools],
- fieldMapping.numBytes == 0 ? null : new byte[fieldMapping.numBytes],
- fieldMapping.numShorts == 0 ? null : new short[fieldMapping.numShorts],
- fieldMapping.numInts == 0 ? null : new int[fieldMapping.numInts],
- fieldMapping.numLongs == 0 ? null : new long[fieldMapping.numLongs],
- fieldMapping.numFloats == 0 ? null : new float[fieldMapping.numFloats],
- fieldMapping.numDoubles == 0 ? null : new double[fieldMapping.numDoubles],
- fieldMapping.numBigDecimals == 0 ? null : new BigDecimal[fieldMapping.numBigDecimals],
- fieldMapping.numBigInts == 0 ? null : new BigInteger[fieldMapping.numBigInts],
- fieldMapping.numDates == 0 ? null : new Date[fieldMapping.numDates],
- fieldMapping.numStrings == 0 ? null : new String[fieldMapping.numStrings],
- fieldMapping.numArrays == 0 ? null : new ImmutableList[fieldMapping.numArrays],
- fieldMapping.numMaps == 0 ? null : new ImmutableMap[fieldMapping.numMaps],
- fieldMapping.numStructs == 0 ? null : new StructInstance[fieldMapping.numStructs],
- fieldMapping.numReferenceables == 0 ? null : new ReferenceableInstance[fieldMapping.numReferenceables],
- fieldMapping.numReferenceables == 0 ? null : new Id[fieldMapping.numReferenceables], b.build());
- }
-
- @Override
- public void output(IReferenceableInstance s, Appendable buf, String prefix, Set<IReferenceableInstance> inProcess) throws AtlasException {
- fieldMapping.output(s, buf, prefix, inProcess);
- }
-
- @Override
- public List<String> getNames(AttributeInfo info) {
- return infoToNameMap.get(info);
- }
-
- @Override
- public void updateSignatureHash(MessageDigest digester, Object val) throws AtlasException {
- if( !(val instanceof ITypedReferenceableInstance)) {
- throw new IllegalArgumentException("Unexpected value type " + val.getClass().getSimpleName() + ". Expected instance of ITypedStruct");
- }
- digester.update(getName().getBytes(Charset.forName("UTF-8")));
-
- if(fieldMapping.fields != null && val != null) {
- IReferenceableInstance typedValue = (IReferenceableInstance) val;
- if(fieldMapping.fields.values() != null) {
- for (AttributeInfo aInfo : fieldMapping.fields.values()) {
- Object attrVal = typedValue.get(aInfo.name);
- if (attrVal != null) {
- aInfo.dataType().updateSignatureHash(digester, attrVal);
- }
- }
- }
- }
- }
-}
\ No newline at end of file
[28/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/memory/MemRepository.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/memory/MemRepository.java b/repository/src/main/java/org/apache/atlas/repository/memory/MemRepository.java
deleted file mode 100755
index aef06a4..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/memory/MemRepository.java
+++ /dev/null
@@ -1,299 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.memory;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.repository.DiscoverInstances;
-import org.apache.atlas.repository.IRepository;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.MapIds;
-import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDependencySorter;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.ObjectGraphWalker;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.concurrent.atomic.AtomicInteger;
-
-@Deprecated
-public class MemRepository implements IRepository {
-
- final TypeSystem typeSystem;
- /*
- * A Store for each Class and Trait.
- */
- final Map<String, HierarchicalTypeStore> typeStores;
- final AtomicInteger ID_SEQ = new AtomicInteger(0);
-
- public MemRepository(TypeSystem typeSystem) {
- this.typeSystem = typeSystem;
- this.typeStores = new HashMap<>();
- }
-
- @Override
- public Id newId(String typeName) {
- return new Id("" + ID_SEQ.incrementAndGet(), 0, typeName);
- }
-
- /**
- * 1. traverse the Object Graph from i and create idToNewIdMap : Map[Id, Id],
- * also create old Id to Instance Map: oldIdToInstance : Map[Id, IInstance]
- * - traverse reference Attributes, List[ClassType], Maps where Key/value is ClassType
- * - traverse Structs
- * - traverse Traits.
- * 1b. Ensure that every newId has an associated Instance.
- * 2. Traverse oldIdToInstance map create newInstances : List[ITypedReferenceableInstance]
- * - create a ITypedReferenceableInstance.
- * replace any old References ( ids or object references) with new Ids.
- * 3. Traverse over newInstances
- * - ask ClassStore to assign a position to the Id.
- * - for Instances with Traits, assign a position for each Trait
- * - invoke store on the nwInstance.
- *
- * Recovery:
- * - on each newInstance, invoke releaseId and delete on its ClassStore and Traits' Stores.
- *
- * @param i
- * @return
- * @throws org.apache.atlas.repository.RepositoryException
- */
- public ITypedReferenceableInstance create(IReferenceableInstance i) throws RepositoryException {
-
- DiscoverInstances discoverInstances = new DiscoverInstances(this);
-
- /*
- * Step 1: traverse the Object Graph from i and create idToNewIdMap : Map[Id, Id],
- * also create old Id to Instance Map: oldIdToInstance : Map[Id, IInstance]
- * - traverse reference Attributes, List[ClassType], Maps where Key/value is ClassType
- * - traverse Structs
- * - traverse Traits.
- */
- try {
- new ObjectGraphWalker(typeSystem, discoverInstances, i).walk();
- } catch (AtlasException me) {
- throw new RepositoryException("TypeSystem error when walking the ObjectGraph", me);
- }
-
- /*
- * Step 1b: Ensure that every newId has an associated Instance.
- */
- for (Id oldId : discoverInstances.idToNewIdMap.keySet()) {
- if (!discoverInstances.idToInstanceMap.containsKey(oldId)) {
- throw new RepositoryException(String.format("Invalid Object Graph: "
- + "Encountered an unassignedId %s that is not associated with an Instance", oldId));
- }
- }
-
- /* Step 2: Traverse oldIdToInstance map create newInstances :
- List[ITypedReferenceableInstance]
- * - create a ITypedReferenceableInstance.
- * replace any old References ( ids or object references) with new Ids.
- */
- List<ITypedReferenceableInstance> newInstances = new ArrayList<>();
- ITypedReferenceableInstance retInstance = null;
- Set<ClassType> classTypes = new TreeSet<>();
- Set<TraitType> traitTypes = new TreeSet<>();
- for (IReferenceableInstance transientInstance : discoverInstances.idToInstanceMap.values()) {
- try {
- ClassType cT = typeSystem.getDataType(ClassType.class, transientInstance.getTypeName());
- ITypedReferenceableInstance newInstance = cT.convert(transientInstance, Multiplicity.REQUIRED);
- newInstances.add(newInstance);
-
- classTypes.add(cT);
- for (String traitName : newInstance.getTraits()) {
- TraitType tT = typeSystem.getDataType(TraitType.class, traitName);
- traitTypes.add(tT);
- }
-
- if (newInstance.getId() == i.getId()) {
- retInstance = newInstance;
- }
-
- /*
- * Now replace old references with new Ids
- */
- MapIds mapIds = new MapIds(discoverInstances.idToNewIdMap);
- new ObjectGraphWalker(typeSystem, mapIds, newInstances).walk();
-
- } catch (AtlasException me) {
- throw new RepositoryException(
- String.format("Failed to create Instance(id = %s", transientInstance.getId()), me);
- }
- }
-
- /*
- * 3. Acquire Class and Trait Storage locks.
- * - acquire them in a stable order (super before subclass, classes before traits
- */
- for (ClassType cT : classTypes) {
- HierarchicalTypeStore st = typeStores.get(cT.getName());
- st.acquireWriteLock();
- }
-
- for (TraitType tT : traitTypes) {
- HierarchicalTypeStore st = typeStores.get(tT.getName());
- st.acquireWriteLock();
- }
-
-
- /*
- * 4. Traverse over newInstances
- * - ask ClassStore to assign a position to the Id.
- * - for Instances with Traits, assign a position for each Trait
- * - invoke store on the nwInstance.
- */
- try {
- for (ITypedReferenceableInstance instance : newInstances) {
- HierarchicalTypeStore st = typeStores.get(instance.getTypeName());
- st.assignPosition(instance.getId());
- for (String traitName : instance.getTraits()) {
- HierarchicalTypeStore tt = typeStores.get(traitName);
- tt.assignPosition(instance.getId());
- }
- }
-
- for (ITypedReferenceableInstance instance : newInstances) {
- HierarchicalTypeStore st = typeStores.get(instance.getTypeName());
- st.store((ReferenceableInstance) instance);
- for (String traitName : instance.getTraits()) {
- HierarchicalTypeStore tt = typeStores.get(traitName);
- tt.store((ReferenceableInstance) instance);
- }
- }
- } catch (RepositoryException re) {
- for (ITypedReferenceableInstance instance : newInstances) {
- HierarchicalTypeStore st = typeStores.get(instance.getTypeName());
- st.releaseId(instance.getId());
- }
- throw re;
- } finally {
- for (ClassType cT : classTypes) {
- HierarchicalTypeStore st = typeStores.get(cT.getName());
- st.releaseWriteLock();
- }
-
- for (TraitType tT : traitTypes) {
- HierarchicalTypeStore st = typeStores.get(tT.getName());
- st.releaseWriteLock();
- }
- }
-
- return retInstance;
- }
-
- public ITypedReferenceableInstance update(ITypedReferenceableInstance i) throws RepositoryException {
- throw new RepositoryException("not implemented");
- }
-
- public void delete(ITypedReferenceableInstance i) throws RepositoryException {
- throw new RepositoryException("not implemented");
- }
-
- public ITypedReferenceableInstance get(Id id) throws RepositoryException {
-
- try {
- ReplaceIdWithInstance replacer = new ReplaceIdWithInstance(this);
- ObjectGraphWalker walker = new ObjectGraphWalker(typeSystem, replacer);
- replacer.setWalker(walker);
- ITypedReferenceableInstance r = getDuringWalk(id, walker);
- walker.walk();
- return r;
- } catch (AtlasException me) {
- throw new RepositoryException("TypeSystem error when walking the ObjectGraph", me);
- }
- }
-
- /*
- * - Id must be valid; Class must be valid.
- * - Ask ClassStore to createInstance.
- * - Ask ClassStore to load instance.
- * - load instance traits
- * - add to GraphWalker
- */
- ITypedReferenceableInstance getDuringWalk(Id id, ObjectGraphWalker walker) throws RepositoryException {
- ClassStore cS = getClassStore(id.getTypeName());
- if (cS == null) {
- throw new RepositoryException(String.format("Unknown Class %s", id.getTypeName()));
- }
- cS.validate(this, id);
- ReferenceableInstance r = cS.createInstance(this, id);
- cS.load(r);
- for (String traitName : r.getTraits()) {
- HierarchicalTypeStore tt = typeStores.get(traitName);
- tt.load(r);
- }
-
- walker.addRoot(r);
- return r;
- }
-
- HierarchicalTypeStore getStore(String typeName) {
- return typeStores.get(typeName);
- }
-
- ClassStore getClassStore(String typeName) {
- return (ClassStore) getStore(typeName);
- }
-
- public void defineClass(ClassType type) throws RepositoryException {
- HierarchicalTypeStore s = new ClassStore(this, type);
- typeStores.put(type.getName(), s);
- }
-
- public void defineTrait(TraitType type) throws RepositoryException {
- HierarchicalTypeStore s = new TraitStore(this, type);
- typeStores.put(type.getName(), s);
- }
-
- public void defineTypes(List<HierarchicalType> types) throws RepositoryException {
- List<TraitType> tTypes = new ArrayList<>();
- List<ClassType> cTypes = new ArrayList<>();
-
- for (HierarchicalType h : types) {
- if (h.getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
- tTypes.add((TraitType) h);
- } else {
- cTypes.add((ClassType) h);
- }
- }
- tTypes = HierarchicalTypeDependencySorter.sortTypes(tTypes);
- cTypes = HierarchicalTypeDependencySorter.sortTypes(cTypes);
-
- for (TraitType tT : tTypes) {
- defineTrait(tT);
- }
-
- for (ClassType cT : cTypes) {
- defineClass(cT);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/memory/ReplaceIdWithInstance.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/memory/ReplaceIdWithInstance.java b/repository/src/main/java/org/apache/atlas/repository/memory/ReplaceIdWithInstance.java
deleted file mode 100755
index 6741eb8..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/memory/ReplaceIdWithInstance.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.memory;
-
-import com.google.common.collect.ImmutableCollection;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.ObjectGraphWalker;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-
-public class ReplaceIdWithInstance implements ObjectGraphWalker.NodeProcessor {
-
- public final Map<Id, ITypedReferenceableInstance> idToInstanceMap;
- final MemRepository repository;
- ObjectGraphWalker walker;
-
- public ReplaceIdWithInstance(MemRepository repository) {
- this.repository = repository;
- idToInstanceMap = new HashMap<>();
- }
-
- void setWalker(ObjectGraphWalker walker) {
- this.walker = walker;
- }
-
- @Override
- public void processNode(ObjectGraphWalker.Node nd) throws AtlasException {
- if (nd.attributeName != null) {
- if (nd.aInfo.isComposite && nd.value != null) {
- if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- if (nd.value instanceof Id) {
- Id id = (Id) nd.value;
- ITypedReferenceableInstance r = getInstance(id);
- nd.instance.set(nd.attributeName, r);
- }
- } else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
- DataTypes.ArrayType aT = (DataTypes.ArrayType) nd.aInfo.dataType();
- nd.instance.set(nd.attributeName,
- convertToInstances((ImmutableCollection) nd.value, nd.aInfo.multiplicity, aT));
- } else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
- DataTypes.MapType mT = (DataTypes.MapType) nd.aInfo.dataType();
- nd.instance.set(nd.attributeName, convertToInstances((ImmutableMap) nd.value, nd.aInfo.multiplicity, mT));
- }
- }
- }
- }
-
- ImmutableCollection<?> convertToInstances(ImmutableCollection<?> val, Multiplicity m, DataTypes.ArrayType arrType)
- throws AtlasException {
-
- if (val == null || arrType.getElemType().getTypeCategory() != DataTypes.TypeCategory.CLASS) {
- return val;
- }
-
- ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder() : ImmutableList.builder();
- for (Object elem : val) {
- if (elem instanceof Id) {
- Id id = (Id) elem;
- elem = getInstance(id);
- }
-
- b.add(elem);
-
- }
- return b.build();
- }
-
- ImmutableMap<?, ?> convertToInstances(ImmutableMap val, Multiplicity m, DataTypes.MapType mapType)
- throws AtlasException {
-
- if (val == null || (mapType.getKeyType().getTypeCategory() != DataTypes.TypeCategory.CLASS
- && mapType.getValueType().getTypeCategory() != DataTypes.TypeCategory.CLASS)) {
- return val;
- }
- ImmutableMap.Builder b = ImmutableMap.builder();
- for (Map.Entry elem : (Iterable<Map.Entry>) val.entrySet()) {
- Object oldKey = elem.getKey();
- Object oldValue = elem.getValue();
- Object newKey = oldKey;
- Object newValue = oldValue;
-
- if (oldKey instanceof Id) {
- Id id = (Id) elem;
- ITypedReferenceableInstance r = getInstance(id);
- }
-
- if (oldValue instanceof Id) {
- Id id = (Id) elem;
- ITypedReferenceableInstance r = getInstance(id);
- }
-
- b.put(newKey, newValue);
- }
- return b.build();
- }
-
- ITypedReferenceableInstance getInstance(Id id) throws AtlasException {
-
- ITypedReferenceableInstance r = idToInstanceMap.get(id);
- if (r == null) {
- r = repository.get(id);
- idToInstanceMap.put(id, r);
- walker.addRoot(r);
- }
- return r;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/memory/StructStore.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/memory/StructStore.java b/repository/src/main/java/org/apache/atlas/repository/memory/StructStore.java
deleted file mode 100755
index 69a18a5..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/memory/StructStore.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.memory;
-
-import com.google.common.collect.ImmutableBiMap;
-import com.google.common.collect.ImmutableMap;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.typesystem.persistence.StructInstance;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.StructType;
-
-import java.util.Collection;
-import java.util.Map;
-
-@Deprecated
-public class StructStore extends AttributeStores.AbstractAttributeStore implements IAttributeStore {
-
- final StructType structType;
- final ImmutableMap<AttributeInfo, IAttributeStore> attrStores;
-
- StructStore(AttributeInfo aInfo) throws RepositoryException {
- super(aInfo);
- this.structType = (StructType) aInfo.dataType();
- ImmutableMap.Builder<AttributeInfo, IAttributeStore> b = new ImmutableBiMap.Builder<>();
- Collection<AttributeInfo> l = structType.fieldMapping.fields.values();
- for (AttributeInfo i : l) {
- b.put(i, AttributeStores.createStore(i));
- }
- attrStores = b.build();
-
- }
-
- @Override
- protected void store(StructInstance instance, int colPos, int pos) throws RepositoryException {
- StructInstance s = instance.structs[colPos];
- for (Map.Entry<AttributeInfo, IAttributeStore> e : attrStores.entrySet()) {
- IAttributeStore attributeStore = e.getValue();
- attributeStore.store(pos, structType, s);
- }
- }
-
- @Override
- protected void load(StructInstance instance, int colPos, int pos) throws RepositoryException {
- StructInstance s = (StructInstance) structType.createInstance();
- instance.structs[colPos] = s;
- for (Map.Entry<AttributeInfo, IAttributeStore> e : attrStores.entrySet()) {
- IAttributeStore attributeStore = e.getValue();
- attributeStore.load(pos, structType, s);
- }
- }
-
- @Override
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.structs[colPos]);
- }
-
- @Override
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.structs[colPos] = (StructInstance) val;
- }
-
- @Override
- public void ensureCapacity(int pos) throws RepositoryException {
- for (Map.Entry<AttributeInfo, IAttributeStore> e : attrStores.entrySet()) {
- IAttributeStore attributeStore = e.getValue();
- attributeStore.ensureCapacity(pos);
- }
- nullList.size(pos + 1);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/memory/TraitStore.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/memory/TraitStore.java b/repository/src/main/java/org/apache/atlas/repository/memory/TraitStore.java
deleted file mode 100755
index 0cbb32d..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/memory/TraitStore.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.memory;
-
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.StructInstance;
-import org.apache.atlas.typesystem.types.TraitType;
-
-import java.util.ArrayList;
-
-@Deprecated
-public class TraitStore extends HierarchicalTypeStore {
-
- final ArrayList<String> classNameStore;
-
- public TraitStore(MemRepository repository, TraitType hierarchicalType) throws RepositoryException {
- super(repository, hierarchicalType);
- classNameStore = new ArrayList<>();
- }
-
- void store(ReferenceableInstance i) throws RepositoryException {
- int pos = idPosMap.get(i.getId());
- StructInstance s = (StructInstance) i.getTrait(hierarchicalType.getName());
- super.storeFields(pos, s);
- classNameStore.set(pos, i.getTypeName());
- }
-
- void load(ReferenceableInstance i) throws RepositoryException {
- int pos = idPosMap.get(i.getId());
- StructInstance s = (StructInstance) i.getTrait(hierarchicalType.getName());
- super.loadFields(pos, s);
- }
-
- public void ensureCapacity(int pos) throws RepositoryException {
- super.ensureCapacity(pos);
- while (classNameStore.size() < pos + 1) {
- classNameStore.add(null);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java
index 5a0b74e..0fe35b6 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/AtlasEntityStore.java
@@ -32,6 +32,15 @@ import java.util.Map;
* Persistence/Retrieval API for AtlasEntity
*/
public interface AtlasEntityStore {
+
+ /**
+ * List all the entity guids for a given typename
+ * @param typename
+ * @return
+ * @throws AtlasBaseException
+ */
+ List<String> getEntityGUIDS(String typename) throws AtlasBaseException;
+
/**
*
* Get entity definition by its guid
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityChangeNotifier.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityChangeNotifier.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityChangeNotifier.java
index 7b349c4..4c511c1 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityChangeNotifier.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityChangeNotifier.java
@@ -26,13 +26,13 @@ import org.apache.atlas.model.instance.AtlasClassification;
import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.instance.EntityMutationResponse;
import org.apache.atlas.model.instance.EntityMutations.EntityOperation;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.converters.AtlasInstanceConverter;
import org.apache.atlas.repository.graph.FullTextMapperV2;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.util.AtlasRepositoryConfiguration;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
@@ -74,7 +74,7 @@ public class AtlasEntityChangeNotifier {
List<AtlasEntityHeader> partiallyUpdatedEntities = entityMutationResponse.getPartialUpdatedEntities();
List<AtlasEntityHeader> deletedEntities = entityMutationResponse.getDeletedEntities();
- // complete full text mapping before calling toITypedReferenceable(), from notifyListners(), to
+ // complete full text mapping before calling toReferenceables(), from notifyListners(), to
// include all vertex updates in the current graph-transaction
doFullTextMapping(createdEntities);
doFullTextMapping(updatedEntities);
@@ -91,8 +91,8 @@ public class AtlasEntityChangeNotifier {
// appended to the existing fullText
updateFullTextMapping(entityId, classifications);
- ITypedReferenceableInstance entity = toITypedReferenceable(entityId);
- List<ITypedStruct> traits = toITypedStructs(classifications);
+ Referenceable entity = toReferenceable(entityId);
+ List<Struct> traits = toStruct(classifications);
if (entity == null || CollectionUtils.isEmpty(traits)) {
return;
@@ -111,7 +111,7 @@ public class AtlasEntityChangeNotifier {
// Since the entity has already been modified in the graph, we need to recursively remap the entity
doFullTextMapping(entityId);
- ITypedReferenceableInstance entity = toITypedReferenceable(entityId);
+ Referenceable entity = toReferenceable(entityId);
if (entity == null || CollectionUtils.isEmpty(traitNames)) {
return;
@@ -130,8 +130,8 @@ public class AtlasEntityChangeNotifier {
// Since the classification attributes are updated in the graph, we need to recursively remap the entityText
doFullTextMapping(entityId);
- ITypedReferenceableInstance entity = toITypedReferenceable(entityId);
- List<ITypedStruct> traits = toITypedStructs(classifications);
+ Referenceable entity = toReferenceable(entityId);
+ List<Struct> traits = toStruct(classifications);
if (entity == null || CollectionUtils.isEmpty(traits)) {
return;
@@ -155,7 +155,7 @@ public class AtlasEntityChangeNotifier {
return;
}
- List<ITypedReferenceableInstance> typedRefInsts = toITypedReferenceable(entityHeaders);
+ List<Referenceable> typedRefInsts = toReferenceables(entityHeaders);
for (EntityChangeListener listener : entityChangeListeners) {
try {
@@ -177,28 +177,28 @@ public class AtlasEntityChangeNotifier {
}
}
- private List<ITypedReferenceableInstance> toITypedReferenceable(List<AtlasEntityHeader> entityHeaders) throws AtlasBaseException {
- List<ITypedReferenceableInstance> ret = new ArrayList<>(entityHeaders.size());
+ private List<Referenceable> toReferenceables(List<AtlasEntityHeader> entityHeaders) throws AtlasBaseException {
+ List<Referenceable> ret = new ArrayList<>(entityHeaders.size());
for (AtlasEntityHeader entityHeader : entityHeaders) {
- ret.add(instanceConverter.getITypedReferenceable(entityHeader.getGuid()));
+ ret.add(toReferenceable(entityHeader.getGuid()));
}
return ret;
}
- private ITypedReferenceableInstance toITypedReferenceable(String entityId) throws AtlasBaseException {
- ITypedReferenceableInstance ret = null;
+ private Referenceable toReferenceable(String entityId) throws AtlasBaseException {
+ Referenceable ret = null;
if (StringUtils.isNotEmpty(entityId)) {
- ret = instanceConverter.getITypedReferenceable(entityId);
+ ret = instanceConverter.getReferenceable(entityId);
}
return ret;
}
- private List<ITypedStruct> toITypedStructs(List<AtlasClassification> classifications) throws AtlasBaseException {
- List<ITypedStruct> ret = null;
+ private List<Struct> toStruct(List<AtlasClassification> classifications) throws AtlasBaseException {
+ List<Struct> ret = null;
if (classifications != null) {
ret = new ArrayList<>(classifications.size());
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1.java
index a5db81b..89bf7dc 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1.java
@@ -77,6 +77,26 @@ public class AtlasEntityStoreV1 implements AtlasEntityStore {
@Override
@GraphTransaction
+ public List<String> getEntityGUIDS(final String typename) throws AtlasBaseException {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("==> getEntityGUIDS({})", typename);
+ }
+
+ if (StringUtils.isEmpty(typename)) {
+ throw new AtlasBaseException(AtlasErrorCode.UNKNOWN_TYPENAME);
+ }
+
+ List<String> ret = AtlasGraphUtilsV1.findEntityGUIDsByType(typename);
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("<== getEntityGUIDS({})", typename);
+ }
+
+ return ret;
+ }
+
+ @Override
+ @GraphTransaction
public AtlasEntityWithExtInfo getById(String guid) throws AtlasBaseException {
if (LOG.isDebugEnabled()) {
LOG.debug("==> getById({})", guid);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasGraphUtilsV1.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasGraphUtilsV1.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasGraphUtilsV1.java
index 1eb4183..6a6ac60 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasGraphUtilsV1.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasGraphUtilsV1.java
@@ -43,9 +43,12 @@ import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.util.ArrayList;
import java.util.Collection;
+import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
+import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -325,6 +328,22 @@ public class AtlasGraphUtilsV1 {
return vertex;
}
+ public static List<String> findEntityGUIDsByType(String typename) {
+ AtlasGraphQuery query = AtlasGraphProvider.getGraphInstance().query()
+ .has(Constants.ENTITY_TYPE_PROPERTY_KEY, typename);
+ Iterator<AtlasVertex> results = query.vertices().iterator();
+ if (!results.hasNext()) {
+ return Collections.emptyList();
+ }
+
+ ArrayList<String> entityList = new ArrayList<>();
+ while (results.hasNext()) {
+ entityList.add(getIdFromVertex(results.next()));
+ }
+
+ return entityList;
+ }
+
public static boolean relationshipTypeHasInstanceEdges(String typeName) throws AtlasBaseException {
AtlasGraphQuery query = AtlasGraphProvider.getGraphInstance()
.query()
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasStructDefStoreV1.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasStructDefStoreV1.java b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasStructDefStoreV1.java
index 86f1b88..f1d9031 100644
--- a/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasStructDefStoreV1.java
+++ b/repository/src/main/java/org/apache/atlas/repository/store/graph/v1/AtlasStructDefStoreV1.java
@@ -23,6 +23,7 @@ import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.typedef.AtlasStructDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef;
+import org.apache.atlas.v1.model.typedef.AttributeDefinition;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.repository.graphdb.AtlasVertex;
@@ -31,12 +32,9 @@ import org.apache.atlas.type.AtlasStructType.AtlasAttribute;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.atlas.type.AtlasTypeUtil;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.AttributeInfo;
import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
-import org.codehaus.jettison.json.JSONException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -547,15 +545,9 @@ public class AtlasStructDefStoreV1 extends AtlasAbstractDefStoreV1<AtlasStructDe
}
public static AttributeDefinition toAttributeDefintion(AtlasAttribute attribute) {
- AttributeDefinition ret = null;
-
String jsonString = toJsonFromAttribute(attribute);
- try {
- ret = AttributeInfo.fromJson(jsonString);
- } catch (JSONException excp) {
- LOG.error("failed in converting to AttributeDefinition: " + jsonString, excp);
- }
+ AttributeDefinition ret = AtlasType.fromV1Json(jsonString, AttributeDefinition.class);
return ret;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/typestore/GraphBackedTypeStore.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/typestore/GraphBackedTypeStore.java b/repository/src/main/java/org/apache/atlas/repository/typestore/GraphBackedTypeStore.java
deleted file mode 100644
index 2dd339c..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/typestore/GraphBackedTypeStore.java
+++ /dev/null
@@ -1,394 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.typestore;
-
-import com.google.common.base.Function;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Lists;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.annotation.GraphTransaction;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.*;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.codehaus.jettison.json.JSONException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Component;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import static org.apache.atlas.repository.graph.GraphHelper.setProperty;
-
-@Singleton
-@Component
-@Deprecated
-public class GraphBackedTypeStore implements ITypeStore {
- public static final String VERTEX_TYPE = "typeSystem";
- private static final String PROPERTY_PREFIX = Constants.INTERNAL_PROPERTY_KEY_PREFIX + "type.";
- public static final String SUPERTYPE_EDGE_LABEL = PROPERTY_PREFIX + ".supertype";
-
- private static Logger LOG = LoggerFactory.getLogger(GraphBackedTypeStore.class);
-
- private final AtlasGraph graph;
-
- private GraphHelper graphHelper = GraphHelper.getInstance();
-
- @Inject
- public GraphBackedTypeStore(AtlasGraph atlasGraph) {
- this.graph = atlasGraph;
- }
-
- @Override
- @GraphTransaction
- public void store(TypeSystem typeSystem, ImmutableList<String> typeNames) throws AtlasException {
-
- //Pre-create the vertices that are needed for the types. This allows us to execute
- //one query to determine all of the vertices that already exist.
- Map<String, AtlasVertex> typeVertices = getOrCreateTypeVertices(typeSystem, typeNames);
-
- //Complete the storage process by adding properties and edges to the vertices
- //that were created.
- TypePersistenceVisitor visitor = new TypePersistenceVisitor(this, typeVertices, typeSystem);
- processTypes(typeNames, typeSystem, visitor);
- }
-
- private void processTypes(ImmutableList<String> typeNames, TypeSystem typeSystem, TypeVisitor visitor) throws AtlasException {
- for (String typeName : typeNames) {
- IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
- LOG.debug("Processing {}.{}.{} in type store", dataType.getTypeCategory(), dataType.getName(), dataType.getDescription());
- switch (dataType.getTypeCategory()) {
- case ENUM:
- visitor.visitEnumeration((EnumType)dataType);
- break;
-
- case STRUCT:
- StructType structType = (StructType) dataType;
- processType(typeSystem, dataType.getTypeCategory(), dataType.getName(), dataType.getDescription(),
- ImmutableList.copyOf(structType.infoToNameMap.keySet()), ImmutableSet.<String>of(), visitor);
- break;
-
- case TRAIT:
- case CLASS:
- HierarchicalType type = (HierarchicalType) dataType;
- processType(typeSystem, dataType.getTypeCategory(), dataType.getName(), type.getDescription(), type.immediateAttrs,
- type.superTypes, visitor);
- break;
-
- default: //Ignore primitive/collection types as they are covered under references
- break;
- }
- }
- }
-
- private Map<String, AtlasVertex> getOrCreateTypeVertices(TypeSystem typeSystem, ImmutableList<String> typeNames) throws AtlasException {
-
- //examine the types to determine what type vertices are needed
- TypeVertexFinder vertexFinder = new TypeVertexFinder(typeSystem);
- processTypes(typeNames, typeSystem, vertexFinder);
- List<TypeVertexInfo> typeVerticesNeeded = vertexFinder.getVerticesToCreate();
-
- //find or create the type vertices
- List<AtlasVertex> vertices = createVertices(typeVerticesNeeded);
-
- //Create a type name->AtlasVertex map with the result
- Map<String, AtlasVertex> result = new HashMap<>(typeVerticesNeeded.size());
- for(int i = 0 ; i < typeVerticesNeeded.size(); i++) {
- TypeVertexInfo createdVertexInfo = typeVerticesNeeded.get(i);
- AtlasVertex createdVertex = vertices.get(i);
- result.put(createdVertexInfo.getTypeName(), createdVertex);
- }
- return result;
-
- }
-
-
- static String getPropertyKey(String name) {
- return PROPERTY_PREFIX + name;
- }
-
- static String getPropertyKey(String parent, String child) {
- return PROPERTY_PREFIX + parent + "." + child;
- }
-
- static String getEdgeLabel(String parent, String child) {
- return PROPERTY_PREFIX + "edge." + parent + "." + child;
- }
-
- private void processType(TypeSystem typeSystem, DataTypes.TypeCategory category, String typeName, String typeDescription,
- ImmutableList<AttributeInfo> attributes, ImmutableSet<String> superTypes, TypeVisitor visitor) throws AtlasException {
-
- visitor.visitDataType(category, typeName, typeDescription);
-
- List<String> attrNames = new ArrayList<>();
- if (attributes != null) {
- for (AttributeInfo attribute : attributes) {
- visitor.visitAttribute(typeName, attribute);
- attrNames.add(attribute.name);
- processsAttribute(typeSystem, typeName, attribute, visitor);
- }
- }
- visitor.visitAttributeNames(typeName, attrNames);
-
- //Add edges for hierarchy
- if (superTypes != null) {
- for (String superTypeName : superTypes) {
- visitor.visitSuperType(typeName, superTypeName);
- }
- }
- }
-
- private void processsAttribute(TypeSystem typeSystem, String typeName, AttributeInfo attribute, TypeVisitor visitor)
- throws AtlasException {
-
- ImmutableList<String> coreTypes = typeSystem.getCoreTypes();
- List<IDataType> attrDataTypes = new ArrayList<>();
- IDataType attrDataType = attribute.dataType();
-
-
- switch (attrDataType.getTypeCategory()) {
- case ARRAY:
- String attrType = TypeUtils.parseAsArrayType(attrDataType.getName());
- if(attrType != null) {
- IDataType elementType = typeSystem.getDataType(IDataType.class, attrType);
- attrDataTypes.add(elementType);
- }
- break;
-
- case MAP:
- String[] attrTypes = TypeUtils.parseAsMapType(attrDataType.getName());
- if(attrTypes != null && attrTypes.length > 1) {
- IDataType keyType = typeSystem.getDataType(IDataType.class, attrTypes[0]);
- IDataType valueType = typeSystem.getDataType(IDataType.class, attrTypes[1]);
- attrDataTypes.add(keyType);
- attrDataTypes.add(valueType);
- }
- break;
-
- case ENUM:
- case STRUCT:
- case CLASS:
- attrDataTypes.add(attrDataType);
- break;
-
- case PRIMITIVE: //no vertex for primitive type, hence no edge required
- break;
-
- default:
- throw new IllegalArgumentException(
- "Attribute cannot reference instances of type : " + attrDataType.getTypeCategory());
- }
-
-
- for (IDataType attrType : attrDataTypes) {
- if (!coreTypes.contains(attrType.getName())) {
- visitor.visitAttributeDataType(typeName, attribute, attrType);
- }
- }
- }
-
- @Override
- @GraphTransaction
- public TypesDef restore() throws AtlasException {
- //Get all vertices for type system
- Iterator vertices =
- graph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE).vertices().iterator();
-
- return getTypesFromVertices(vertices);
- }
-
- @Override
- @GraphTransaction
- public TypesDef restoreType(String typeName) throws AtlasException {
- // Get AtlasVertex for the specified type name.
- Iterator vertices =
- graph.query().has(Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE).has(Constants.TYPENAME_PROPERTY_KEY, typeName).vertices().iterator();
-
- return getTypesFromVertices(vertices);
- }
-
- private TypesDef getTypesFromVertices(Iterator<AtlasVertex> vertices) throws AtlasException {
- ImmutableList.Builder<EnumTypeDefinition> enums = ImmutableList.builder();
- ImmutableList.Builder<StructTypeDefinition> structs = ImmutableList.builder();
- ImmutableList.Builder<HierarchicalTypeDefinition<ClassType>> classTypes = ImmutableList.builder();
- ImmutableList.Builder<HierarchicalTypeDefinition<TraitType>> traits = ImmutableList.builder();
-
- while (vertices.hasNext()) {
- AtlasVertex vertex = vertices.next();
- DataTypes.TypeCategory typeCategory = GraphHelper.getSingleValuedProperty(vertex, Constants.TYPE_CATEGORY_PROPERTY_KEY, TypeCategory.class);
- String typeName = GraphHelper.getSingleValuedProperty(vertex, Constants.TYPENAME_PROPERTY_KEY, String.class);
- String typeDescription = GraphHelper.getSingleValuedProperty(vertex, Constants.TYPEDESCRIPTION_PROPERTY_KEY, String.class);
- LOG.info("Restoring type {}.{}.{}", typeCategory, typeName, typeDescription);
- switch (typeCategory) {
- case ENUM:
- enums.add(getEnumType(vertex));
- break;
-
- case STRUCT:
- AttributeDefinition[] attributes = getAttributes(vertex, typeName);
- structs.add(new StructTypeDefinition(typeName, typeDescription, attributes));
- break;
-
- case CLASS:
- ImmutableSet<String> superTypes = getSuperTypes(vertex);
- attributes = getAttributes(vertex, typeName);
- classTypes.add(new HierarchicalTypeDefinition(ClassType.class, typeName, typeDescription, superTypes, attributes));
- break;
-
- case TRAIT:
- superTypes = getSuperTypes(vertex);
- attributes = getAttributes(vertex, typeName);
- traits.add(new HierarchicalTypeDefinition(TraitType.class, typeName, typeDescription, superTypes, attributes));
- break;
-
- case RELATIONSHIP:
- // v1 typesystem is not notified on new relation type
- break;
-
- default:
- throw new IllegalArgumentException("Unhandled type category " + typeCategory);
- }
- }
- return TypesUtil.getTypesDef(enums.build(), structs.build(), traits.build(), classTypes.build());
- }
-
- private EnumTypeDefinition getEnumType(AtlasVertex vertex) throws AtlasException {
- String typeName = GraphHelper.getSingleValuedProperty(vertex, Constants.TYPENAME_PROPERTY_KEY, String.class);
- String typeDescription = GraphHelper.getSingleValuedProperty(vertex, Constants.TYPEDESCRIPTION_PROPERTY_KEY, String.class);
- List<EnumValue> enumValues = new ArrayList<>();
- List<String> values = GraphHelper.getListProperty(vertex, getPropertyKey(typeName));
- for (String value : values) {
- String valueProperty = getPropertyKey(typeName, value);
- enumValues.add(new EnumValue(value, GraphHelper.getSingleValuedProperty(vertex, valueProperty, Integer.class)));
- }
- return new EnumTypeDefinition(typeName, typeDescription, enumValues.toArray(new EnumValue[enumValues.size()]));
- }
-
- private ImmutableSet<String> getSuperTypes(AtlasVertex vertex) {
- Set<String> superTypes = new HashSet<>();
- for (AtlasEdge edge : (Iterable<AtlasEdge>) vertex.getEdges(AtlasEdgeDirection.OUT, SUPERTYPE_EDGE_LABEL)) {
- superTypes.add(edge.getInVertex().getProperty(Constants.TYPENAME_PROPERTY_KEY, String.class));
- }
- return ImmutableSet.copyOf(superTypes);
- }
-
- private AttributeDefinition[] getAttributes(AtlasVertex vertex, String typeName) throws AtlasException {
- List<AttributeDefinition> attributes = new ArrayList<>();
- List<String> attrNames = GraphHelper.getListProperty(vertex, getPropertyKey(typeName));
- if (attrNames != null) {
- for (String attrName : attrNames) {
- try {
- String encodedPropertyKey = GraphHelper.encodePropertyKey(getPropertyKey(typeName, attrName));
- AttributeDefinition attrValue = AttributeInfo.fromJson((String) vertex.getJsonProperty(encodedPropertyKey));
- if (attrValue != null)
- {
- attributes.add(attrValue);
- }
- } catch (JSONException e) {
- throw new AtlasException(e);
- }
- }
- }
- return attributes.toArray(new AttributeDefinition[attributes.size()]);
- }
-
- /**
- * Find vertex for the given type category and name, else create new vertex
- * @param category
- * @param typeName
- * @return vertex
- */
- AtlasVertex findVertex(DataTypes.TypeCategory category, String typeName) {
- LOG.debug("Finding AtlasVertex for {}.{}", category, typeName);
-
- Iterator results = graph.query().has(Constants.TYPENAME_PROPERTY_KEY, typeName).vertices().iterator();
- AtlasVertex vertex = null;
- if (results != null && results.hasNext()) {
- //There should be just one AtlasVertex with the given typeName
- vertex = (AtlasVertex) results.next();
- }
- return vertex;
- }
-
- //package-private for testing
- Map<String, AtlasVertex> findVertices(List<String> typeNames) throws RepositoryException {
- LOG.debug("Finding vertices for {}", typeNames.toString());
- Map<String, AtlasVertex> foundVertices = graphHelper.getVerticesForPropertyValues(Constants.TYPENAME_PROPERTY_KEY, typeNames);
- return foundVertices;
-
- }
-
-
- /**
- * Finds or creates type vertices with the information specified.
- *
- * @param infoList
- * @return list with the vertices corresponding to the types in the list.
- * @throws AtlasException
- */
- private List<AtlasVertex> createVertices(List<TypeVertexInfo> infoList) throws AtlasException {
-
- List<AtlasVertex> result = new ArrayList<>(infoList.size());
- List<String> typeNames = Lists.transform(infoList, new Function<TypeVertexInfo,String>() {
- @Override
- public String apply(TypeVertexInfo input) {
- return input.getTypeName();
- }
- });
- Map<String, AtlasVertex> vertices = findVertices(typeNames);
-
- for(TypeVertexInfo info : infoList) {
- AtlasVertex vertex = vertices.get(info.getTypeName());
- if (! GraphHelper.elementExists(vertex)) {
- LOG.debug("Adding vertex {}{}", PROPERTY_PREFIX, info.getTypeName());
- vertex = graph.addVertex();
- setProperty(vertex, Constants.VERTEX_TYPE_PROPERTY_KEY, VERTEX_TYPE); // Mark as type AtlasVertex
- setProperty(vertex, Constants.TYPE_CATEGORY_PROPERTY_KEY, info.getCategory());
- setProperty(vertex, Constants.TYPENAME_PROPERTY_KEY, info.getTypeName());
- }
- String newDescription = info.getTypeDescription();
- if (newDescription != null) {
- String oldDescription = getPropertyKey(Constants.TYPEDESCRIPTION_PROPERTY_KEY);
- if (!newDescription.equals(oldDescription)) {
- setProperty(vertex, Constants.TYPEDESCRIPTION_PROPERTY_KEY, newDescription);
- }
- } else {
- LOG.debug(" type description is null ");
- }
- result.add(vertex);
- }
- return result;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/typestore/ITypeStore.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/typestore/ITypeStore.java b/repository/src/main/java/org/apache/atlas/repository/typestore/ITypeStore.java
deleted file mode 100755
index 84779f4..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/typestore/ITypeStore.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.typestore;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.TypeSystem;
-
-@Deprecated
-public interface ITypeStore {
-
- /**
- * Add types to the underlying type storage layer
- * @param typeSystem {@link TypeSystem} object which contains existing types. To lookup newly added types,
- * an instance of {@link TypeSystem.TransientTypeSystem} can be passed.
- * @param types names of newly added types.
- * @throws AtlasException
- */
- void store(TypeSystem typeSystem, ImmutableList<String> types) throws AtlasException;
-
- /**
- * Restore all type definitions
- * @return List of persisted type definitions
- * @throws AtlasException
- */
- TypesDef restore() throws AtlasException;
-
- /**
- * Restore the specified type definition
- *
- * @param typeName name of requested type
- * @return persisted type definition
- * @throws AtlasException
- */
- TypesDef restoreType(String typeName) throws AtlasException;
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/typestore/StorageException.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/typestore/StorageException.java b/repository/src/main/java/org/apache/atlas/repository/typestore/StorageException.java
deleted file mode 100755
index c4f64f8..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/typestore/StorageException.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.typestore;
-
-import org.apache.atlas.AtlasException;
-
-public class StorageException extends AtlasException {
- public StorageException(String type) {
- super("Failure in typesystem storage for type " + type);
- }
-
- public StorageException(String type, Throwable cause) {
- super("Failure in typesystem storage for type " + type, cause);
- }
-
- public StorageException(Throwable cause) {
- super("Failure in type system storage", cause);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/typestore/StoreBackedTypeCache.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/typestore/StoreBackedTypeCache.java b/repository/src/main/java/org/apache/atlas/repository/typestore/StoreBackedTypeCache.java
deleted file mode 100644
index f472fa6..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/typestore/StoreBackedTypeCache.java
+++ /dev/null
@@ -1,239 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.typestore;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.annotation.ConditionalOnAtlasProperty;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.TypeSystem.TransientTypeSystem;
-import org.apache.atlas.typesystem.types.TypeUtils;
-import org.apache.atlas.typesystem.types.cache.DefaultTypeCache;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.springframework.stereotype.Component;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-
-/**
- * An extension of {@link DefaultTypeCache} which loads
- * the requested type from the type store if it is not found in the cache,
- * and adds it to the cache if it's found in the store.
- * Any attribute and super types that are required by the requested type
- * are also loaded from the store if they are not already in the cache.
- */
-@Singleton
-@Component
-@Deprecated
-@ConditionalOnAtlasProperty(property = "atlas.TypeCache.impl")
-public class StoreBackedTypeCache extends DefaultTypeCache {
-
- private ITypeStore typeStore;
-
- private ImmutableList<String> coreTypes;
- private TypeSystem typeSystem;
-
- @Inject
- public StoreBackedTypeCache(final ITypeStore typeStore) {
- this.typeStore = typeStore;
- typeSystem = TypeSystem.getInstance();
- coreTypes = typeSystem.getCoreTypes();
- }
-
- private static class Context {
- ImmutableList.Builder<EnumTypeDefinition> enums = ImmutableList.builder();
- ImmutableList.Builder<StructTypeDefinition> structs = ImmutableList.builder();
- ImmutableList.Builder<HierarchicalTypeDefinition<ClassType>> classTypes = ImmutableList.builder();
- ImmutableList.Builder<HierarchicalTypeDefinition<TraitType>> traits = ImmutableList.builder();
- Set<String> loadedFromStore = new HashSet<>();
-
- public void addTypesDefToLists(TypesDef typesDef) {
-
- List<EnumTypeDefinition> enumTypesAsJavaList = typesDef.enumTypesAsJavaList();
- enums.addAll(enumTypesAsJavaList);
- for (EnumTypeDefinition etd : enumTypesAsJavaList) {
- loadedFromStore.add(etd.name);
- }
- List<StructTypeDefinition> structTypesAsJavaList = typesDef.structTypesAsJavaList();
- structs.addAll(structTypesAsJavaList);
- for (StructTypeDefinition std : structTypesAsJavaList) {
- loadedFromStore.add(std.typeName);
- }
- List<HierarchicalTypeDefinition<ClassType>> classTypesAsJavaList = typesDef.classTypesAsJavaList();
- classTypes.addAll(classTypesAsJavaList);
- for (HierarchicalTypeDefinition<ClassType> classTypeDef : classTypesAsJavaList) {
- loadedFromStore.add(classTypeDef.typeName);
- }
- List<HierarchicalTypeDefinition<TraitType>> traitTypesAsJavaList = typesDef.traitTypesAsJavaList();
- traits.addAll(traitTypesAsJavaList);
- for (HierarchicalTypeDefinition<TraitType> traitTypeDef : traitTypesAsJavaList) {
- loadedFromStore.add(traitTypeDef.typeName);
- }
- }
-
- public boolean isLoadedFromStore(String typeName) {
- return loadedFromStore.contains(typeName);
- }
-
- public TypesDef getTypesDef() {
- return TypesUtil.getTypesDef(enums.build(), structs.build(), traits.build(), classTypes.build());
- }
- }
-
- /**
- * Checks whether the specified type is cached in memory and does *not*
- * access the type store. Used for testing.
- *
- * @param typeName
- * @return
- */
- public boolean isCachedInMemory(String typeName) throws AtlasException {
- return super.has(typeName);
- }
-
- /**
- * Check the type store for the requested type.
- * If found in the type store, the type and any required super and attribute types
- * are loaded from the type store, and added to the cache.
- */
- @Override
- public IDataType onTypeFault(String typeName) throws AtlasException {
-
- // Type is not cached - check the type store.
- // Any super and attribute types needed by the requested type
- // which are not cached will also be loaded from the store.
- Context context = new Context();
- TypesDef typesDef = getTypeFromStore(typeName, context);
- if (typesDef.isEmpty()) {
- // Type not found in the type store.
- return null;
- }
-
- // Add all types that were loaded from the store to the cache.
- TransientTypeSystem transientTypeSystem = typeSystem.createTransientTypeSystem(context.getTypesDef(), false);
- Map<String, IDataType> typesAdded = transientTypeSystem.getTypesAdded();
- putAll(typesAdded.values());
- return typesAdded.get(typeName);
- }
-
- private void getTypeFromCacheOrStore(String typeName, Context context)
- throws AtlasException {
-
- if (coreTypes.contains(typeName) || super.has(typeName)) {
- return;
- }
-
- if (context.isLoadedFromStore(typeName)) {
- return;
- }
-
- // Type not cached and hasn't been loaded during this operation, so check the store.
- TypesDef typesDef = getTypeFromStore(typeName, context);
- if (typesDef.isEmpty()) {
- // Attribute type not found in cache or store.
- throw new AtlasException(typeName + " not found in type store");
- }
- }
-
- private TypesDef getTypeFromStore(String typeName, Context context)
- throws AtlasException {
-
- TypesDef typesDef = typeStore.restoreType(typeName);
- if (!typesDef.isEmpty()) {
- // Type found in store, add it to lists.
- context.addTypesDefToLists(typesDef);
-
- // Check the attribute and super types that are
- // used by the requested type, and restore them
- // as needed.
- checkAttributeAndSuperTypes(typesDef, context);
- }
- return typesDef;
- }
-
- private void checkAttributeAndSuperTypes(TypesDef typesDef, Context context)
- throws AtlasException {
-
- // Check the cache and store for attribute types and super types.
- for (HierarchicalTypeDefinition<ClassType> classTypeDef : typesDef.classTypesAsJavaList()) {
- checkAttributeTypes(classTypeDef.attributeDefinitions, context);
- for (String superTypeName : classTypeDef.superTypes) {
- getTypeFromCacheOrStore(superTypeName, context);
- }
- }
- for (HierarchicalTypeDefinition<TraitType> traitTypeDef : typesDef.traitTypesAsJavaList()) {
- checkAttributeTypes(traitTypeDef.attributeDefinitions, context);
- for (String superTypeName : traitTypeDef.superTypes) {
- getTypeFromCacheOrStore(superTypeName, context);
- }
- }
- for (StructTypeDefinition structTypeDef : typesDef.structTypesAsJavaList()) {
- checkAttributeTypes(structTypeDef.attributeDefinitions, context);
- }
- }
-
- private void checkAttributeTypes(AttributeDefinition[] attributeDefinitions,
- Context context) throws AtlasException {
-
- for (AttributeDefinition attrDef : attributeDefinitions) {
- checkAttributeType(attrDef, context);
- }
- }
-
- private void checkAttributeType(AttributeDefinition attrDef, Context context) throws AtlasException {
-
- List<String> typeNamesToLookup = new ArrayList<>(2);
-
- // Get the attribute type(s).
- String elementTypeName = TypeUtils.parseAsArrayType(attrDef.dataTypeName);
- if (elementTypeName != null) {
- // Array attribute, lookup the element type.
- typeNamesToLookup.add(elementTypeName);
- }
- else {
- String[] mapTypeNames = TypeUtils.parseAsMapType(attrDef.dataTypeName);
- if (mapTypeNames != null) {
- // Map attribute, lookup the key and value types.
- typeNamesToLookup.addAll(Arrays.asList(mapTypeNames));
- }
- else {
- // Not an array or map, lookup the attribute type.
- typeNamesToLookup.add(attrDef.dataTypeName);
- }
- }
-
- for (String typeName : typeNamesToLookup) {
- getTypeFromCacheOrStore(typeName, context);
- }
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/typestore/TypePersistenceVisitor.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/typestore/TypePersistenceVisitor.java b/repository/src/main/java/org/apache/atlas/repository/typestore/TypePersistenceVisitor.java
deleted file mode 100644
index bfb1bfc..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/typestore/TypePersistenceVisitor.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.typestore;
-
-import static org.apache.atlas.repository.graph.GraphHelper.setProperty;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.EnumType;
-import org.apache.atlas.typesystem.types.EnumValue;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.codehaus.jettison.json.JSONException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * TypeVisitor implementation that completes the type storage process by
- * adding the required properties and edges to the type vertices
- * that were created.
- */
-public class TypePersistenceVisitor implements TypeVisitor {
-
- private static final Logger LOG = LoggerFactory.getLogger(TypePersistenceVisitor.class);
- private static final GraphHelper graphHelper = GraphHelper.getInstance();
-
- private final GraphBackedTypeStore typeStore_;
- private final Map<String,AtlasVertex> typeVertices;
- private final TypeSystem typeSystem;
-
- /**
- * @param graphBackedTypeStore
- */
- public TypePersistenceVisitor(GraphBackedTypeStore graphBackedTypeStore, Map<String,AtlasVertex> typeVertices, TypeSystem typeSystem) {
- typeStore_ = graphBackedTypeStore;
- this.typeVertices = typeVertices;
- this.typeSystem = typeSystem;
- }
-
- @Override
- public void visitEnumeration(EnumType dataType) throws AtlasException {
- AtlasVertex vertex = typeVertices.get(dataType.getName());
- List<String> values = new ArrayList<>(dataType.values().size());
- for (EnumValue enumValue : dataType.values()) {
- String key = GraphBackedTypeStore.getPropertyKey(dataType.getName(), enumValue.value);
- setProperty(vertex, key, enumValue.ordinal);
- values.add(enumValue.value);
- }
- setProperty(vertex, GraphBackedTypeStore.getPropertyKey(dataType.getName()), values);
-
- }
- @Override
- public void visitAttributeDataType(String typeName, AttributeInfo attribute, IDataType attrType) throws AtlasException {
- AtlasVertex vertex = typeVertices.get(typeName);
- String vertexTypeName = GraphHelper.getSingleValuedProperty(vertex, Constants.TYPENAME_PROPERTY_KEY, String.class);
- AtlasVertex attrVertex = typeVertices.get(attrType.getName());
- String label = GraphBackedTypeStore.getEdgeLabel(vertexTypeName, attribute.name);
- graphHelper.getOrCreateEdge(vertex, attrVertex, label);
- }
- @Override
- public void visitSuperType(String typeName, String superTypeName) throws AtlasException {
- AtlasVertex vertex = typeVertices.get(typeName);
- HierarchicalType superType = typeSystem.getDataType(HierarchicalType.class, superTypeName);
- AtlasVertex superVertex = typeVertices.get(superTypeName);
- graphHelper.getOrCreateEdge(vertex, superVertex, GraphBackedTypeStore.SUPERTYPE_EDGE_LABEL);
- }
-
- @Override
- public void visitAttributeNames(String typeName, List<String> attrNames) throws AtlasException {
- AtlasVertex vertex = typeVertices.get(typeName);
- setProperty(vertex, GraphBackedTypeStore.getPropertyKey(typeName), attrNames);
-
- }
-
- @Override
- public void visitAttribute(String typeName, AttributeInfo attribute) throws AtlasException {
- AtlasVertex vertex = typeVertices.get(typeName);
- String propertyKey = GraphBackedTypeStore.getPropertyKey(typeName, attribute.name);
- try {
- setProperty(vertex, propertyKey, attribute.toJson());
- } catch (JSONException e) {
- throw new StorageException(typeName, e);
- }
- }
-
- @Override
- public void visitDataType(TypeCategory category, String typeName, String typeDescription) {
- //nothing to do
-
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVertexFinder.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVertexFinder.java b/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVertexFinder.java
deleted file mode 100644
index 8b38152..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVertexFinder.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.typestore;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.EnumType;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-
-/**
- * TypeVisitor implementation that builds up a list of type vertices
- * that need to be created for the types that are being stored.
- *
- */
-public class TypeVertexFinder implements TypeVisitor {
-
- private final List<TypeVertexInfo> toCreate = new ArrayList<TypeVertexInfo>();
- private final Set<String> typesIncluded = new HashSet<String>();
- private final TypeSystem typeSystem;
-
- public TypeVertexFinder(TypeSystem ts) {
- typeSystem = ts;
- }
-
-
- @Override
- public void visitEnumeration(EnumType dataType) {
- visitDataType(dataType);
- }
-
- private void addTypeIfNeeded(TypeVertexInfo info) {
- if(! typesIncluded.contains(info.getTypeName())) {
- toCreate.add(info);
- typesIncluded.add(info.getTypeName());
- }
- }
-
- @Override
- public void visitAttributeDataType(String typeName, AttributeInfo sourceAttr, IDataType attrType) throws AtlasException {
- visitDataType(attrType);
- }
-
- @Override
- public void visitSuperType(String typeName, String superTypeName) throws AtlasException {
- HierarchicalType superType = typeSystem.getDataType(HierarchicalType.class, superTypeName);
- visitDataType(superType);
- }
-
- @Override
- public void visitAttributeNames(String typeName, List<String> attrNames) throws AtlasException {
- //nothing to do
-
- }
-
- @Override
- public void visitAttribute(String typeName, AttributeInfo attribute) throws StorageException, AtlasException {
- //nothing to do
- }
-
-
- private void visitDataType(IDataType dataType) {
- TypeVertexInfo info = null;
- info = new TypeVertexInfo(dataType.getTypeCategory(), dataType.getName(), dataType.getDescription());
- addTypeIfNeeded(info);
-
- }
-
-
- public List<TypeVertexInfo> getVerticesToCreate() {
- return toCreate;
- }
-
- @Override
- public void visitDataType(TypeCategory category, String typeName, String typeDescription) {
- TypeVertexInfo info = new TypeVertexInfo(category, typeName, typeDescription);
- addTypeIfNeeded(info);
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVertexInfo.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVertexInfo.java b/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVertexInfo.java
deleted file mode 100644
index 32a9a19..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVertexInfo.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.typestore;
-
-import java.util.Objects;
-
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-
-/**
- * Records the information needed to create a particular type vertex.
- */
-public class TypeVertexInfo {
-
- private DataTypes.TypeCategory category;
- private String typeName;
- private String typeDescription;
-
- public TypeVertexInfo(TypeCategory category, String typeName, String typeDescription) {
- super();
- this.category = category;
- this.typeName = typeName;
- this.typeDescription = typeDescription;
- }
-
- public DataTypes.TypeCategory getCategory() {
- return category;
- }
-
- public void setCategory(DataTypes.TypeCategory category) {
- this.category = category;
- }
-
- public String getTypeName() {
- return typeName;
- }
-
- public void setTypeName(String typeName) {
- this.typeName = typeName;
- }
-
- public String getTypeDescription() {
- return typeDescription;
- }
-
- public void setTypeDescription(String typeDescription) {
- this.typeDescription = typeDescription;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(category, typeName);
- }
-
- @Override
- public boolean equals(Object obj) {
-
- if (this == obj) {
- return true;
- }
-
- if (getClass() != obj.getClass()) {
- return false;
- }
-
- TypeVertexInfo other = (TypeVertexInfo)obj;
- if(! Objects.equals(category, other.category)) {
- return false;
- }
-
- if(! Objects.equals(typeName, other.typeName)) {
- return false;
- }
-
- return true;
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVisitor.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVisitor.java b/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVisitor.java
deleted file mode 100644
index a6e353c..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/typestore/TypeVisitor.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.typestore;
-
-import java.util.List;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.EnumType;
-import org.apache.atlas.typesystem.types.IDataType;
-
-/**
- * Callback mechanism used when storing types. As {@link GraphBackedTypeStore} traverses
- * through the types being persisted, these methods are called with the information that
- * it finds.
- */
-public interface TypeVisitor {
-
- /**
- * Called when an enumeration type is found
- * @param type
- * @throws AtlasException
- */
- void visitEnumeration(EnumType type) throws AtlasException;
-
- /**
- * Called with a data type that is associated with a given attribute. There can
- * be more than one. For example, map types have both a key and a value type.
- * This is called once for each type. This is called once for each datatype
- * associated with the given attribute.
- *
- * @param typeName The name of the type being processed.
- * @param sourceAttr The attribute in that type that we are processing.
- * @param attrType A dataType associated with that attribute.
- * @throws AtlasException
- */
- void visitAttributeDataType(String typeName, AttributeInfo sourceAttr, IDataType attrType) throws AtlasException;
-
- /**
- * Called when a super type is found. It is called once for each superType.
- *
- * @param typeName The type being processed.
- * @param superType The name of the super type that was found.
- * @throws RepositoryException
- * @throws AtlasException
- */
- void visitSuperType(String typeName, String superType) throws RepositoryException, AtlasException;
-
- /**
- * Called with the list of immediate attribute names that were found for the given type. It
- * is called once per type.
- *
- * @param typeName The name of the type that is being processed.
- * @param attrNames The names of all of the immediate attributes in the type.
- * @throws AtlasException
- */
- void visitAttributeNames(String typeName, List<String> attrNames) throws AtlasException;
-
- /**
- * Called once for each immediate attribute in a type.
- * @param typeName The name of the type that is being procesed
- * @param attribute The immediate attribute that was found
- *
- * @throws StorageException
- * @throws AtlasException
- */
- void visitAttribute(String typeName, AttributeInfo attribute) throws StorageException, AtlasException;
-
- /**
- * Called once for each struct, class, and trait type that was found. It is
- * called when we start processing that type.
- *
- * @param category The category of the type
- * @param typeName The name of the type
- * @param typeDescription The description of the type.
- */
- void visitDataType(TypeCategory category, String typeName, String typeDescription);
-}
\ No newline at end of file
[23/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/discovery/DataSetLineageServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/discovery/DataSetLineageServiceTest.java b/repository/src/test/java/org/apache/atlas/discovery/DataSetLineageServiceTest.java
deleted file mode 100644
index 3db58fe..0000000
--- a/repository/src/test/java/org/apache/atlas/discovery/DataSetLineageServiceTest.java
+++ /dev/null
@@ -1,497 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.discovery;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.BaseRepositoryTest;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.model.legacy.EntityResult;
-import org.apache.atlas.query.QueryParams;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.commons.collections.ArrayStack;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-
-/**
- * Unit tests for Hive LineageService.
- */
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class DataSetLineageServiceTest extends BaseRepositoryTest {
-
- @Inject
- private DiscoveryService discoveryService;
-
- @Inject
- private DataSetLineageService lineageService;
-
- @BeforeClass
- public void setUp() throws Exception {
- super.setUp();
- }
-
- @AfterClass
- public void tearDown() throws Exception {
- super.tearDown();
- }
-
- @DataProvider(name = "dslQueriesProvider")
- private Object[][] createDSLQueries() {
- return new String[][]{
- // joins
- {"hive_table where name=\"sales_fact\", columns"},
- {"hive_table where name=\"sales_fact\", columns select name, dataType, comment"},
- {"hive_table where name=\"sales_fact\", columns as c select c.name, c.dataType, c.comment"},
- // {"hive_db as db where (db.name=\"Reporting\"), hive_table as table select db.name,
- // table.name"},
- {"from hive_db"}, {"hive_db"}, {"hive_db where hive_db.name=\"Reporting\""},
- {"hive_db hive_db.name = \"Reporting\""},
- {"hive_db where hive_db.name=\"Reporting\" select name, owner"}, {"hive_db has name"},
- // {"hive_db, hive_table"},
- // {"hive_db, hive_process has name"},
- // {"hive_db as db1, hive_table where db1.name = \"Reporting\""},
- // {"hive_db where hive_db.name=\"Reporting\" and hive_db.createTime < " + System
- // .currentTimeMillis()},
- {"from hive_table"}, {"hive_table"}, {"hive_table is Dimension"},
- {"hive_column where hive_column isa PII"},
- // {"hive_column where hive_column isa PII select hive_column.name"},
- {"hive_column select hive_column.name"}, {"hive_column select name"},
- {"hive_column where hive_column.name=\"customer_id\""}, {"from hive_table select hive_table.name"},
- {"hive_db where (name = \"Reporting\")"},
- {"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1"},
- {"hive_db where hive_db has name"},
- // {"hive_db hive_table"},
- {"hive_db where hive_db has name"},
- // {"hive_db as db1 hive_table where (db1.name = \"Reporting\")"},
- {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 "},
- // {"hive_db where (name = \"Reporting\") and ((createTime + 1) > 0)"},
- // {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
- // \"Reporting\") select db1.name as dbName, tab.name as tabName"},
- // {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) or (db1.name =
- // \"Reporting\") select db1.name as dbName, tab.name as tabName"},
- // {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
- // \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
- // {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name =
- // \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName"},
- // trait searches
- {"Dimension"}, {"Fact"}, {"ETL"}, {"Metric"}, {"PII"},};
- }
-
- @Test(enabled = false)
- public void testSearchByDSLQueries(String dslQuery) throws Exception {
- System.out.println("Executing dslQuery = " + dslQuery);
- String jsonResults = discoveryService.searchByDSL(dslQuery, new QueryParams(100, 0));
- assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- Assert.assertEquals(results.length(), 3);
- System.out.println("results = " + results);
-
- Object query = results.get("query");
- assertNotNull(query);
-
- JSONObject dataType = results.getJSONObject("dataType");
- assertNotNull(dataType);
- String typeName = dataType.getString("typeName");
- assertNotNull(typeName);
-
- JSONArray rows = results.getJSONArray("rows");
- assertNotNull(rows);
- Assert.assertTrue(rows.length() >= 0); // some queries may not have any results
- System.out.println("query [" + dslQuery + "] returned [" + rows.length() + "] rows");
- }
-
- @Test(enabled = false)
- public void testGetInputsGraphInvalidArguments(final String tableName, String expectedException) throws Exception {
- testInvalidArguments(expectedException, new Invoker() {
- @Override
- void run() throws AtlasException {
- lineageService.getInputsGraph(tableName);
- }
- });
- }
-
- @Test(enabled = false)
- public void testGetInputsGraphForEntityInvalidArguments(final String tableName, String expectedException)
- throws Exception {
- testInvalidArguments(expectedException, new Invoker() {
- @Override
- void run() throws AtlasException {
- lineageService.getInputsGraph(tableName);
- }
- });
- }
-
- @Test(enabled = false)
- public void testGetInputsGraph() throws Exception {
- JSONObject results = getInputsGraph("sales_fact_monthly_mv");
- assertNotNull(results);
- System.out.println("inputs graph = " + results);
-
- JSONObject values = results.getJSONObject("values");
- assertNotNull(values);
-
- final JSONObject vertices = values.getJSONObject("vertices");
- Assert.assertEquals(vertices.length(), 4);
-
- final JSONObject edges = values.getJSONObject("edges");
- Assert.assertEquals(edges.length(), 4);
- }
-
- @Test(enabled = false)
- public void testCircularLineage() throws Exception{
- JSONObject results = getInputsGraph("table2");
- assertNotNull(results);
- System.out.println("inputs graph = " + results);
-
- JSONObject values = results.getJSONObject("values");
- assertNotNull(values);
-
- final JSONObject vertices = values.getJSONObject("vertices");
- Assert.assertEquals(vertices.length(), 2);
-
- final JSONObject edges = values.getJSONObject("edges");
- Assert.assertEquals(edges.length(), 4);
- }
-
- @Test(enabled = false)
- public void testGetInputsGraphForEntity() throws Exception {
- ITypedReferenceableInstance entity =
- repository.getEntityDefinition(HIVE_TABLE_TYPE, "name", "sales_fact_monthly_mv");
-
- JSONObject results = new JSONObject(lineageService.getInputsGraphForEntity(entity.getId()._getId()));
- assertNotNull(results);
- System.out.println("inputs graph = " + results);
-
- JSONObject values = results.getJSONObject("values");
- assertNotNull(values);
-
- final JSONObject vertices = values.getJSONObject("vertices");
- Assert.assertEquals(vertices.length(), 4);
-
- final JSONObject edges = values.getJSONObject("edges");
- Assert.assertEquals(edges.length(), 4);
- }
-
- @Test(enabled = false)
- public void testGetOutputsGraphInvalidArguments(final String tableName, String expectedException) throws Exception {
- testInvalidArguments(expectedException, new Invoker() {
- @Override
- void run() throws AtlasException {
- lineageService.getOutputsGraph(tableName);
- }
- });
- }
-
- @Test(enabled = false)
- public void testGetOutputsGraphForEntityInvalidArguments(final String tableId, String expectedException)
- throws Exception {
- testInvalidArguments(expectedException, new Invoker() {
- @Override
- void run() throws AtlasException {
- lineageService.getOutputsGraphForEntity(tableId);
- }
- });
- }
-
- @Test(enabled = false)
- public void testGetOutputsGraph() throws Exception {
- JSONObject results = getOutputsGraph("sales_fact");
- assertNotNull(results);
- System.out.println("outputs graph = " + results);
-
- JSONObject values = results.getJSONObject("values");
- assertNotNull(values);
-
- final JSONObject vertices = values.getJSONObject("vertices");
- Assert.assertEquals(vertices.length(), 3);
-
- final JSONObject edges = values.getJSONObject("edges");
- Assert.assertEquals(edges.length(), 4);
- }
-
- @Test(enabled = false)
- public void testGetOutputsGraphForEntity() throws Exception {
- ITypedReferenceableInstance entity =
- repository.getEntityDefinition(HIVE_TABLE_TYPE, "name", "sales_fact");
-
- JSONObject results = new JSONObject(lineageService.getOutputsGraphForEntity(entity.getId()._getId()));
- assertNotNull(results);
- System.out.println("outputs graph = " + results);
-
- JSONObject values = results.getJSONObject("values");
- assertNotNull(values);
-
- final JSONObject vertices = values.getJSONObject("vertices");
- Assert.assertEquals(vertices.length(), 3);
-
- final JSONObject edges = values.getJSONObject("edges");
- Assert.assertEquals(edges.length(), 4);
- }
-
- @DataProvider(name = "tableNamesProvider")
- private Object[][] tableNames() {
- return new String[][]{{"sales_fact", "4"}, {"time_dim", "3"}, {"sales_fact_daily_mv", "4"},
- {"sales_fact_monthly_mv", "4"}};
- }
-
- @Test(enabled = false)
- public void testGetSchema(String tableName, String expected) throws Exception {
- JSONObject results = getSchema(tableName);
- assertNotNull(results);
- System.out.println("columns = " + results);
-
- JSONArray rows = results.getJSONArray("rows");
- Assert.assertEquals(rows.length(), Integer.parseInt(expected));
-
- for (int index = 0; index < rows.length(); index++) {
- assertColumn(rows.getJSONObject(index));
- }
- }
-
- @Test(enabled = false)
- public void testGetSchemaForEntity(String tableName, String expected) throws Exception {
- ITypedReferenceableInstance entity =
- repository.getEntityDefinition(HIVE_TABLE_TYPE, "name", tableName);
-
- JSONObject results = new JSONObject(lineageService.getSchemaForEntity(entity.getId()._getId()));
- assertNotNull(results);
- System.out.println("columns = " + results);
-
- JSONArray rows = results.getJSONArray("rows");
- Assert.assertEquals(rows.length(), Integer.parseInt(expected));
-
- for (int index = 0; index < rows.length(); index++) {
- assertColumn(rows.getJSONObject(index));
- }
- }
-
- private void assertColumn(JSONObject jsonObject) throws JSONException {
- assertNotNull(jsonObject.getString("name"));
- assertNotNull(jsonObject.getString("comment"));
- assertNotNull(jsonObject.getString("dataType"));
- Assert.assertEquals(jsonObject.getString("$typeName$"), "hive_column");
- }
-
- @Test(enabled = false)
- public void testGetSchemaForDBEntity() throws Exception {
- String dbId = getEntityId(DATASET_SUBTYPE, "name", "dataSetSubTypeInst1");
- JSONObject results = new JSONObject(lineageService.getSchemaForEntity(dbId));
- }
-
- @DataProvider(name = "invalidArgumentsProvider")
- private Object[][] arguments() {
- return new String[][]{{null, IllegalArgumentException.class.getName()},
- {"", IllegalArgumentException.class.getName()},
- {"blah", EntityNotFoundException.class.getName()}};
- }
-
- abstract class Invoker {
- abstract void run() throws AtlasException;
- }
-
- public void testInvalidArguments(String expectedException, Invoker invoker) throws Exception {
- try {
- invoker.run();
- fail("Expected " + expectedException);
- } catch(Exception e) {
- assertEquals(e.getClass().getName(), expectedException);
- }
- }
-
- @Test(enabled = false)
- public void testGetSchemaInvalidArguments(final String tableName, String expectedException) throws Exception {
- testInvalidArguments(expectedException, new Invoker() {
- @Override
- void run() throws AtlasException {
- lineageService.getSchema(tableName);
- }
- });
- }
-
- @Test(enabled = false)
- public void testGetSchemaForEntityInvalidArguments(final String entityId, String expectedException) throws Exception {
- testInvalidArguments(expectedException, new Invoker() {
- @Override
- void run() throws AtlasException {
- lineageService.getSchemaForEntity(entityId);
- }
- });
- }
-
- private JSONObject getSchema(String tableName) throws Exception {
- return new JSONObject(lineageService.getSchema("qualified:" + tableName));
- }
-
- private JSONObject getInputsGraph(String tableName) throws Exception {
- return new JSONObject(lineageService.getInputsGraph("qualified:" + tableName));
- }
-
- private JSONObject getOutputsGraph(String tableName) throws Exception {
- return new JSONObject(lineageService.getOutputsGraph("qualified:" + tableName));
- }
-
- @Test(enabled = false)
- public void testLineageWithDelete() throws Exception {
- String tableName = "table" + random();
- createTable(tableName, 3, true);
- String tableId = getEntityId(HIVE_TABLE_TYPE, "name", tableName);
-
- JSONObject results = getSchema(tableName);
- assertEquals(results.getJSONArray("rows").length(), 3);
-
- results = getInputsGraph(tableName);
- Struct resultInstance = InstanceSerialization.fromJsonStruct(results.toString(), true);
- Map<String, Struct> vertices = (Map) resultInstance.get("vertices");
- assertEquals(vertices.size(), 2);
- Struct vertex = vertices.get(tableId);
- assertEquals(((Struct) vertex.get("vertexId")).get("state"), Id.EntityState.ACTIVE.name());
-
- results = getOutputsGraph(tableName);
- assertEquals(results.getJSONObject("values").getJSONObject("vertices").length(), 2);
-
- results = new JSONObject(lineageService.getSchemaForEntity(tableId));
- assertEquals(results.getJSONArray("rows").length(), 3);
-
- results = new JSONObject(lineageService.getInputsGraphForEntity(tableId));
- assertEquals(results.getJSONObject("values").getJSONObject("vertices").length(), 2);
-
- results = new JSONObject(lineageService.getOutputsGraphForEntity(tableId));
- assertEquals(results.getJSONObject("values").getJSONObject("vertices").length(), 2);
-
- //Delete the entity. Lineage for entity returns the same results as before.
- //Lineage for table name throws EntityNotFoundException
- EntityResult deleteResult = repository.deleteEntities(Arrays.asList(tableId));
- assertTrue(deleteResult.getDeletedEntities().contains(tableId));
-
- results = new JSONObject(lineageService.getSchemaForEntity(tableId));
- assertEquals(results.getJSONArray("rows").length(), 3);
-
- results = new JSONObject(lineageService.getInputsGraphForEntity(tableId));
- resultInstance = InstanceSerialization.fromJsonStruct(results.toString(), true);
- vertices = (Map) resultInstance.get("vertices");
- assertEquals(vertices.size(), 2);
- vertex = vertices.get(tableId);
- assertEquals(((Struct) vertex.get("vertexId")).get("state"), Id.EntityState.DELETED.name());
-
- assertEquals(results.getJSONObject("values").getJSONObject("vertices").length(), 2);
-
- results = new JSONObject(lineageService.getOutputsGraphForEntity(tableId));
- assertEquals(results.getJSONObject("values").getJSONObject("vertices").length(), 2);
-
- try {
- getSchema(tableName);
- fail("Expected EntityNotFoundException");
- } catch (EntityNotFoundException e) {
- //expected
- }
-
- try {
- getInputsGraph(tableName);
- fail("Expected EntityNotFoundException");
- } catch (EntityNotFoundException e) {
- //expected
- }
-
- try {
- getOutputsGraph(tableName);
- fail("Expected EntityNotFoundException");
- } catch (EntityNotFoundException e) {
- //expected
- }
-
- //Create table again should show new lineage
- createTable(tableName, 2, false);
- results = getSchema(tableName);
- assertEquals(results.getJSONArray("rows").length(), 2);
-
- results = getOutputsGraph(tableName);
- assertEquals(results.getJSONObject("values").getJSONObject("vertices").length(), 0);
-
- results = getInputsGraph(tableName);
- assertEquals(results.getJSONObject("values").getJSONObject("vertices").length(), 0);
-
- tableId = getEntityId(HIVE_TABLE_TYPE, "name", tableName);
-
- results = new JSONObject(lineageService.getSchemaForEntity(tableId));
- assertEquals(results.getJSONArray("rows").length(), 2);
-
- results = new JSONObject(lineageService.getInputsGraphForEntity(tableId));
- assertEquals(results.getJSONObject("values").getJSONObject("vertices").length(), 0);
-
- results = new JSONObject(lineageService.getOutputsGraphForEntity(tableId));
- assertEquals(results.getJSONObject("values").getJSONObject("vertices").length(), 0);
- }
-
- private void createTable(String tableName, int numCols, boolean createLineage) throws Exception {
- String dbId = getEntityId(DATABASE_TYPE, "name", "Sales");
- Id salesDB = new Id(dbId, 0, DATABASE_TYPE);
-
- //Create the entity again and schema should return the new schema
- List<Referenceable> columns = new ArrayStack();
- for (int i = 0; i < numCols; i++) {
- columns.add(column("col" + random(), "int", "column descr"));
- }
-
- Referenceable sd =
- storageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat", true,
- ImmutableList.of(column("time_id", "int", "time id")));
-
- Id table = table(tableName, "test table", salesDB, sd, "fetl", "External", columns);
- if (createLineage) {
- Id inTable = table("table" + random(), "test table", salesDB, sd, "fetl", "External", columns);
- Id outTable = table("table" + random(), "test table", salesDB, sd, "fetl", "External", columns);
- loadProcess("process" + random(), "hive query for monthly summary", "Tim ETL", ImmutableList.of(inTable),
- ImmutableList.of(table), "create table as select ", "plan", "id", "graph", "ETL");
- loadProcess("process" + random(), "hive query for monthly summary", "Tim ETL", ImmutableList.of(table),
- ImmutableList.of(outTable), "create table as select ", "plan", "id", "graph", "ETL");
- }
- }
-
- private String random() {
- return TestUtils.randomString(5);
- }
-
- private String getEntityId(String typeName, String attributeName, String attributeValue) throws Exception {
- return repository.getEntityDefinition(typeName, attributeName, attributeValue).getId()._getId();
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/discovery/EntityDiscoveryServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/discovery/EntityDiscoveryServiceTest.java b/repository/src/test/java/org/apache/atlas/discovery/EntityDiscoveryServiceTest.java
deleted file mode 100644
index ced0aa0..0000000
--- a/repository/src/test/java/org/apache/atlas/discovery/EntityDiscoveryServiceTest.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.discovery;
-
-import org.apache.atlas.TestModules;
-import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.model.discovery.AtlasSearchResult;
-import org.apache.atlas.model.discovery.SearchParameters;
-import org.apache.atlas.model.impexp.AtlasImportRequest;
-import org.apache.atlas.model.typedef.AtlasEntityDef;
-import org.apache.atlas.repository.impexp.ImportService;
-import org.apache.atlas.repository.impexp.ZipSource;
-import org.apache.atlas.repository.store.graph.AtlasEntityStore;
-import org.apache.atlas.store.AtlasTypeDefStore;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.utils.TestResourceFileUtils;
-import org.apache.commons.lang.StringUtils;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.io.IOException;
-
-import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.LOG;
-import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.loadModelFromJson;
-import static org.testng.Assert.*;
-import static org.testng.Assert.fail;
-
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class EntityDiscoveryServiceTest {
-
- private final String TEST_TYPE = "test";
- private final String TEST_TYPE1 = "test1";
- private final String TEST_TYPE2 = "test2";
- private final String TEST_TYPE3 = "test3";
- private final String TEST_TYPE_WITH_SUB_TYPES = "testTypeWithSubTypes";
- private AtlasTypeRegistry typeRegistry = new AtlasTypeRegistry();
-
- AtlasEntityDef typeTest = null;
- AtlasEntityDef typeTest1 = null;
- AtlasEntityDef typeTest2 = null;
- AtlasEntityDef typeTest3 = null;
- AtlasEntityDef typeWithSubTypes = null;
-
- private final int maxTypesStrLengthInIdxQuery = 55;
-
- @Inject
- EntityDiscoveryService discoveryService;
- @Inject
- AtlasTypeDefStore typeDefStore;
- @Inject
- AtlasEntityStore entityStore;
- @Inject
- ImportService importService;
-
-
- @BeforeClass
- public void init() throws AtlasBaseException {
- typeTest = new AtlasEntityDef(TEST_TYPE);
- typeTest1 = new AtlasEntityDef(TEST_TYPE1);
- typeTest2 = new AtlasEntityDef(TEST_TYPE2);
- typeTest3 = new AtlasEntityDef(TEST_TYPE3);
- typeWithSubTypes = new AtlasEntityDef(TEST_TYPE_WITH_SUB_TYPES);
-
- typeTest1.addSuperType(TEST_TYPE_WITH_SUB_TYPES);
- typeTest2.addSuperType(TEST_TYPE_WITH_SUB_TYPES);
- typeTest3.addSuperType(TEST_TYPE_WITH_SUB_TYPES);
-
- AtlasTypeRegistry.AtlasTransientTypeRegistry ttr = typeRegistry.lockTypeRegistryForUpdate();
-
- ttr.addType(typeTest);
- ttr.addType(typeWithSubTypes);
- ttr.addType(typeTest1);
- ttr.addType(typeTest2);
- ttr.addType(typeTest3);
-
- typeRegistry.releaseTypeRegistryForUpdate(ttr, true);
- }
-
- @Test
- public void getSubTypesForType_NullStringReturnsEmptyString() throws Exception {
- invokeGetSubTypesForType(null, maxTypesStrLengthInIdxQuery);
- }
-
- @Test
- public void getSubTypesForType_BlankStringReturnsEmptyString() throws Exception {
- invokeGetSubTypesForType(" ", maxTypesStrLengthInIdxQuery);
- }
-
- @Test
- public void getSubTypesForType_EmptyStringReturnsEmptyString() throws Exception {
- invokeGetSubTypesForType("", maxTypesStrLengthInIdxQuery);
- }
-
- @Test
- public void getSubTypeForTypeWithNoSubType_ReturnsTypeString() throws Exception {
- String s = invokeGetSubTypesForType(TEST_TYPE, 10);
-
- assertEquals(s, "(" + TEST_TYPE + ")");
- }
-
- @Test
- public void getSubTypeForTypeWithSubTypes_ReturnsOrClause() throws Exception {
- String s = invokeGetSubTypesForType(TEST_TYPE_WITH_SUB_TYPES, maxTypesStrLengthInIdxQuery);
-
- assertTrue(s.startsWith("("));
- assertTrue(s.contains(TEST_TYPE_WITH_SUB_TYPES));
- assertTrue(s.contains(TEST_TYPE1));
- assertTrue(s.contains(TEST_TYPE2));
- assertTrue(s.contains(TEST_TYPE3));
- assertTrue(s.endsWith(")"));
- }
-
- @Test
- public void getSubTypeForTypeWithSubTypes_ReturnsEmptyString() throws Exception {
- String s = invokeGetSubTypesForType(TEST_TYPE_WITH_SUB_TYPES, 20);
-
- assertTrue(StringUtils.isBlank(s));
- }
-
- private String invokeGetSubTypesForType(String inputString, int maxSubTypes) throws Exception {
- String s = EntityDiscoveryService.getTypeFilter(typeRegistry, inputString, maxSubTypes);
-
- assertNotNull(s);
- return s;
- }
-
-}
[19/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java
deleted file mode 100755
index 952a644..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java
+++ /dev/null
@@ -1,864 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.CreateUpdateEntitiesResult;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.annotation.GraphTransaction;
-import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
-import org.apache.atlas.query.QueryParams;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasGraphQuery;
-import org.apache.atlas.repository.graphdb.AtlasGraphQuery.ComparisionOperator;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.TraitNotFoundException;
-import org.apache.atlas.typesystem.persistence.AtlasSystemAttributes;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.apache.commons.lang.RandomStringUtils;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createUniqueRequiredAttrDef;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotEquals;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertTrue;
-
-/**
- * GraphBackedMetadataRepository test
- *
- * Guice loads the dependencies and injects the necessary objects
- *
- */
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class GraphBackedMetadataRepositoryTest {
-
- @Inject
- private MetadataRepository repositoryService;
-
- @Inject
- private GraphBackedDiscoveryService discoveryService;
-
- private TypeSystem typeSystem;
- private String guid;
- private QueryParams queryParams = new QueryParams(100, 0);
-
- @BeforeClass
- public void setUp() throws Exception {
- typeSystem = TypeSystem.getInstance();
- typeSystem.reset();
-
- assertTrue(repositoryService instanceof GraphBackedMetadataRepository);
- repositoryService = TestUtils.addTransactionWrapper(repositoryService);
- new GraphBackedSearchIndexer(new AtlasTypeRegistry());
-
- TestUtils.defineDeptEmployeeTypes(typeSystem);
- TestUtils.createHiveTypes(typeSystem);
- }
-
- @BeforeMethod
- public void setupContext() {
- TestUtils.resetRequestContext();
- }
-
- @AfterClass
- public void tearDown() {
- TypeSystem.getInstance().reset();
-// AtlasGraphProvider.cleanup();
- }
-
- @Test
- public void testSubmitEntity() throws Exception {
- ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(typeSystem);
-
- List<String> guids = repositoryService.createEntities(hrDept).getCreatedEntities();
- Assert.assertNotNull(guids);
- Assert.assertEquals(guids.size(), 5);
- guid = guids.get(4);
- Assert.assertNotNull(guid);
- }
-
- @Test
- public void testCreateEntityWithOneNestingLevel() throws AtlasException {
-
- List<Referenceable> toValidate = new ArrayList<>();
- Referenceable dept = new Referenceable(TestUtils.DEPARTMENT_TYPE);
- toValidate.add(dept);
- dept.set(TestUtils.NAME, "test1");
-
- Referenceable mike = new Referenceable(TestUtils.PERSON_TYPE);
- toValidate.add(mike);
-
- mike.set(TestUtils.NAME, "Mike");
- mike.set(TestUtils.DEPARTMENT_ATTR, dept);
-
- Referenceable mark = new Referenceable(TestUtils.PERSON_TYPE);
- toValidate.add(mark);
- mark.set(TestUtils.NAME, "Mark");
- mark.set(TestUtils.DEPARTMENT_ATTR, dept);
-
- dept.set(TestUtils.EMPLOYEES_ATTR, ImmutableList.of(mike, mark));
- Map<String,Referenceable> positions = new HashMap<>();
- final String JANITOR = "janitor";
- final String RECEPTIONIST = "receptionist";
- positions.put(JANITOR, mike);
- positions.put(RECEPTIONIST, mark);
- dept.set(TestUtils.POSITIONS_ATTR, positions);
-
-
- ClassType deptType = TypeSystem.getInstance().getDataType(ClassType.class, TestUtils.DEPARTMENT_TYPE);
- ITypedReferenceableInstance deptInstance = deptType.convert(dept, Multiplicity.REQUIRED);
-
- CreateUpdateEntitiesResult result = repositoryService.createEntities(deptInstance);
-
- validateGuidMapping(toValidate, result);
- }
-
-
- @Test
- public void testCreateEntityWithTwoNestingLevels() throws AtlasException {
-
- List<Referenceable> toVerify = new ArrayList<>();
- Referenceable dept = new Referenceable(TestUtils.DEPARTMENT_TYPE);
- toVerify.add(dept);
- dept.set(TestUtils.NAME, "test2");
-
- Referenceable wallace = new Referenceable(TestUtils.PERSON_TYPE);
- toVerify.add(wallace);
- wallace.set(TestUtils.NAME, "Wallace");
- wallace.set(TestUtils.DEPARTMENT_ATTR, dept);
-
- Referenceable wallaceComputer = new Referenceable(TestUtils.ASSET_TYPE);
- toVerify.add(wallaceComputer);
- wallaceComputer.set("name", "wallaceComputer");
- wallace.set(TestUtils.ASSETS_ATTR, ImmutableList.of(wallaceComputer));
-
- Referenceable jordan = new Referenceable(TestUtils.PERSON_TYPE);
- toVerify.add(jordan);
- jordan.set(TestUtils.NAME, "Jordan");
- jordan.set(TestUtils.DEPARTMENT_ATTR, dept);
-
- Referenceable jordanComputer = new Referenceable(TestUtils.ASSET_TYPE);
- toVerify.add(jordanComputer);
- jordanComputer.set("name", "jordanComputer");
- jordan.set(TestUtils.ASSETS_ATTR, ImmutableList.of(jordanComputer));
-
- dept.set(TestUtils.EMPLOYEES_ATTR, ImmutableList.of(wallace, jordan));
- Map<String,Referenceable> positions = new HashMap<>();
- final String JANITOR = "janitor";
- final String RECEPTIONIST = "receptionist";
- positions.put(JANITOR, wallace);
- positions.put(RECEPTIONIST, jordan);
- dept.set(TestUtils.POSITIONS_ATTR, positions);
-
-
- ClassType deptType = TypeSystem.getInstance().getDataType(ClassType.class, TestUtils.DEPARTMENT_TYPE);
- ITypedReferenceableInstance deptInstance = deptType.convert(dept, Multiplicity.REQUIRED);
-
- CreateUpdateEntitiesResult result = repositoryService.createEntities(deptInstance);
- validateGuidMapping(toVerify, result);
- }
-
-
- @Test
- public void testCreateEntityWithThreeNestingLevels() throws AtlasException {
-
- List<Referenceable> toVerify = new ArrayList<>();
-
- Referenceable dept = new Referenceable(TestUtils.DEPARTMENT_TYPE);
- toVerify.add(dept);
- dept.set(TestUtils.NAME, "test3");
-
- Referenceable barry = new Referenceable(TestUtils.PERSON_TYPE);
- toVerify.add(barry);
- barry.set(TestUtils.NAME, "barry");
- barry.set(TestUtils.DEPARTMENT_ATTR, dept);
-
- Referenceable barryComputer = new Referenceable(TestUtils.ASSET_TYPE);
- toVerify.add(barryComputer);
- barryComputer.set("name", "barryComputer");
- barry.set(TestUtils.ASSETS_ATTR, ImmutableList.of(barryComputer));
-
- Referenceable barryHardDrive = new Referenceable(TestUtils.ASSET_TYPE);
- toVerify.add(barryHardDrive);
- barryHardDrive.set("name", "barryHardDrive");
-
- Referenceable barryCpuFan = new Referenceable(TestUtils.ASSET_TYPE);
- toVerify.add(barryCpuFan);
- barryCpuFan.set("name", "barryCpuFan");
-
- Referenceable barryVideoCard = new Referenceable(TestUtils.ASSET_TYPE);
- toVerify.add(barryVideoCard);
- barryVideoCard.set("name", "barryVideoCard");
-
- barryComputer.set("childAssets", ImmutableList.of(barryHardDrive, barryVideoCard, barryCpuFan));
-
-
- Referenceable jacob = new Referenceable(TestUtils.PERSON_TYPE);
- toVerify.add(jacob);
- jacob.set(TestUtils.NAME, "jacob");
- jacob.set(TestUtils.DEPARTMENT_ATTR, dept);
-
- Referenceable jacobComputer = new Referenceable(TestUtils.ASSET_TYPE);
- toVerify.add(jacobComputer);
- jacobComputer.set("name", "jacobComputer");
- jacob.set(TestUtils.ASSETS_ATTR, ImmutableList.of(jacobComputer));
-
- Referenceable jacobHardDrive = new Referenceable(TestUtils.ASSET_TYPE);
- toVerify.add(jacobHardDrive);
- jacobHardDrive.set("name", "jacobHardDrive");
-
- Referenceable jacobCpuFan = new Referenceable(TestUtils.ASSET_TYPE);
- toVerify.add(jacobCpuFan);
- jacobCpuFan.set("name", "jacobCpuFan");
-
- Referenceable jacobVideoCard = new Referenceable(TestUtils.ASSET_TYPE);
- toVerify.add(jacobVideoCard);
- jacobVideoCard.set("name", "jacobVideoCard");
-
- jacobComputer.set("childAssets", ImmutableList.of(jacobHardDrive, jacobVideoCard, jacobCpuFan));
-
- dept.set(TestUtils.EMPLOYEES_ATTR, ImmutableList.of(barry, jacob));
- Map<String,Referenceable> positions = new HashMap<>();
- final String JANITOR = "janitor";
- final String RECEPTIONIST = "receptionist";
- positions.put(JANITOR, barry);
- positions.put(RECEPTIONIST, jacob);
- dept.set(TestUtils.POSITIONS_ATTR, positions);
-
-
- ClassType deptType = TypeSystem.getInstance().getDataType(ClassType.class, TestUtils.DEPARTMENT_TYPE);
- ITypedReferenceableInstance deptInstance = deptType.convert(dept, Multiplicity.REQUIRED);
-
- CreateUpdateEntitiesResult result = repositoryService.createEntities(deptInstance);
-
- assertEquals(result.getCreatedEntities().size(), toVerify.size());
-
- validateGuidMapping(toVerify, result);
- }
-
- @Test(dependsOnMethods = "testSubmitEntity")
- public void testGetEntityDefinitionForDepartment() throws Exception {
- ITypedReferenceableInstance entity = repositoryService.getEntityDefinition(guid);
- Assert.assertNotNull(entity);
-
- //entity state should be active by default
- Assert.assertEquals(entity.getId().getState(), Id.EntityState.ACTIVE);
-
- //System attributes created time and modified time should not be null
- AtlasSystemAttributes systemAttributes = entity.getSystemAttributes();
- Assert.assertNotNull(systemAttributes.createdTime);
- Assert.assertNotNull(systemAttributes.modifiedTime);
- }
-
- @Test(expectedExceptions = EntityNotFoundException.class)
- public void testGetEntityDefinitionNonExistent() throws Exception {
- repositoryService.getEntityDefinition("blah");
- Assert.fail();
- }
-
- @Test(dependsOnMethods = "testSubmitEntity")
- public void testGetEntityList() throws Exception {
- List<String> entityList = repositoryService.getEntityList(TestUtils.DEPARTMENT_TYPE);
- System.out.println("entityList = " + entityList);
- Assert.assertNotNull(entityList);
- Assert.assertTrue(entityList.contains(guid));
- }
-
- @Test
- public void testGetTypeAttributeName() throws Exception {
- Assert.assertEquals(repositoryService.getTypeAttributeName(), Constants.ENTITY_TYPE_PROPERTY_KEY);
- }
-
- @Test(dependsOnMethods = "testSubmitEntity")
- public void testGetTraitLabel() throws Exception {
- Assert.assertEquals(
- repositoryService.getTraitLabel(typeSystem.getDataType(ClassType.class, TestUtils.TABLE_TYPE),
- TestUtils.CLASSIFICATION), TestUtils.CLASSIFICATION);
- }
-
- @Test
- public void testCreateEntity() throws Exception {
- Referenceable databaseInstance = new Referenceable(TestUtils.DATABASE_TYPE);
- databaseInstance.set("name", TestUtils.DATABASE_NAME);
- databaseInstance.set("description", "foo database");
- databaseInstance.set("created", new Date(TestUtils.TEST_DATE_IN_LONG));
-
- databaseInstance.set("namespace", "colo:cluster:hive:db");
- databaseInstance.set("cluster", "cluster-1");
- databaseInstance.set("colo", "colo-1");
- System.out.println("databaseInstance = " + databaseInstance);
-
- ClassType dbType = typeSystem.getDataType(ClassType.class, TestUtils.DATABASE_TYPE);
- ITypedReferenceableInstance db = dbType.convert(databaseInstance, Multiplicity.REQUIRED);
- System.out.println("db = " + db);
-
- //Reuse the same database instance without id, with the same unique attribute
- ITypedReferenceableInstance table = createHiveTableInstance(databaseInstance);
- List<String> guids = createEntities(db, table);
- Assert.assertEquals(guids.size(), 7); //1 db + 5 columns + 1 table. Shouldn't create db again
- System.out.println("added db = " + guids.get(0));
- System.out.println("added table = " + guids.get(6));
- }
-
- @Test(dependsOnMethods = "testCreateEntity")
- public void testGetEntityDefinition() throws Exception {
- String guid = getGUID();
-
- ITypedReferenceableInstance table = repositoryService.getEntityDefinition(guid);
- Assert.assertEquals(table.getDate("created"), new Date(TestUtils.TEST_DATE_IN_LONG));
- System.out.println("*** table = " + table);
- }
-
- @Test(dependsOnMethods = "testCreateEntity")
- public void testGetTraitNames() throws Exception {
- final List<String> traitNames = repositoryService.getTraitNames(getGUID());
- Assert.assertEquals(traitNames.size(), 1);
- Assert.assertEquals(traitNames, Arrays.asList(new String[]{TestUtils.CLASSIFICATION}));
- }
-
- @Test
- public void testGetTraitNamesForEmptyTraits() throws Exception {
- final List<String> traitNames = repositoryService.getTraitNames(guid);
- Assert.assertEquals(traitNames.size(), 0);
- }
-
- @Test(expectedExceptions = EntityNotFoundException.class)
- public void testGetTraitNamesForBadEntity() throws Exception {
- repositoryService.getTraitNames(UUID.randomUUID().toString());
- Assert.fail();
- }
-
- @Test
- public void testMultipleTypesWithSameUniqueAttribute() throws Exception {
- //Two entities of different types(with same supertype that has the unique attribute) with same qualified name should succeed
- HierarchicalTypeDefinition<ClassType> supertype =
- createClassTypeDef(randomString(), ImmutableSet.<String>of(),
- createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> t1 =
- createClassTypeDef(randomString(), ImmutableSet.of(supertype.typeName));
- HierarchicalTypeDefinition<ClassType> t2 =
- createClassTypeDef(randomString(), ImmutableSet.of(supertype.typeName));
- typeSystem.defineClassTypes(supertype, t1, t2);
-
- final String name = randomString();
- String id1 = createEntity(new Referenceable(t1.typeName) {{
- set("name", name);
- }}).get(0);
- String id2 = createEntity(new Referenceable(t2.typeName) {{
- set("name", name);
- }}).get(0);
- assertNotEquals(id1, id2);
-
- ITypedReferenceableInstance entity = repositoryService.getEntityDefinition(t1.typeName, "name", name);
- assertEquals(entity.getTypeName(), t1.typeName);
- assertEquals(entity.getId()._getId(), id1);
-
- entity = repositoryService.getEntityDefinition(t2.typeName, "name", name);
- assertEquals(entity.getTypeName(), t2.typeName);
- assertEquals(entity.getId()._getId(), id2);
- }
-
- @Test(dependsOnMethods = "testGetTraitNames")
- public void testAddTrait() throws Exception {
- final String aGUID = getGUID();
- AtlasVertex AtlasVertex = GraphHelper.getInstance().getVertexForGUID(aGUID);
- Long modificationTimestampPreUpdate = GraphHelper.getSingleValuedProperty(AtlasVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(modificationTimestampPreUpdate);
-
- List<String> traitNames = repositoryService.getTraitNames(aGUID);
- System.out.println("traitNames = " + traitNames);
- Assert.assertEquals(traitNames.size(), 1);
- Assert.assertTrue(traitNames.contains(TestUtils.CLASSIFICATION));
- Assert.assertFalse(traitNames.contains(TestUtils.PII));
-
- TraitType traitType = typeSystem.getDataType(TraitType.class, TestUtils.PII);
- ITypedStruct traitInstance = traitType.createInstance();
-
- repositoryService.addTrait(aGUID, traitInstance);
-
- // refresh trait names
- traitNames = repositoryService.getTraitNames(aGUID);
- Assert.assertEquals(traitNames.size(), 2);
- Assert.assertTrue(traitNames.contains(TestUtils.PII));
- Assert.assertTrue(traitNames.contains(TestUtils.CLASSIFICATION));
-
- // Verify modification timestamp was updated.
- GraphHelper.getInstance().getVertexForGUID(aGUID);
- Long modificationTimestampPostUpdate = GraphHelper.getSingleValuedProperty(AtlasVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(modificationTimestampPostUpdate);
- }
-
- @Test(dependsOnMethods = "testAddTrait")
- public void testAddTraitWithAttribute() throws Exception {
- final String aGUID = getGUID();
- final String traitName = "P_I_I";
-
- HierarchicalTypeDefinition<TraitType> piiTrait = TypesUtil
- .createTraitTypeDef(traitName, ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
- TraitType traitType = typeSystem.defineTraitType(piiTrait);
- ITypedStruct traitInstance = traitType.createInstance();
- traitInstance.set("type", "SSN");
-
- repositoryService.addTrait(aGUID, traitInstance);
-
- TestUtils.dumpGraph(TestUtils.getGraph());
-
- // refresh trait names
- List<String> traitNames = repositoryService.getTraitNames(aGUID);
- Assert.assertEquals(traitNames.size(), 3);
- Assert.assertTrue(traitNames.contains(traitName));
-
- ITypedReferenceableInstance instance = repositoryService.getEntityDefinition(aGUID);
- IStruct traitInstanceRef = instance.getTrait(traitName);
- String type = (String) traitInstanceRef.get("type");
- Assert.assertEquals(type, "SSN");
- }
-
- @Test(dependsOnMethods = "testCreateEntity", expectedExceptions = NullPointerException.class)
- public void testAddTraitWithNullInstance() throws Exception {
- repositoryService.addTrait(getGUID(), null);
- Assert.fail();
- }
-
- @Test(dependsOnMethods = "testAddTrait", expectedExceptions = RepositoryException.class)
- public void testAddTraitForBadEntity() throws Exception {
- TraitType traitType = typeSystem.getDataType(TraitType.class, TestUtils.PII);
- ITypedStruct traitInstance = traitType.createInstance();
-
- repositoryService.addTrait(UUID.randomUUID().toString(), traitInstance);
- Assert.fail();
- }
-
- @Test(dependsOnMethods = "testAddTrait")
- public void testDeleteTrait() throws Exception {
- final String aGUID = getGUID();
- AtlasVertex AtlasVertex = GraphHelper.getInstance().getVertexForGUID(aGUID);
- Long modificationTimestampPreUpdate = GraphHelper.getSingleValuedProperty(AtlasVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(modificationTimestampPreUpdate);
-
- List<String> traitNames = repositoryService.getTraitNames(aGUID);
- Assert.assertEquals(traitNames.size(), 3);
- Assert.assertTrue(traitNames.contains(TestUtils.PII));
- Assert.assertTrue(traitNames.contains(TestUtils.CLASSIFICATION));
- Assert.assertTrue(traitNames.contains("P_I_I"));
-
- repositoryService.deleteTrait(aGUID, TestUtils.PII);
-
- // refresh trait names
- traitNames = repositoryService.getTraitNames(aGUID);
- Assert.assertEquals(traitNames.size(), 2);
- Assert.assertTrue(traitNames.contains(TestUtils.CLASSIFICATION));
- Assert.assertFalse(traitNames.contains(TestUtils.PII));
-
- // Verify modification timestamp was updated.
- GraphHelper.getInstance().getVertexForGUID(aGUID);
- Long modificationTimestampPostUpdate = GraphHelper.getSingleValuedProperty(AtlasVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(modificationTimestampPostUpdate);
- Assert.assertTrue(modificationTimestampPostUpdate > modificationTimestampPreUpdate);
- }
-
- @Test(expectedExceptions = EntityNotFoundException.class)
- public void testDeleteTraitForNonExistentEntity() throws Exception {
- repositoryService.deleteTrait(UUID.randomUUID().toString(), TestUtils.PII);
- Assert.fail();
- }
-
- @Test(expectedExceptions = TraitNotFoundException.class)
- public void testDeleteTraitForNonExistentTrait() throws Exception {
- final String aGUID = getGUID();
- repositoryService.deleteTrait(aGUID, "PCI");
- Assert.fail();
- }
-
- @Test(dependsOnMethods = "testCreateEntity")
- @GraphTransaction
- public void testGetIdFromVertex() throws Exception {
- AtlasVertex tableVertex = getTableEntityVertex();
-
- String guid = GraphHelper.getSingleValuedProperty(tableVertex, Constants.GUID_PROPERTY_KEY, String.class);
- if (guid == null) {
- Assert.fail();
- }
-
- Id expected = new Id(guid, GraphHelper.getSingleValuedProperty(tableVertex, Constants.VERSION_PROPERTY_KEY, Long.class).intValue(), TestUtils.TABLE_TYPE);
- Assert.assertEquals(GraphHelper.getIdFromVertex(TestUtils.TABLE_TYPE, tableVertex), expected);
- }
-
- @Test(dependsOnMethods = "testCreateEntity")
- @GraphTransaction
- public void testGetTypeName() throws Exception {
- AtlasVertex tableVertex = getTableEntityVertex();
- Assert.assertEquals(GraphHelper.getTypeName(tableVertex), TestUtils.TABLE_TYPE);
- }
-
- @Test(dependsOnMethods = "testCreateEntity")
- public void testSearchByDSLQuery() throws Exception {
- String dslQuery = "hive_database as PII";
- System.out.println("Executing dslQuery = " + dslQuery);
- String jsonResults = discoveryService.searchByDSL(dslQuery, queryParams);
- Assert.assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- Assert.assertEquals(results.length(), 3);
- System.out.println("results = " + results);
-
- Object query = results.get("query");
- Assert.assertNotNull(query);
-
- JSONObject dataType = results.getJSONObject("dataType");
- Assert.assertNotNull(dataType);
- String typeName = dataType.getString("typeName");
- Assert.assertNotNull(typeName);
-
- JSONArray rows = results.getJSONArray("rows");
- Assert.assertNotNull(rows);
- Assert.assertTrue(rows.length() > 0);
-
- for (int index = 0; index < rows.length(); index++) {
- JSONObject row = rows.getJSONObject(index);
- String type = row.getString("$typeName$");
- Assert.assertEquals(type, "hive_database");
-
- String name = row.getString("name");
- Assert.assertEquals(name, TestUtils.DATABASE_NAME);
- }
- }
-
- @Test(dependsOnMethods = "testSubmitEntity")
- public void testSearchByDSLWithInheritance() throws Exception {
- String dslQuery = "Person where name = 'Jane'";
- System.out.println("Executing dslQuery = " + dslQuery);
- String jsonResults = discoveryService.searchByDSL(dslQuery, queryParams);
- Assert.assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- Assert.assertEquals(results.length(), 3);
- System.out.println("results = " + results);
-
- Object query = results.get("query");
- Assert.assertNotNull(query);
-
- JSONObject dataType = results.getJSONObject("dataType");
- Assert.assertNotNull(dataType);
- String typeName = dataType.getString("typeName");
- Assert.assertEquals(typeName, "Person");
-
- JSONArray rows = results.getJSONArray("rows");
- Assert.assertEquals(rows.length(), 1);
-
- JSONObject row = rows.getJSONObject(0);
- Assert.assertEquals(row.getString("$typeName$"), "Manager");
- Assert.assertEquals(row.getString("name"), "Jane");
- }
-
- @Test(dependsOnMethods = "testCreateEntity")
- public void testBug37860() throws Exception {
- String dslQuery = "hive_table as t where name = 'bar' "
- + "database where name = 'foo' and description = 'foo database' select t";
-
- TestUtils.dumpGraph(TestUtils.getGraph());
-
- System.out.println("Executing dslQuery = " + dslQuery);
- String jsonResults = discoveryService.searchByDSL(dslQuery, queryParams);
- Assert.assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- Assert.assertEquals(results.length(), 3);
- System.out.println("results = " + results);
-
- Object query = results.get("query");
- Assert.assertNotNull(query);
-
- JSONObject dataType = results.getJSONObject("dataType");
- Assert.assertNotNull(dataType);
-
- JSONArray rows = results.getJSONArray("rows");
- Assert.assertEquals(rows.length(), 1);
-
- }
-
- /**
- * Full text search requires GraphBackedSearchIndexer, and GraphBackedSearchIndexer can't be enabled in
- * GraphBackedDiscoveryServiceTest because of its test data. So, test for full text search is in
- * GraphBackedMetadataRepositoryTest:(
- */
- @Test(dependsOnMethods = "testSubmitEntity")
- public void testFullTextSearch() throws Exception {
- //todo fix this
- //Weird: with lucene, the test passes without sleep
- //but with elasticsearch, doesn't work without sleep. why??
- long sleepInterval = 1000;
-
- TestUtils.dumpGraph(TestUtils.getGraph());
-
- //person in hr department whose name is john
- Thread.sleep(sleepInterval);
- String response = discoveryService.searchByFullText("john", queryParams);
- Assert.assertNotNull(response);
- JSONArray results = new JSONArray(response);
- Assert.assertEquals(results.length(), 1);
- JSONObject row = (JSONObject) results.get(0);
- Assert.assertEquals(row.get("typeName"), "Person");
-
- //person in hr department who lives in santa clara
- response = discoveryService.searchByFullText("Jane AND santa AND clara", queryParams);
- Assert.assertNotNull(response);
- results = new JSONArray(response);
- Assert.assertEquals(results.length(), 1);
- row = (JSONObject) results.get(0);
- Assert.assertEquals(row.get("typeName"), "Manager");
-
- //search for person in hr department whose name starts is john/jahn
- response = discoveryService.searchByFullText("hr AND (john OR jahn)", queryParams);
- Assert.assertNotNull(response);
- results = new JSONArray(response);
- Assert.assertEquals(results.length(), 1);
- row = (JSONObject) results.get(0);
- Assert.assertEquals(row.get("typeName"), "Person");
-
- //verify limit and offset
- //higher limit should return all results
- results = new JSONArray(discoveryService.searchByFullText("Department", queryParams));
- assertTrue(results.length() > 0);
- int maxResults = results.length();
-
- //smaller limit should return those many rows
- results = new JSONArray(discoveryService.searchByFullText("Department", new QueryParams(2, 0)));
- assertEquals(results.length(), 2);
-
- //offset should offset the results
- results = new JSONArray(discoveryService.searchByFullText("Department", new QueryParams(5, 2)));
- assertEquals(results.length(), maxResults > 5 ? 5 : Math.min((maxResults - 2) % 5, 5));
-
- //higher offset shouldn't return any rows
- results = new JSONArray(discoveryService.searchByFullText("Department", new QueryParams(2, 6)));
- assertEquals(results.length(), maxResults > 6 ? Math.min(maxResults - 6, 2) : 0);
- }
-
- @Test
- public void testUTFValues() throws Exception {
- Referenceable hrDept = new Referenceable("Department");
- Referenceable john = new Referenceable("Person");
- john.set("name", randomUTF());
- john.set("department", hrDept);
-
- hrDept.set("name", randomUTF());
- hrDept.set("employees", ImmutableList.of(john));
-
- ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
- ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
-
- List<String> guids = repositoryService.createEntities(hrDept2).getCreatedEntities();
- Assert.assertNotNull(guids);
- Assert.assertEquals(guids.size(), 2);
- Assert.assertNotNull(guids.get(0));
- Assert.assertNotNull(guids.get(1));
- }
-
- @GraphTransaction
- String getGUID() {
- AtlasVertex tableVertex = getTableEntityVertex();
-
- String guid = GraphHelper.getSingleValuedProperty(tableVertex, Constants.GUID_PROPERTY_KEY, String.class);
- if (guid == null) {
- Assert.fail();
- }
- return guid;
- }
-
- AtlasVertex getTableEntityVertex() {
- AtlasGraph graph = TestUtils.getGraph();
- AtlasGraphQuery query = graph.query().has(Constants.ENTITY_TYPE_PROPERTY_KEY, ComparisionOperator.EQUAL, TestUtils.TABLE_TYPE);
- Iterator<AtlasVertex> results = query.vertices().iterator();
- // returning one since guid should be unique
- AtlasVertex tableVertex = results.hasNext() ? results.next() : null;
- if (tableVertex == null) {
- Assert.fail();
- }
-
- return tableVertex;
- }
-
- private boolean assertEdge(String id, String typeName) throws Exception {
- AtlasGraph graph = TestUtils.getGraph();
- Iterable<AtlasVertex> vertices = graph.query().has(Constants.GUID_PROPERTY_KEY, id).vertices();
- AtlasVertex vertex = vertices.iterator().next();
- Iterable<AtlasEdge> edges = vertex.getEdges(AtlasEdgeDirection.OUT, Constants.INTERNAL_PROPERTY_KEY_PREFIX + typeName + ".ref");
-
- if (edges.iterator().hasNext()) {
- ITypedReferenceableInstance entity = repositoryService.getEntityDefinition(id);
- assertNotNull(entity.get("ref"));
- return true;
- }
- return false;
- }
-
- private void validateGuidMapping(List<Referenceable> toVerify, CreateUpdateEntitiesResult result)
- throws AtlasException {
- Map<String,String> guids = result.getGuidMapping().getGuidAssignments();
-
- TestUtils.assertContentsSame(result.getCreatedEntities(), guids.values());
- assertEquals(guids.size(), toVerify.size());
- for(Referenceable r : toVerify) {
- loadAndDoSimpleValidation(guids.get(r.getId()._getId()), r);
- }
- }
-
- private ITypedReferenceableInstance loadAndDoSimpleValidation(String guid, Referenceable inst) throws AtlasException {
- return TestUtils.loadAndDoSimpleValidation(guid, inst, repositoryService);
- }
-
- private List<String> createEntities(ITypedReferenceableInstance... instances) throws Exception {
- RequestContext.createContext();
- return repositoryService.createEntities(instances).getCreatedEntities();
- }
-
- private List<String> createEntity(Referenceable entity) throws Exception {
- ClassType type = typeSystem.getDataType(ClassType.class, entity.getTypeName());
- ITypedReferenceableInstance instance = type.convert(entity, Multiplicity.REQUIRED);
- return createEntities(instance);
- }
-
- private ITypedReferenceableInstance createHiveTableInstance(Referenceable databaseInstance) throws Exception {
- Referenceable tableInstance = new Referenceable(TestUtils.TABLE_TYPE, TestUtils.CLASSIFICATION);
- tableInstance.set("name", TestUtils.TABLE_NAME);
- tableInstance.set("description", "bar table");
- tableInstance.set("type", "managed");
- tableInstance.set("created", new Date(TestUtils.TEST_DATE_IN_LONG));
- tableInstance.set("tableType", 1); // enum
-
- // super type
- tableInstance.set("namespace", "colo:cluster:hive:db:table");
- tableInstance.set("cluster", "cluster-1");
- tableInstance.set("colo", "colo-1");
-
- // refer to an existing class
- tableInstance.set("database", databaseInstance);
-
- ArrayList<String> columnNames = new ArrayList<>();
- columnNames.add("first_name");
- columnNames.add("last_name");
- tableInstance.set("columnNames", columnNames);
-
- Struct traitInstance = (Struct) tableInstance.getTrait(TestUtils.CLASSIFICATION);
- traitInstance.set("tag", "foundation_etl");
-
- Struct serde1Instance = new Struct("serdeType");
- serde1Instance.set("name", "serde1");
- serde1Instance.set("serde", "serde1");
- tableInstance.set("serde1", serde1Instance);
-
- Struct serde2Instance = new Struct("serdeType");
- serde2Instance.set("name", "serde2");
- serde2Instance.set("serde", "serde2");
- tableInstance.set("serde2", serde2Instance);
-
- // HashMap<String, Referenceable> columnsMap = new HashMap<>();
- ArrayList<Referenceable> columns = new ArrayList<>();
- for (int index = 0; index < 5; index++) {
- Referenceable columnInstance = new Referenceable("column_type");
- final String name = "column_" + index;
- columnInstance.set("name", name);
- columnInstance.set("type", "string");
-
- columns.add(columnInstance);
- // columnsMap.put(name, columnInstance);
- }
- tableInstance.set("columns", columns);
- // tableInstance.set("columnsMap", columnsMap);
-
- // HashMap<String, Struct> partitionsMap = new HashMap<>();
- ArrayList<Struct> partitions = new ArrayList<>();
- for (int index = 0; index < 5; index++) {
- Struct partitionInstance = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
- final String name = "partition_" + index;
- partitionInstance.set("name", name);
-
- partitions.add(partitionInstance);
- // partitionsMap.put(name, partitionInstance);
- }
- tableInstance.set("partitions", partitions);
- // tableInstance.set("partitionsMap", partitionsMap);
-
- HashMap<String, String> parametersMap = new HashMap<>();
- parametersMap.put("foo", "bar");
- parametersMap.put("bar", "baz");
- parametersMap.put("some", "thing");
- tableInstance.set("parametersMap", parametersMap);
-
- ClassType tableType = typeSystem.getDataType(ClassType.class, TestUtils.TABLE_TYPE);
- return tableType.convert(tableInstance, Multiplicity.REQUIRED);
- }
-
- private String randomUTF() {
- return RandomStringUtils.random(10);
- }
-
- private String randomString() {
- return TestUtils.randomString(10);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedRepositoryHardDeleteTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedRepositoryHardDeleteTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedRepositoryHardDeleteTest.java
deleted file mode 100644
index 9c0ef43..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedRepositoryHardDeleteTest.java
+++ /dev/null
@@ -1,212 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.AtlasClient;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.NullRequiredAttributeException;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.testng.Assert;
-
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.atlas.TestUtils.COLUMNS_ATTR_NAME;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertFalse;
-import static org.testng.Assert.assertNull;
-import static org.testng.Assert.fail;
-import static org.testng.AssertJUnit.assertNotNull;
-
-public class GraphBackedRepositoryHardDeleteTest extends GraphBackedMetadataRepositoryDeleteTestBase {
- @Override
- DeleteHandler getDeleteHandler(TypeSystem typeSystem) {
- return new HardDeleteHandler(typeSystem);
- }
-
- @Override
- protected void assertTestDeleteEntityWithTraits(String guid) {
- //entity is deleted. So, no assertions
- }
-
- @Override
- protected void assertTableForTestDeleteReference(String tableId) {
- //entity is deleted. So, no assertions
- }
-
- @Override
- protected void assertColumnForTestDeleteReference(ITypedReferenceableInstance tableInstance) throws AtlasException {
- List<ITypedReferenceableInstance> columns =
- (List<ITypedReferenceableInstance>) tableInstance.get(COLUMNS_ATTR_NAME);
- assertNull(columns);
- }
-
- @Override
- protected void assertProcessForTestDeleteReference(ITypedReferenceableInstance processInstance) throws Exception {
- //assert that outputs is empty
- ITypedReferenceableInstance newProcess =
- repositoryService.getEntityDefinition(processInstance.getId()._getId());
- assertNull(newProcess.get(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS));
- }
-
- @Override
- protected void assertEntityDeleted(String id) throws Exception {
- try {
- repositoryService.getEntityDefinition(id);
- fail("Expected EntityNotFoundException");
- } catch(EntityNotFoundException e) {
- // expected
- }
- }
-
- @Override
- protected void assertDeletedColumn(ITypedReferenceableInstance tableInstance) throws AtlasException {
- assertEquals(((List<IReferenceableInstance>) tableInstance.get(COLUMNS_ATTR_NAME)).size(), 2);
- }
-
- @Override
- protected void assertTestDeleteEntities(ITypedReferenceableInstance tableInstance) {
- int vertexCount = getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, TestUtils.TABLE_TYPE).size();
- assertEquals(vertexCount, 0);
-
- vertexCount = getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, TestUtils.COLUMN_TYPE).size();
- assertEquals(vertexCount, 0);
- }
-
- @Override
- protected void assertVerticesDeleted(List<AtlasVertex> vertices) {
- assertEquals(vertices.size(), 0);
- }
-
- @Override
- protected void assertTestUpdateEntity_MultiplicityOneNonCompositeReference(String janeGuid) throws Exception {
- // Verify that max is no longer a subordinate of jane.
- ITypedReferenceableInstance jane = repositoryService.getEntityDefinition(janeGuid);
- List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
- Assert.assertEquals(subordinates.size(), 1);
- }
-
- @Override
- protected void assertJohnForTestDisconnectBidirectionalReferences(ITypedReferenceableInstance john,
- String janeGuid) throws Exception {
- assertNull(john.get("manager"));
- }
-
- @Override
- protected void assertMaxForTestDisconnectBidirectionalReferences(Map<String, String> nameGuidMap)
- throws Exception {
- // Verify that the Department.employees reference to the deleted employee
- // was disconnected.
- ITypedReferenceableInstance hrDept = repositoryService.getEntityDefinition(nameGuidMap.get("hr"));
- List<ITypedReferenceableInstance> employees = (List<ITypedReferenceableInstance>) hrDept.get("employees");
- Assert.assertEquals(employees.size(), 3);
- String maxGuid = nameGuidMap.get("Max");
- for (ITypedReferenceableInstance employee : employees) {
- Assert.assertNotEquals(employee.getId()._getId(), maxGuid);
- }
-
- // Verify that the Manager.subordinates reference to the deleted employee
- // Max was disconnected.
- ITypedReferenceableInstance jane = repositoryService.getEntityDefinition(nameGuidMap.get("Jane"));
- List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
- assertEquals(subordinates.size(), 1);
-
- // Verify that max's Person.mentor unidirectional reference to john was disconnected.
- ITypedReferenceableInstance john = repositoryService.getEntityDefinition(nameGuidMap.get("John"));
- assertNull(john.get("mentor"));
- }
-
- @Override
- protected void assertTestDisconnectUnidirectionalArrayReferenceFromClassType(
- List<ITypedReferenceableInstance> columns, String columnGuid) {
- assertEquals(columns.size(), 4);
- for (ITypedReferenceableInstance column : columns) {
- assertFalse(column.getId()._getId().equals(columnGuid));
- }
- }
-
- @Override
- protected void assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(String structContainerGuid)
- throws Exception {
- // Verify that the unidirectional references from the struct and trait instances
- // to the deleted entities were disconnected.
- ITypedReferenceableInstance structContainerConvertedEntity =
- repositoryService.getEntityDefinition(structContainerGuid);
- ITypedStruct struct = (ITypedStruct) structContainerConvertedEntity.get("struct");
- assertNull(struct.get("target"));
- IStruct trait = structContainerConvertedEntity.getTrait("TestTrait");
- assertNotNull(trait);
- assertNull(trait.get("target"));
- }
-
- @Override
- protected void assertTestDisconnectMapReferenceFromClassType(String mapOwnerGuid) throws Exception {
- // Verify map references from mapOwner were disconnected.
- ITypedReferenceableInstance mapOwnerInstance = repositoryService.getEntityDefinition(mapOwnerGuid);
- assertNull(mapOwnerInstance.get("map"));
- assertNull(mapOwnerInstance.get("biMap"));
-
- AtlasVertex mapOwnerVertex = GraphHelper.getInstance().getVertexForGUID(mapOwnerGuid);
- Object object = mapOwnerVertex.getProperty("MapOwner.map.value1", String.class);
- assertNull(object);
- object = mapOwnerVertex.getProperty("MapOwner.biMap.value1", String.class);
- assertNull(object);
- }
-
- @Override
- protected void assertTestDeleteTargetOfMultiplicityRequiredReference() throws Exception {
-
- Assert.fail("Lower bound on attribute Manager.subordinates was not enforced - " +
- NullRequiredAttributeException.class.getSimpleName() + " was expected but none thrown");
- }
-
- @Override
- protected void assertTestLowerBoundsIgnoredOnDeletedEntities(List<ITypedReferenceableInstance> employees) {
-
- Assert.assertEquals(employees.size(), 1, "References to deleted employees were not disconnected");
- }
-
- @Override
- protected void assertTestLowerBoundsIgnoredOnCompositeDeletedEntities(String hrDeptGuid) throws Exception {
-
- try {
- repositoryService.getEntityDefinition(hrDeptGuid);
- Assert.fail(EntityNotFoundException.class.getSimpleName() + " was expected but none thrown");
- }
- catch (EntityNotFoundException e) {
- // good
- }
- }
-
- @Override
- protected void verifyTestDeleteEntityWithDuplicateReferenceListElements(List columnsPropertyValue) {
-
- // With hard deletes enabled, verify that duplicate edge IDs for deleted edges
- // were removed from the array property list.
- Assert.assertEquals(columnsPropertyValue.size(), 2);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedRepositorySoftDeleteTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedRepositorySoftDeleteTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedRepositorySoftDeleteTest.java
deleted file mode 100644
index 8c86235..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedRepositorySoftDeleteTest.java
+++ /dev/null
@@ -1,239 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.AtlasClient;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.Id.EntityState;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.testng.Assert;
-
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.atlas.TestUtils.COLUMNS_ATTR_NAME;
-import static org.apache.atlas.TestUtils.NAME;
-import static org.apache.atlas.TestUtils.PII;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertTrue;
-
-public class GraphBackedRepositorySoftDeleteTest extends GraphBackedMetadataRepositoryDeleteTestBase {
- @Override
- DeleteHandler getDeleteHandler(TypeSystem typeSystem) {
- return new SoftDeleteHandler(typeSystem);
- }
-
- @Override
- protected void assertTestDeleteEntityWithTraits(String guid) throws Exception {
- ITypedReferenceableInstance instance = repositoryService.getEntityDefinition(guid);
- assertTrue(instance.getTraits().contains(PII));
- }
-
- @Override
- protected void assertTableForTestDeleteReference(String tableId) throws Exception {
- ITypedReferenceableInstance table = repositoryService.getEntityDefinition(tableId);
- assertNotNull(table.get(NAME));
- assertNotNull(table.get("description"));
- assertNotNull(table.get("type"));
- assertNotNull(table.get("tableType"));
- assertNotNull(table.get("created"));
-
- Id dbId = (Id) table.get("database");
- assertNotNull(dbId);
-
- ITypedReferenceableInstance db = repositoryService.getEntityDefinition(dbId.getId()._getId());
- assertNotNull(db);
- assertEquals(db.getId().getState(), Id.EntityState.ACTIVE);
- }
-
- @Override
- protected void assertColumnForTestDeleteReference(ITypedReferenceableInstance tableInstance) throws AtlasException {
- List<ITypedReferenceableInstance> columns =
- (List<ITypedReferenceableInstance>) tableInstance.get(COLUMNS_ATTR_NAME);
- assertEquals(columns.size(), 1);
- assertEquals(columns.get(0).getId().getState(), Id.EntityState.DELETED);
- }
-
- @Override
- protected void assertProcessForTestDeleteReference(ITypedReferenceableInstance expected) throws Exception {
- ITypedReferenceableInstance process = repositoryService.getEntityDefinition(expected.getId()._getId());
- List<ITypedReferenceableInstance> outputs =
- (List<ITypedReferenceableInstance>) process.get(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS);
- List<ITypedReferenceableInstance> expectedOutputs =
- (List<ITypedReferenceableInstance>) process.get(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS);
- assertEquals(outputs.size(), expectedOutputs.size());
- }
-
- @Override
- protected void assertEntityDeleted(String id) throws Exception {
- ITypedReferenceableInstance entity = repositoryService.getEntityDefinition(id);
- assertEquals(entity.getId().getState(), Id.EntityState.DELETED);
- }
-
- @Override
- protected void assertDeletedColumn(ITypedReferenceableInstance tableInstance) throws AtlasException {
- List<IReferenceableInstance> columns = (List<IReferenceableInstance>) tableInstance.get(COLUMNS_ATTR_NAME);
- assertEquals(columns.size(), 3);
- assertEquals(columns.get(0).getId().getState(), Id.EntityState.DELETED);
- }
-
- @Override
- protected void assertTestDeleteEntities(ITypedReferenceableInstance expected) throws Exception {
- //Assert that the deleted table can be fully constructed back
- ITypedReferenceableInstance table = repositoryService.getEntityDefinition(expected.getId()._getId());
- List<ITypedReferenceableInstance> columns =
- (List<ITypedReferenceableInstance>) table.get(TestUtils.COLUMNS_ATTR_NAME);
- List<ITypedReferenceableInstance> expectedColumns =
- (List<ITypedReferenceableInstance>) table.get(TestUtils.COLUMNS_ATTR_NAME);
- assertEquals(columns.size(), expectedColumns.size());
- assertNotNull(table.get("database"));
- }
-
- @Override
- protected void assertVerticesDeleted(List<AtlasVertex> vertices) {
- for (AtlasVertex vertex : vertices) {
- assertEquals(GraphHelper.getSingleValuedProperty(vertex, Constants.STATE_PROPERTY_KEY, String.class), Id.EntityState.DELETED.name());
- }
- }
-
- @Override
- protected void assertTestUpdateEntity_MultiplicityOneNonCompositeReference(String janeGuid) throws Exception {
- // Verify Jane's subordinates reference cardinality is still 2.
- ITypedReferenceableInstance jane = repositoryService.getEntityDefinition(janeGuid);
- List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
- Assert.assertEquals(subordinates.size(), 2);
- }
-
- @Override
- protected void assertJohnForTestDisconnectBidirectionalReferences(ITypedReferenceableInstance john, String janeGuid)
- throws Exception {
- Id mgr = (Id) john.get("manager");
- assertNotNull(mgr);
- assertEquals(mgr._getId(), janeGuid);
- assertEquals(mgr.getState(), Id.EntityState.DELETED);
- }
-
- @Override
- protected void assertMaxForTestDisconnectBidirectionalReferences(Map<String, String> nameGuidMap) throws Exception {
- // Verify that the Department.employees reference to the deleted employee
- // was disconnected.
- ITypedReferenceableInstance hrDept = repositoryService.getEntityDefinition(nameGuidMap.get("hr"));
- List<ITypedReferenceableInstance> employees = (List<ITypedReferenceableInstance>) hrDept.get("employees");
- Assert.assertEquals(employees.size(), 4);
- String maxGuid = nameGuidMap.get("Max");
- for (ITypedReferenceableInstance employee : employees) {
- if (employee.getId()._getId().equals(maxGuid)) {
- assertEquals(employee.getId().getState(), Id.EntityState.DELETED);
- }
- }
-
- // Verify that the Manager.subordinates still references deleted employee
- ITypedReferenceableInstance jane = repositoryService.getEntityDefinition(nameGuidMap.get("Jane"));
- List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
- assertEquals(subordinates.size(), 2);
- for (ITypedReferenceableInstance subordinate : subordinates) {
- if (subordinate.getId()._getId().equals(maxGuid)) {
- assertEquals(subordinate.getId().getState(), Id.EntityState.DELETED);
- }
- }
-
- // Verify that max's Person.mentor unidirectional reference to john was disconnected.
- ITypedReferenceableInstance john = repositoryService.getEntityDefinition(nameGuidMap.get("John"));
- Id mentor = (Id) john.get("mentor");
- assertEquals(mentor._getId(), maxGuid);
- assertEquals(mentor.getState(), Id.EntityState.DELETED);
- }
-
- @Override
- protected void assertTestDisconnectUnidirectionalArrayReferenceFromClassType(
- List<ITypedReferenceableInstance> columns, String columnGuid) {
- Assert.assertEquals(columns.size(), 5);
- for (ITypedReferenceableInstance column : columns) {
- if (column.getId()._getId().equals(columnGuid)) {
- assertEquals(column.getId().getState(), Id.EntityState.DELETED);
- } else {
- assertEquals(column.getId().getState(), Id.EntityState.ACTIVE);
- }
- }
-
- }
-
- @Override
- protected void assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(String structContainerGuid)
- throws Exception {
- // Verify that the unidirectional references from the struct and trait instances
- // to the deleted entities were not disconnected.
- ITypedReferenceableInstance structContainerConvertedEntity =
- repositoryService.getEntityDefinition(structContainerGuid);
- ITypedStruct struct = (ITypedStruct) structContainerConvertedEntity.get("struct");
- assertNotNull(struct.get("target"));
- IStruct trait = structContainerConvertedEntity.getTrait("TestTrait");
- assertNotNull(trait);
- assertNotNull(trait.get("target"));
-
- }
-
- @Override
- protected void assertTestDisconnectMapReferenceFromClassType(String mapOwnerGuid) throws Exception {
- ITypedReferenceableInstance mapOwnerInstance = repositoryService.getEntityDefinition(mapOwnerGuid);
- Map<String, ITypedReferenceableInstance> map =
- (Map<String, ITypedReferenceableInstance>) mapOwnerInstance.get("map");
- assertNotNull(map);
- assertEquals(map.size(), 1);
- Map<String, ITypedReferenceableInstance> biMap =
- (Map<String, ITypedReferenceableInstance>) mapOwnerInstance.get("biMap");
- assertNotNull(biMap);
- assertEquals(biMap.size(), 1);
- }
-
- @Override
- protected void assertTestDeleteTargetOfMultiplicityRequiredReference() throws Exception {
- // No-op - it's ok that no exception was thrown if soft deletes are enabled.
- }
-
- @Override
- protected void assertTestLowerBoundsIgnoredOnDeletedEntities(List<ITypedReferenceableInstance> employees) {
-
- Assert.assertEquals(employees.size(), 4, "References to deleted employees should not have been disconnected with soft deletes enabled");
- }
-
- @Override
- protected void assertTestLowerBoundsIgnoredOnCompositeDeletedEntities(String hrDeptGuid) throws Exception {
-
- ITypedReferenceableInstance hrDept = repositoryService.getEntityDefinition(hrDeptGuid);
- Assert.assertEquals(hrDept.getId().getState(), EntityState.DELETED);
- }
-
- @Override
- protected void verifyTestDeleteEntityWithDuplicateReferenceListElements(List columnsPropertyValue) {
-
- // With soft deletes enabled, verify that edge IDs for deleted edges
- // were not removed from the array property list.
- Assert.assertEquals(columnsPropertyValue.size(), 4);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexerMockTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexerMockTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexerMockTest.java
deleted file mode 100644
index a98ef38..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexerMockTest.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.ha.HAConfiguration;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.IndexException;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasGraphManagement;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.commons.configuration.Configuration;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import static org.mockito.Mockito.verifyZeroInteractions;
-import static org.mockito.Mockito.when;
-
-public class GraphBackedSearchIndexerMockTest implements IAtlasGraphProvider {
-
- @Mock
- private Configuration configuration;
-
- @Mock
- private AtlasGraph graph;
-
- @Mock
- private AtlasGraphManagement management;
-
- @Mock
- private AtlasTypeRegistry typeRegistry;
-
- @BeforeMethod
- public void setup() {
- MockitoAnnotations.initMocks(this);
- }
-
- @Test
- public void testSearchIndicesAreInitializedOnConstructionWhenHAIsDisabled() throws IndexException, RepositoryException {
- when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(false);
- when(graph.getManagementSystem()).thenReturn(management);
-
- GraphBackedSearchIndexer graphBackedSearchIndexer = new GraphBackedSearchIndexer(this, configuration, typeRegistry);
- }
-
- @Test
- public void testSearchIndicesAreNotInitializedOnConstructionWhenHAIsEnabled() throws IndexException, RepositoryException {
- when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
- when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
- when(graph.getManagementSystem()).thenReturn(management);
- when(management.containsPropertyKey(Constants.VERTEX_TYPE_PROPERTY_KEY)).thenReturn(true);
-
- new GraphBackedSearchIndexer(this, configuration, typeRegistry);
- verifyZeroInteractions(management);
- }
-
- @Test
- public void testIndicesAreReinitializedWhenServerBecomesActive() throws AtlasException {
- when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
- when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
- when(graph.getManagementSystem()).thenReturn(management);
-
- GraphBackedSearchIndexer graphBackedSearchIndexer = new GraphBackedSearchIndexer(this, configuration, typeRegistry);
- graphBackedSearchIndexer.instanceIsActive();
- }
-
-
- @Override
- public AtlasGraph get() {
- return graph;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexerTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexerTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexerTest.java
deleted file mode 100644
index feffabf..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexerTest.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import com.google.inject.Inject;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasGraphIndex;
-import org.apache.atlas.repository.graphdb.AtlasGraphManagement;
-import org.apache.atlas.repository.graphdb.AtlasPropertyKey;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.EnumType;
-import org.apache.atlas.typesystem.types.EnumValue;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-import java.util.Set;
-
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.testng.Assert.*;
-
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class GraphBackedSearchIndexerTest {
- @Inject
- private GraphBackedSearchIndexer graphBackedSearchIndexer;
-
- @Test
- public void verifySystemMixedIndexes() {
- AtlasGraph graph = TestUtils.getGraph();
- AtlasGraphManagement managementSystem = graph.getManagementSystem();
- try {
- AtlasGraphIndex vertexIndex = managementSystem.getGraphIndex(Constants.VERTEX_INDEX);
- assertNotNull(vertexIndex);
- assertTrue(vertexIndex.isMixedIndex());
- assertFalse(vertexIndex.isEdgeIndex());
- assertTrue(vertexIndex.isVertexIndex());
-
- AtlasGraphIndex edgeIndex = managementSystem.getGraphIndex(Constants.EDGE_INDEX);
- assertNotNull(edgeIndex);
- assertTrue(edgeIndex.isMixedIndex());
- assertTrue(edgeIndex.isEdgeIndex());
- assertFalse(edgeIndex.isVertexIndex());
-
-
- verifyVertexIndexContains(managementSystem, Constants.STATE_PROPERTY_KEY);
- }
- finally {
- managementSystem.rollback();
- }
- }
-
- @Test
- public void verifySystemCompositeIndexes() {
- AtlasGraph graph = TestUtils.getGraph();
- AtlasGraphManagement managementSystem = graph.getManagementSystem();
- try {
- verifySystemCompositeIndex(managementSystem, Constants.GUID_PROPERTY_KEY, true);
- verifyVertexIndexContains(managementSystem, Constants.GUID_PROPERTY_KEY);
-
- verifySystemCompositeIndex(managementSystem, Constants.ENTITY_TYPE_PROPERTY_KEY, false);
- verifyVertexIndexContains(managementSystem, Constants.ENTITY_TYPE_PROPERTY_KEY);
-
- verifySystemCompositeIndex(managementSystem, Constants.SUPER_TYPES_PROPERTY_KEY, false);
- verifyVertexIndexContains(managementSystem, Constants.SUPER_TYPES_PROPERTY_KEY);
-
- verifySystemCompositeIndex(managementSystem, Constants.TRAIT_NAMES_PROPERTY_KEY, false);
- verifyVertexIndexContains(managementSystem, Constants.TRAIT_NAMES_PROPERTY_KEY);
- }
- finally {
- managementSystem.rollback();
- }
- }
-
- @Test
- public void verifyFullTextIndex() {
- AtlasGraph graph = TestUtils.getGraph();
- AtlasGraphManagement managementSystem = graph.getManagementSystem();
- try {
- AtlasGraphIndex fullTextIndex = managementSystem.getGraphIndex(Constants.FULLTEXT_INDEX);
- assertTrue(fullTextIndex.isMixedIndex());
-
- Arrays.asList(fullTextIndex.getFieldKeys()).contains(
- managementSystem.getPropertyKey(Constants.ENTITY_TEXT_PROPERTY_KEY));
- }
- finally {
- managementSystem.rollback();
- }
- }
-
- @Test
- public void verifyTypeStoreIndexes() {
- AtlasGraph graph = TestUtils.getGraph();
- AtlasGraphManagement managementSystem = graph.getManagementSystem();
- try {
- verifySystemCompositeIndex(managementSystem, Constants.TYPENAME_PROPERTY_KEY, true);
- verifyVertexIndexContains(managementSystem, Constants.TYPENAME_PROPERTY_KEY);
-
- verifySystemCompositeIndex(managementSystem, Constants.VERTEX_TYPE_PROPERTY_KEY, false);
- verifyVertexIndexContains(managementSystem, Constants.VERTEX_TYPE_PROPERTY_KEY);
- }
- finally {
- managementSystem.rollback();
- }
-
- }
-
- @Test
- public void verifyUserDefinedTypeIndex() throws AtlasException {
- AtlasGraph graph = TestUtils.getGraph();
- AtlasGraphManagement managementSystem = graph.getManagementSystem();
- try {
- TypeSystem typeSystem = TypeSystem.getInstance();
-
- String enumName = "randomEnum" + TestUtils.randomString(10);
- EnumType managedType = typeSystem.defineEnumType(enumName, new EnumValue("randomEnumValue", 0));
-
- HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
- createClassTypeDef("Database", "Database type description", null,
- TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef("managedType", managedType));
-
- ClassType databaseType = typeSystem.defineClassType(databaseTypeDefinition);
- graphBackedSearchIndexer.onAdd(Arrays.asList(databaseType));
-
- verifySystemCompositeIndex(managementSystem, "Database.name" + Constants.ENTITY_TYPE_PROPERTY_KEY, false);
- verifyVertexIndexContains(managementSystem, "Database.name" + Constants.ENTITY_TYPE_PROPERTY_KEY);
- verifySystemCompositeIndex(managementSystem, "Database.name" + Constants.SUPER_TYPES_PROPERTY_KEY, false);
-
- verifyVertexIndexContains(managementSystem, "Database.managedType");
- }
- finally {
- //search indexer uses its own titan management transaction
- managementSystem.rollback();
- }
- }
-
- private void verifyVertexIndexContains(AtlasGraphManagement managementSystem, String indexName) {
- AtlasGraphIndex vertexIndex = managementSystem.getGraphIndex(Constants.VERTEX_INDEX);
- Set<AtlasPropertyKey> fieldKeys = vertexIndex.getFieldKeys();
- Arrays.asList(fieldKeys).contains(managementSystem.getPropertyKey(indexName));
- }
-
- private void verifySystemCompositeIndex(AtlasGraphManagement managementSystem, String indexName, boolean isUnique) {
- AtlasGraphIndex systemIndex = managementSystem.getGraphIndex(indexName);
- assertNotNull(systemIndex);
- assertTrue(systemIndex.isCompositeIndex());
- if (isUnique) {
- assertTrue(systemIndex.isUnique());
- } else {
- assertFalse(systemIndex.isUnique());
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/GraphHelperMockTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphHelperMockTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphHelperMockTest.java
deleted file mode 100644
index a0894cd..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/GraphHelperMockTest.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.mockito.MockitoAnnotations;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.util.Iterator;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-import static org.testng.Assert.assertEquals;
-
-public class GraphHelperMockTest {
-
- private GraphHelper graphHelperInstance;
-
- private AtlasGraph graph;
-
- @BeforeClass
- public void setup() {
- MockitoAnnotations.initMocks(this);
- graph = mock(AtlasGraph.class);
- graphHelperInstance = GraphHelper.getInstance(graph);
- }
-
- @Test(expectedExceptions = RepositoryException.class)
- public void testGetOrCreateEdgeLabelWithMaxRetries() throws Exception {
- final String edgeLabel = "testLabel";
- AtlasVertex v1 = mock(AtlasVertex.class);
- AtlasVertex v2 = mock(AtlasVertex.class);
-
- Iterable noEdgesIterable = new Iterable<AtlasEdge>() {
- @Override
- public Iterator<AtlasEdge> iterator() {
- return new Iterator<AtlasEdge>() {
- @Override
- public boolean hasNext() {
- return false;
- }
-
- @Override
- public AtlasEdge next() {
- return null;
- }
-
- @Override
- public void remove() {
- }
- };
- }
- };
- when(v2.getEdges(AtlasEdgeDirection.IN)).thenReturn(noEdgesIterable);
- when(v1.getEdges(AtlasEdgeDirection.OUT)).thenReturn(noEdgesIterable);
-
- when(v1.getId()).thenReturn("1234");
- when(v2.getId()).thenReturn("5678");
- when(graph.addEdge(v1, v2, edgeLabel)).thenThrow(new RuntimeException("Unique property constraint violated"));
- graphHelperInstance.getOrCreateEdge(v1, v2, edgeLabel);
- }
-
- @Test
- public void testGetOrCreateEdgeLabelWithRetries() throws Exception {
- final String edgeLabel = "testLabel";
- AtlasVertex v1 = mock(AtlasVertex.class);
- AtlasVertex v2 = mock(AtlasVertex.class);
- AtlasEdge edge = mock(AtlasEdge.class);
-
- Iterable noEdgesIterable = new Iterable<AtlasEdge>() {
- @Override
- public Iterator<AtlasEdge> iterator() {
- return new Iterator<AtlasEdge>() {
- @Override
- public boolean hasNext() {
- return false;
- }
-
- @Override
- public AtlasEdge next() {
- return null;
- }
-
- @Override
- public void remove() {
- }
- };
- }
- };
- when(v2.getEdges(AtlasEdgeDirection.IN)).thenReturn(noEdgesIterable);
- when(v1.getEdges(AtlasEdgeDirection.OUT)).thenReturn(noEdgesIterable);
-
- when(v1.getId()).thenReturn("v1");
- when(v2.getId()).thenReturn("v2");
- when(edge.getId()).thenReturn("edge");
- when(graph.addEdge(v1, v2, edgeLabel))
- .thenThrow(new RuntimeException("Unique property constraint violated")).thenReturn(edge);
- AtlasEdge redge = graphHelperInstance.getOrCreateEdge(v1, v2, edgeLabel);
- assertEquals(edge, redge);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/GraphHelperTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphHelperTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphHelperTest.java
deleted file mode 100644
index e64c2c8..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/GraphHelperTest.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.repository.graph.GraphHelper.VertexInfo;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.services.MetadataService;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.exception.TypeNotFoundException;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.codehaus.jettison.json.JSONArray;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import static org.testng.Assert.*;
-
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class GraphHelperTest {
-
-
- @DataProvider(name = "encodeDecodeTestData")
- private Object[][] createTestData() {
- return new Object[][]{
- {"hivedb$", "hivedb_d"},
- {"hivedb", "hivedb"},
- {"{hivedb}", "_ohivedb_c"},
- {"%hivedb}", "_phivedb_c"},
- {"\"hivedb\"", "_qhivedb_q"},
- {"\"$%{}", "_q_d_p_o_c"},
- {"", ""},
- {" ", " "},
- {"\n\r", "\n\r"},
- {null, null}
- };
- }
-
- @Inject
- private MetadataService metadataService;
-
- @Inject
- private GraphBackedMetadataRepository repositoryService;
-
- private TypeSystem typeSystem;
-
- @Inject
- private AtlasTypeRegistry typeRegistry;
-
- @BeforeClass
- public void setUp() throws Exception {
- typeSystem = TypeSystem.getInstance();
- typeSystem.reset();
-
- new GraphBackedSearchIndexer(typeRegistry);
- TypesDef typesDef = TestUtils.defineHiveTypes();
- try {
- metadataService.getTypeDefinition(TestUtils.TABLE_TYPE);
- } catch (TypeNotFoundException e) {
- metadataService.createType(TypesSerialization.toJson(typesDef));
- }
- TestUtils.defineDeptEmployeeTypes(typeSystem);
- }
-
- @AfterClass
- public void tearDown() {
-// AtlasGraphProvider.cleanup();
- }
-
- @Test
- public void testGetInstancesByUniqueAttributes() throws Exception {
-
- GraphHelper helper = GraphHelper.getInstance();
- List<ITypedReferenceableInstance> instances = new ArrayList<>();
- List<String> guids = new ArrayList<>();
- TypeSystem ts = TypeSystem.getInstance();
- ClassType dbType = ts.getDataType(ClassType.class, TestUtils.DATABASE_TYPE);
-
- for(int i = 0; i < 10; i++) {
- Referenceable db = TestUtils.createDBEntity();
- String guid = createInstance(db);
- ITypedReferenceableInstance instance = convert(db, dbType);
- instances.add(instance);
- guids.add(guid);
- }
-
- //lookup vertices via getVertexForInstanceByUniqueAttributes
- List<AtlasVertex> vertices = helper.getVerticesForInstancesByUniqueAttribute(dbType, instances);
- assertEquals(instances.size(), vertices.size());
- //assert vertex matches the vertex we get through getVertexForGUID
- for(int i = 0; i < instances.size(); i++) {
- String guid = guids.get(i);
- AtlasVertex foundVertex = vertices.get(i);
- AtlasVertex expectedVertex = helper.getVertexForGUID(guid);
- assertEquals(foundVertex, expectedVertex);
- }
- }
- @Test
- public void testGetVerticesForGUIDSWithDuplicates() throws Exception {
- ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(TypeSystem.getInstance());
- List<String> result = repositoryService.createEntities(hrDept).getCreatedEntities();
- String guid = result.get(0);
- Map<String, AtlasVertex> verticesForGUIDs = GraphHelper.getInstance().getVerticesForGUIDs(Arrays.asList(guid, guid));
- Assert.assertEquals(verticesForGUIDs.size(), 1);
- Assert.assertTrue(verticesForGUIDs.containsKey(guid));
- }
- @Test
- public void testGetCompositeGuidsAndVertices() throws Exception {
- ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(typeSystem);
- List<String> createdGuids = repositoryService.createEntities(hrDept).getCreatedEntities();
- String deptGuid = null;
- Set<String> expectedGuids = new HashSet<>();
-
- for (String guid : createdGuids) {
- ITypedReferenceableInstance entityDefinition = repositoryService.getEntityDefinition(guid);
- expectedGuids.add(guid);
- if (entityDefinition.getId().getTypeName().equals(TestUtils.DEPARTMENT_TYPE)) {
- deptGuid = guid;
- }
- }
- AtlasVertex deptVertex = GraphHelper.getInstance().getVertexForGUID(deptGuid);
- Set<VertexInfo> compositeVertices = GraphHelper.getInstance().getCompositeVertices(deptVertex);
- HashMap<String, VertexInfo> verticesByGuid = new HashMap<>();
- for (VertexInfo vertexInfo: compositeVertices) {
- verticesByGuid.put(vertexInfo.getGuid(), vertexInfo);
- }
-
- // Verify compositeVertices has entries for all expected guids.
- Assert.assertEquals(compositeVertices.size(), expectedGuids.size());
- for (String expectedGuid : expectedGuids) {
- Assert.assertTrue(verticesByGuid.containsKey(expectedGuid));
- }
- }
-
- @Test(dataProvider = "encodeDecodeTestData")
- public void testEncodeDecode(String str, String expectedEncodedStr) throws Exception {
- String encodedStr = GraphHelper.encodePropertyKey(str);
- assertEquals(encodedStr, expectedEncodedStr);
-
- String decodedStr = GraphHelper.decodePropertyKey(encodedStr);
- assertEquals(decodedStr, str);
- }
-
- @Test
- public void testGetOutgoingEdgesByLabel() throws Exception {
- AtlasGraph graph = TestUtils.getGraph();
- AtlasVertex v1 = graph.addVertex();
- AtlasVertex v2 = graph.addVertex();
- graph.addEdge(v1, v2, "l1");
- graph.addEdge(v1, v2, "l2");
-
- Iterator<AtlasEdge> iterator = GraphHelper.getInstance().getOutGoingEdgesByLabel(v1, "l1");
- assertTrue(iterator.hasNext());
- assertTrue(iterator.hasNext());
- assertNotNull(iterator.next());
- assertNull(iterator.next());
- assertFalse(iterator.hasNext());
- assertFalse(iterator.hasNext());
- }
-
- private ITypedReferenceableInstance convert(Referenceable instance, ClassType type) throws AtlasException {
-
- return type.convert(instance, Multiplicity.REQUIRED);
- }
-
- private String createInstance(Referenceable entity) throws Exception {
- TestUtils.resetRequestContext();
-
- String entityjson = InstanceSerialization.toJson(entity, true);
- JSONArray entitiesJson = new JSONArray();
- entitiesJson.put(entityjson);
- List<String> guids = metadataService.createEntities(entitiesJson.toString()).getCreatedEntities();
- if (guids != null && guids.size() > 0) {
- return guids.get(guids.size() - 1);
- }
- return null;
- }
-}
[21/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/query/QueryProcessorTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/query/QueryProcessorTest.java b/repository/src/test/java/org/apache/atlas/query/QueryProcessorTest.java
deleted file mode 100644
index 57bc74f..0000000
--- a/repository/src/test/java/org/apache/atlas/query/QueryProcessorTest.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.query;
-
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.cache.DefaultTypeCache;
-import org.testng.annotations.Test;
-import scala.util.Either;
-import scala.util.parsing.combinator.Parsers;
-
-import java.util.HashSet;
-import java.util.Set;
-
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
-import static org.testng.Assert.assertFalse;
-import static org.testng.Assert.assertTrue;
-
-/**
- * Tests the logic for skipping type cache lookup for things that
- * cannot be types.
- *
- */
-public class QueryProcessorTest {
-
-
- @Test
- public void testAliasesNotTreatedAsTypes() throws Exception {
-
- ValidatingTypeCache tc = findTypeLookupsDuringQueryParsing("hive_db as inst where inst.name=\"Reporting\" select inst as id, inst.name");
- assertTrue(tc.wasTypeRequested("hive_db"));
- assertFalse(tc.wasTypeRequested("inst"));
- assertFalse(tc.wasTypeRequested("name"));
-
- }
-
-
- @Test
- public void testFieldInComparisionNotTreatedAsType() throws Exception {
-
- //test when the IdExpression is on the left, on the right, and on both sides of the ComparsionExpression
- ValidatingTypeCache tc = findTypeLookupsDuringQueryParsing("hive_db where name=\"Reporting\" or \"Reporting\" = name or name=name");
- assertTrue(tc.wasTypeRequested("hive_db"));
- assertFalse(tc.wasTypeRequested("name"));
-
- }
-
-
- @Test
- public void testFieldInArithmeticExprNotTreatedAsType() throws Exception {
-
- //test when the IdExpression is on the left, on the right, and on both sides of the ArithmeticExpression
- ValidatingTypeCache tc = findTypeLookupsDuringQueryParsing("hive_db where (tableCount + 3) > (tableCount + tableCount) select (3 + tableCount) as updatedCount");
-
- assertTrue(tc.wasTypeRequested("hive_db"));
- assertFalse(tc.wasTypeRequested("tableCount"));
- assertFalse(tc.wasTypeRequested("updatedCount"));
-
- }
-
- @Test
- public void testFieldInSelectListWithAlasNotTreatedAsType() throws Exception {
-
- ValidatingTypeCache tc = findTypeLookupsDuringQueryParsing("hive_db select name as theName");
- assertTrue(tc.wasTypeRequested("hive_db"));
- assertFalse(tc.wasTypeRequested("theName"));
- assertFalse(tc.wasTypeRequested("name"));
-
- }
-
- @Test
- public void testFieldInSelectListNotTreatedAsType() throws Exception {
-
-
- ValidatingTypeCache tc = findTypeLookupsDuringQueryParsing("hive_db select name");
- assertTrue(tc.wasTypeRequested("hive_db"));
- assertFalse(tc.wasTypeRequested("name"));
-
- }
-
- private ValidatingTypeCache findTypeLookupsDuringQueryParsing(String query) throws AtlasException {
- TypeSystem typeSystem = TypeSystem.getInstance();
- ValidatingTypeCache result = new ValidatingTypeCache();
- typeSystem.setTypeCache(result);
- typeSystem.reset();
- HierarchicalTypeDefinition<ClassType> hiveTypeDef = createClassTypeDef("hive_db", "", ImmutableSet.<String>of(),
- createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- createRequiredAttrDef("tableCount", DataTypes.INT_TYPE)
- );
- typeSystem.defineClassType(hiveTypeDef);
-
- Either<Parsers.NoSuccess, Expressions.Expression> either = QueryParser.apply(query, null);
- Expressions.Expression expression = either.right().get();
-
- QueryProcessor.validate(expression);
-
- return result;
- }
-
- private static class ValidatingTypeCache extends DefaultTypeCache {
-
- private Set<String> typesRequested = new HashSet<>();
-
- @Override
- public boolean has(String typeName) throws AtlasException {
- typesRequested.add(typeName);
- return super.has(typeName);
- }
-
- @Override
- public boolean has(TypeCategory typeCategory, String typeName) throws AtlasException {
- typesRequested.add(typeName);
- return super.has(typeCategory, typeName);
- }
-
- @Override
- public IDataType get(String typeName) throws AtlasException {
- typesRequested.add(typeName);
- return super.get(typeName);
- }
-
- @Override
- public IDataType get(TypeCategory typeCategory, String typeName) throws AtlasException {
- typesRequested.add(typeName);
- return super.get(typeCategory, typeName);
- }
-
- public boolean wasTypeRequested(String name) {
- return typesRequested.contains(name);
- }
- }
-
-
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/BaseTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/BaseTest.java b/repository/src/test/java/org/apache/atlas/repository/BaseTest.java
deleted file mode 100755
index 7e1df67..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/BaseTest.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository;
-
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.testng.annotations.BeforeMethod;
-
-public abstract class BaseTest {
- @BeforeMethod
- public void setup() throws Exception {
- TypeSystem ts = TypeSystem.getInstance();
- ts.reset();
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/audit/AuditRepositoryTestBase.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/audit/AuditRepositoryTestBase.java b/repository/src/test/java/org/apache/atlas/repository/audit/AuditRepositoryTestBase.java
index 551e6ab..47fd9cf 100644
--- a/repository/src/test/java/org/apache/atlas/repository/audit/AuditRepositoryTestBase.java
+++ b/repository/src/test/java/org/apache/atlas/repository/audit/AuditRepositoryTestBase.java
@@ -19,8 +19,8 @@
package org.apache.atlas.repository.audit;
import org.apache.atlas.EntityAuditEvent;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.TestUtilsV2;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
@@ -34,7 +34,7 @@ public class AuditRepositoryTestBase {
protected EntityAuditRepository eventRepository;
private String rand() {
- return TestUtils.randomString(10);
+ return TestUtilsV2.randomString(10);
}
@BeforeTest
@@ -44,37 +44,34 @@ public class AuditRepositoryTestBase {
@Test
public void testAddEvents() throws Exception {
- TestUtils.skipForGremlin3EnabledGraphDb();
EntityAuditEvent event = new EntityAuditEvent(rand(), System.currentTimeMillis(), "u1",
EntityAuditEvent.EntityAuditAction.ENTITY_CREATE, "d1", new Referenceable(rand()));
eventRepository.putEvents(event);
- List<EntityAuditEvent> events =
- eventRepository.listEvents(event.getEntityId(), null, (short) 10);
+ List<EntityAuditEvent> events = eventRepository.listEvents(event.getEntityId(), null, (short) 10);
+
assertEquals(events.size(), 1);
assertEventEquals(events.get(0), event);
}
@Test
public void testListPagination() throws Exception {
- TestUtils.skipForGremlin3EnabledGraphDb();
- String id1 = "id1" + rand();
- String id2 = "id2" + rand();
- String id3 = "id3" + rand();
- long ts = System.currentTimeMillis();
- Referenceable entity = new Referenceable(rand());
+ String id1 = "id1" + rand();
+ String id2 = "id2" + rand();
+ String id3 = "id3" + rand();
+ long ts = System.currentTimeMillis();
+ Referenceable entity = new Referenceable(rand());
List<EntityAuditEvent> expectedEvents = new ArrayList<>(3);
+
for (int i = 0; i < 3; i++) {
//Add events for both ids
- EntityAuditEvent event = new EntityAuditEvent(id2, ts - i, "user" + i,
- EntityAuditEvent.EntityAuditAction.ENTITY_UPDATE, "details" + i, entity);
+ EntityAuditEvent event = new EntityAuditEvent(id2, ts - i, "user" + i, EntityAuditEvent.EntityAuditAction.ENTITY_UPDATE, "details" + i, entity);
+
eventRepository.putEvents(event);
expectedEvents.add(event);
- eventRepository.putEvents(new EntityAuditEvent(id1, ts - i, "user" + i,
- EntityAuditEvent.EntityAuditAction.TAG_DELETE, "details" + i, entity));
- eventRepository.putEvents(new EntityAuditEvent(id3, ts - i, "user" + i,
- EntityAuditEvent.EntityAuditAction.TAG_ADD, "details" + i, entity));
+ eventRepository.putEvents(new EntityAuditEvent(id1, ts - i, "user" + i, EntityAuditEvent.EntityAuditAction.TAG_DELETE, "details" + i, entity));
+ eventRepository.putEvents(new EntityAuditEvent(id3, ts - i, "user" + i, EntityAuditEvent.EntityAuditAction.TAG_ADD, "details" + i, entity));
}
//Use ts for which there is no event - ts + 2
@@ -92,8 +89,8 @@ public class AuditRepositoryTestBase {
@Test
public void testInvalidEntityId() throws Exception {
- TestUtils.skipForGremlin3EnabledGraphDb();
List<EntityAuditEvent> events = eventRepository.listEvents(rand(), null, (short) 3);
+
assertEquals(events.size(), 0);
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/audit/HBaseBasedAuditRepositoryHATest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/audit/HBaseBasedAuditRepositoryHATest.java b/repository/src/test/java/org/apache/atlas/repository/audit/HBaseBasedAuditRepositoryHATest.java
deleted file mode 100644
index f66ac74..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/audit/HBaseBasedAuditRepositoryHATest.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.audit;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.ha.HAConfiguration;
-import org.apache.commons.configuration.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.io.IOException;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.verifyZeroInteractions;
-import static org.mockito.Mockito.when;
-
-public class HBaseBasedAuditRepositoryHATest {
-
- @Mock
- private Configuration configuration;
-
- @Mock
- private org.apache.hadoop.conf.Configuration hbaseConf;
-
- @Mock
- private Connection connection;
-
- @BeforeMethod
- public void setup() {
- MockitoAnnotations.initMocks(this);
- }
-
- @Test
- public void testTableShouldNotBeCreatedOnStartIfHAIsEnabled() throws IOException, AtlasException {
- when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
- when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
- when(configuration.getString(HBaseBasedAuditRepository.CONFIG_TABLE_NAME,
- HBaseBasedAuditRepository.DEFAULT_TABLE_NAME)).
- thenReturn(HBaseBasedAuditRepository.DEFAULT_TABLE_NAME);
- HBaseBasedAuditRepository auditRepository = new HBaseBasedAuditRepository() {
- @Override
- protected Connection createConnection(org.apache.hadoop.conf.Configuration hbaseConf) {
- return connection;
- }
- };
- auditRepository.startInternal(configuration, hbaseConf);
-
- verifyZeroInteractions(connection);
- }
-
- @Test
- public void testShouldCreateTableWhenReactingToActive() throws AtlasException, IOException {
- when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
- when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
- when(configuration.getString(HBaseBasedAuditRepository.CONFIG_TABLE_NAME,
- HBaseBasedAuditRepository.DEFAULT_TABLE_NAME)).
- thenReturn(HBaseBasedAuditRepository.DEFAULT_TABLE_NAME);
- TableName tableName = TableName.valueOf(HBaseBasedAuditRepository.DEFAULT_TABLE_NAME);
- Admin admin = mock(Admin.class);
- when(connection.getAdmin()).thenReturn(admin);
- when(admin.tableExists(tableName)).thenReturn(true);
- HBaseBasedAuditRepository auditRepository = new HBaseBasedAuditRepository() {
- @Override
- protected Connection createConnection(org.apache.hadoop.conf.Configuration hbaseConf) {
- return connection;
- }
- };
- auditRepository.startInternal(configuration, hbaseConf);
- auditRepository.instanceIsActive();
-
- verify(connection).getAdmin();
- verify(admin).tableExists(tableName);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/audit/HBaseBasedAuditRepositoryTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/audit/HBaseBasedAuditRepositoryTest.java b/repository/src/test/java/org/apache/atlas/repository/audit/HBaseBasedAuditRepositoryTest.java
deleted file mode 100644
index a9dfee3..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/audit/HBaseBasedAuditRepositoryTest.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.audit;
-
-import org.apache.atlas.ApplicationProperties;
-import org.apache.atlas.EntityAuditEvent;
-import org.apache.atlas.TestUtils;
-import org.apache.commons.configuration.Configuration;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import static org.testng.Assert.assertNull;
-import static org.testng.Assert.assertTrue;
-
-public class HBaseBasedAuditRepositoryTest extends AuditRepositoryTestBase {
- private TableName tableName;
-
- @BeforeClass
- public void setup() throws Exception {
- //ATLAS-1591 Currently, some tests are skipped for titan1 backened. As these tests are hard coded to use Gremlin2. See ATLAS-1591 once it is fixed, please remove it.
-
- TestUtils.skipForGremlin3EnabledGraphDb();
- eventRepository = new HBaseBasedAuditRepository();
- HBaseTestUtils.startCluster();
- ((HBaseBasedAuditRepository) eventRepository).start();
-
- Configuration properties = ApplicationProperties.get();
- String tableNameStr = properties.getString(HBaseBasedAuditRepository.CONFIG_TABLE_NAME,
- HBaseBasedAuditRepository.DEFAULT_TABLE_NAME);
- tableName = TableName.valueOf(tableNameStr);
- }
-
- @AfterClass
- public void teardown() throws Exception {
- ((HBaseBasedAuditRepository) eventRepository).stop();
- HBaseTestUtils.stopCluster();
- }
-
- @Test
- public void testTableCreated() throws Exception {
- TestUtils.skipForGremlin3EnabledGraphDb();
- Connection connection = HBaseTestUtils.getConnection();
- Admin admin = connection.getAdmin();
- assertTrue(admin.tableExists(tableName));
- }
-
- @Override
- protected void assertEventEquals(EntityAuditEvent actual, EntityAuditEvent expected) {
- super.assertEventEquals(actual, expected);
- assertNull(actual.getEntityDefinition());
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/audit/HBaseTestUtils.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/audit/HBaseTestUtils.java b/repository/src/test/java/org/apache/atlas/repository/audit/HBaseTestUtils.java
deleted file mode 100644
index 984acfe..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/audit/HBaseTestUtils.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.audit;
-
-import org.apache.atlas.ApplicationProperties;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.LocalHBaseCluster;
-import org.apache.hadoop.hbase.client.Connection;
-
-import java.io.IOException;
-
-public class HBaseTestUtils {
- private static HBaseTestingUtility hbaseTestUtility;
- private static LocalHBaseCluster hbaseCluster;
-
- public static void startCluster() throws Exception {
- Configuration hbaseConf =
- HBaseBasedAuditRepository.getHBaseConfiguration(ApplicationProperties.get());
- hbaseTestUtility = HBaseTestingUtility.createLocalHTU(hbaseConf);
- int zkPort = hbaseConf.getInt("hbase.zookeeper.property.clientPort", 19026);
- hbaseTestUtility.startMiniZKCluster(1, zkPort);
-
- hbaseCluster = new LocalHBaseCluster(hbaseTestUtility.getConfiguration());
- hbaseCluster.startup();
- }
-
- public static void stopCluster() throws Exception {
- hbaseTestUtility.getConnection().close();
- hbaseCluster.shutdown();
- hbaseTestUtility.shutdownMiniZKCluster();
- }
-
- public static Connection getConnection() throws IOException {
- return hbaseTestUtility.getConnection();
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/AbstractGremlinQueryOptimizerTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/AbstractGremlinQueryOptimizerTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/AbstractGremlinQueryOptimizerTest.java
deleted file mode 100644
index 88de2c4..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/AbstractGremlinQueryOptimizerTest.java
+++ /dev/null
@@ -1,708 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.apache.atlas.gremlin.optimizer.GremlinQueryOptimizer;
-import org.apache.atlas.gremlin.optimizer.RangeFinder;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.IdentifierExpression;
-import org.apache.atlas.groovy.LiteralExpression;
-import org.apache.atlas.groovy.TraversalStepType;
-import org.apache.atlas.query.GraphPersistenceStrategies;
-import org.apache.atlas.query.TypeUtils.FieldInfo;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.GremlinVersion;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-
-
-public abstract class AbstractGremlinQueryOptimizerTest implements IAtlasGraphProvider {
-
- protected abstract GremlinExpressionFactory getFactory();
-
- private MetadataRepository repo;
- private final GraphPersistenceStrategies STRATEGY = mock(GraphPersistenceStrategies.class);
-
- @BeforeClass
- public void setUp() throws RepositoryException {
- GremlinQueryOptimizer.reset();
- GremlinQueryOptimizer.setExpressionFactory(getFactory());
- when(STRATEGY.typeAttributeName()).thenReturn(Constants.ENTITY_TYPE_PROPERTY_KEY);
- when(STRATEGY.superTypeAttributeName()).thenReturn(Constants.SUPER_TYPES_PROPERTY_KEY);
- repo = new GraphBackedMetadataRepository(new HardDeleteHandler(TypeSystem.getInstance()), this.get());
- }
-
- private FieldInfo getTestFieldInfo() throws AtlasException {
- AttributeDefinition def = new AttributeDefinition("foo", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null);
- AttributeInfo attrInfo = new AttributeInfo(TypeSystem.getInstance(), def, null);
- return new FieldInfo(DataTypes.STRING_TYPE, attrInfo, null, null);
- }
-
- private GroovyExpression getVerticesExpression() {
- IdentifierExpression g = new IdentifierExpression("g");
- return new FunctionCallExpression(TraversalStepType.START, g, "V");
- }
-
-
- @Test
- public void testPullHasExpressionsOutOfAnd() throws AtlasException {
-
- GroovyExpression expr1 = makeOutExpression(null, "out1");
- GroovyExpression expr2 = makeOutExpression(null, "out2");
- GroovyExpression expr3 = makeHasExpression("prop1","Fred");
- GroovyExpression expr4 = makeHasExpression("prop2","George");
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "and", Arrays.asList(expr1, expr2, expr3, expr4));
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestPullHasExpressionsOutOfHas());
- }
-
- protected abstract String getExpectedGremlinForTestPullHasExpressionsOutOfHas();
-
-
- @Test
- public void testOrGrouping() throws AtlasException {
- GroovyExpression expr1 = makeOutExpression(null, "out1");
- GroovyExpression expr2 = makeOutExpression(null, "out2");
- GroovyExpression expr3 = makeHasExpression("prop1","Fred");
- GroovyExpression expr4 = makeHasExpression("prop2","George");
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "or", Arrays.asList(expr1, expr2, expr3, expr4));
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestOrGrouping());
- }
-
- protected abstract String getExpectedGremlinForTestOrGrouping();
-
-
- @Test
- public void testAndOfOrs() throws AtlasException {
-
- GroovyExpression or1Cond1 = makeHasExpression("p1","e1");
- GroovyExpression or1Cond2 = makeHasExpression("p2","e2");
- GroovyExpression or2Cond1 = makeHasExpression("p3","e3");
- GroovyExpression or2Cond2 = makeHasExpression("p4","e4");
-
- GroovyExpression or1 = getFactory().generateLogicalExpression(null, "or", Arrays.asList(or1Cond1, or1Cond2));
- GroovyExpression or2 = getFactory().generateLogicalExpression(null, "or", Arrays.asList(or2Cond1, or2Cond2));
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "and", Arrays.asList(or1, or2));
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestAndOfOrs());
-
- }
-
- protected abstract String getExpectedGremlinForTestAndOfOrs();
-
- @Test
- public void testAndWithMultiCallArguments() throws AtlasException {
-
- GroovyExpression cond1 = makeHasExpression("p1","e1");
- GroovyExpression cond2 = makeHasExpression(cond1, "p2","e2");
- GroovyExpression cond3 = makeHasExpression("p3","e3");
- GroovyExpression cond4 = makeHasExpression(cond3, "p4","e4");
-
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "and", Arrays.asList(cond2, cond4));
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestAndWithMultiCallArguments());
- }
-
-
- protected abstract String getExpectedGremlinForTestAndWithMultiCallArguments();
-
- @Test
- public void testOrOfAnds() throws AtlasException {
-
- GroovyExpression or1Cond1 = makeHasExpression("p1","e1");
- GroovyExpression or1Cond2 = makeHasExpression("p2","e2");
- GroovyExpression or2Cond1 = makeHasExpression("p3","e3");
- GroovyExpression or2Cond2 = makeHasExpression("p4","e4");
-
- GroovyExpression or1 = getFactory().generateLogicalExpression(null, "and", Arrays.asList(or1Cond1, or1Cond2));
- GroovyExpression or2 = getFactory().generateLogicalExpression(null, "and", Arrays.asList(or2Cond1, or2Cond2));
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "or", Arrays.asList(or1, or2));
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestOrOfAnds());
- }
-
- protected abstract String getExpectedGremlinForTestOrOfAnds();
-
- @Test
- public void testHasNotMovedToResult() throws AtlasException {
- GroovyExpression toOptimize = getVerticesExpression();
- GroovyExpression or1Cond1 = makeHasExpression("p1","e1");
- GroovyExpression or1Cond2 = makeHasExpression("p2","e2");
-
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(or1Cond1, or1Cond2));
- toOptimize = makeHasExpression(toOptimize, "p3","e3");
- toOptimize = getFactory().generateAliasExpression(toOptimize, "_src");
- toOptimize = getFactory().generateSelectExpression(toOptimize, Collections.singletonList(new LiteralExpression("src1")), Collections.<GroovyExpression>singletonList(new IdentifierExpression("it")));
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(),
- getExpectedGremlinForTestHasNotMovedToResult());
- }
-
- protected abstract String getExpectedGremlinForTestHasNotMovedToResult();
-
- @Test
- public void testOptimizeLoopExpression() throws AtlasException {
-
-
- GroovyExpression input = getVerticesExpression();
- input = getFactory().generateTypeTestExpression(STRATEGY, input, "DataSet", TestIntSequence.INSTANCE).get(0);
- input = makeHasExpression(input, "name","Fred");
- input = getFactory().generateAliasExpression(input, "label");
-
-
- GroovyExpression loopExpr = getFactory().getLoopExpressionParent(input);
- loopExpr = getFactory().generateAdjacentVerticesExpression(loopExpr, AtlasEdgeDirection.IN, "inputTables");
- loopExpr = getFactory().generateAdjacentVerticesExpression(loopExpr, AtlasEdgeDirection.OUT, "outputTables");
- GroovyExpression result = getFactory().generateLoopExpression(input, STRATEGY, DataTypes.STRING_TYPE, loopExpr, "label", null);
- result = getFactory().generateToListExpression(result);
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(result);
-
- assertEquals(optimized.toString(), getExpectedGremlinForOptimizeLoopExpression());
- }
-
- protected abstract String getExpectedGremlinForOptimizeLoopExpression();
-
- @Test
- public void testLongStringEndingWithOr() throws AtlasException {
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = makeHasExpression(toOptimize, "name","Fred");
- toOptimize = makeHasExpression(toOptimize, "age","13");
- toOptimize = makeOutExpression(toOptimize, "livesIn");
- toOptimize = makeHasExpression(toOptimize, "state","Massachusetts");
-
- GroovyExpression or1cond1 = makeHasExpression("p1", "e1");
- GroovyExpression or1cond2 = makeHasExpression("p2", "e2");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(or1cond1, or1cond2));
-
- GroovyExpression or2cond1 = makeHasExpression("p3", "e3");
- GroovyExpression or2cond2 = makeHasExpression("p4", "e4");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(or2cond1, or2cond2));
- toOptimize = makeHasExpression(toOptimize, "p5","e5");
- toOptimize = makeHasExpression(toOptimize, "p6","e6");
- GroovyExpression or3cond1 = makeHasExpression("p7", "e7");
- GroovyExpression or3cond2 = makeHasExpression("p8", "e8");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(or3cond1, or3cond2));
-
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestLongStringEndingWithOr());
- }
-
- protected abstract String getExpectedGremlinForTestLongStringEndingWithOr();
-
- @Test
- public void testLongStringNotEndingWithOr() throws AtlasException {
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = makeHasExpression(toOptimize, "name","Fred");
- toOptimize = makeHasExpression(toOptimize, "age","13");
- toOptimize = makeOutExpression(toOptimize, "livesIn");
- toOptimize = makeHasExpression(toOptimize, "state","Massachusetts");
-
- GroovyExpression or1cond1 = makeHasExpression("p1", "e1");
- GroovyExpression or1cond2 = makeHasExpression("p2", "e2");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(or1cond1, or1cond2));
-
- GroovyExpression or2cond1 = makeHasExpression("p3", "e3");
- GroovyExpression or2cond2 = makeHasExpression("p4", "e4");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(or2cond1, or2cond2));
- toOptimize = makeHasExpression(toOptimize, "p5","e5");
- toOptimize = makeHasExpression(toOptimize, "p6","e6");
- GroovyExpression or3cond1 = makeHasExpression("p7", "e7");
- GroovyExpression or3cond2 = makeHasExpression("p8", "e8");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(or3cond1, or3cond2));
- toOptimize = makeHasExpression(toOptimize, "p9","e9");
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestLongStringNotEndingWithOr());
- }
-
- protected abstract String getExpectedGremlinForTestLongStringNotEndingWithOr();
-
- @Test
- public void testToListConversion() throws AtlasException {
-
- GroovyExpression expr1 = makeHasExpression("prop1","Fred");
- GroovyExpression expr2 = makeHasExpression("prop2","George");
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "or", Arrays.asList(expr1, expr2));
- toOptimize = new FunctionCallExpression(TraversalStepType.END, toOptimize,"toList");
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestToListConversion());
- }
-
- protected abstract String getExpectedGremlinForTestToListConversion();
-
- @Test
- public void testToListWithExtraStuff() throws AtlasException {
-
- GroovyExpression expr1 = makeHasExpression("prop1","Fred");
- GroovyExpression expr2 = makeHasExpression("prop2","George");
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "or", Arrays.asList(expr1, expr2));
- toOptimize = new FunctionCallExpression(TraversalStepType.END, toOptimize,"toList");
- toOptimize = new FunctionCallExpression(toOptimize,"size");
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestToListWithExtraStuff());
-
- }
-
- protected abstract String getExpectedGremlinForTestToListWithExtraStuff();
-
- public void testAddClosureWithExitExpressionDifferentFromExpr() throws AtlasException {
-
- GroovyExpression expr1 = makeHasExpression("prop1","Fred");
- GroovyExpression expr2 = makeHasExpression("prop2","George");
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "or", Arrays.asList(expr1, expr2));
- toOptimize = makeOutExpression(toOptimize, "knows");
- toOptimize = makeOutExpression(toOptimize, "livesIn");
- toOptimize = new FunctionCallExpression(TraversalStepType.END, toOptimize,"toList");
- toOptimize = new FunctionCallExpression(toOptimize,"size");
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestAddClosureWithExitExpressionDifferentFromExpr());
-
- }
-
- protected abstract String getExpectedGremlinForTestAddClosureWithExitExpressionDifferentFromExpr();
-
- @Test
- public void testAddClosureNoExitExpression() throws AtlasException {
-
- GroovyExpression expr1 = makeHasExpression("prop1","Fred");
- GroovyExpression expr2 = makeHasExpression("prop2","George");
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "or", Arrays.asList(expr1, expr2));
- toOptimize = makeOutExpression(toOptimize, "knows");
- toOptimize = makeOutExpression(toOptimize, "livesIn");
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestAddClosureNoExitExpression());
- }
-
- protected abstract String getExpectedGremlinForTestAddClosureNoExitExpression();
-
-
- private GroovyExpression makeOutExpression(GroovyExpression parent, String label) {
- return getFactory().generateAdjacentVerticesExpression(parent, AtlasEdgeDirection.OUT, label);
- }
-
- @Test
- public void testAddClosureWithExitExpressionEqualToExpr() throws AtlasException {
-
- GroovyExpression expr1 = makeHasExpression("prop1","Fred");
- GroovyExpression expr2 = makeHasExpression("prop2","George");
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "or", Arrays.asList(expr1, expr2));
-
- toOptimize = makeOutExpression(toOptimize, "knows");
- toOptimize = makeOutExpression(toOptimize, "livesIn");
- toOptimize = new FunctionCallExpression(TraversalStepType.END, toOptimize,"toList");
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestAddClosureWithExitExpressionEqualToExpr());
- }
-
- protected abstract String getExpectedGremlinForTestAddClosureWithExitExpressionEqualToExpr();
-
-
- @Test
- public void testClosureNotCreatedWhenNoOrs() throws AtlasException {
-
- GroovyExpression expr1 = makeHasExpression("prop1","Fred");
- GroovyExpression expr2 = makeHasExpression("prop2","George");
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "and", Arrays.asList(expr1, expr2));
- toOptimize = makeOutExpression(toOptimize, "knows");
- toOptimize = makeOutExpression(toOptimize, "livesIn");
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestClosureNotCreatedWhenNoOrs());
- }
-
- protected abstract String getExpectedGremlinForTestClosureNotCreatedWhenNoOrs();
-
-
- private GroovyExpression makeHasExpression(String name, String value) throws AtlasException {
- return makeHasExpression(null, name, value);
- }
- private GroovyExpression makeHasExpression(GroovyExpression parent, String name, String value) throws AtlasException {
- return getFactory().generateHasExpression(STRATEGY, parent, name, "=", new LiteralExpression(value), getTestFieldInfo());
- }
- private GroovyExpression makeFieldExpression(GroovyExpression parent, String fieldName) throws AtlasException {
- return getFactory().generateFieldExpression(parent, getTestFieldInfo(), fieldName, false);
- }
-
- @Test
- public void testOrFollowedByAnd() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = makeHasExpression("name","George");
- GroovyExpression expr3 = makeHasExpression("age","13");
- GroovyExpression expr4 = makeHasExpression("age","14");
-
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "or", Arrays.asList(expr1,expr2));
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "and", Arrays.asList(expr3, expr4));
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestOrFollowedByAnd());
- }
-
- protected abstract String getExpectedGremlinForTestOrFollowedByAnd();
-
- @Test
- public void testOrFollowedByOr() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = makeHasExpression("name","George");
- GroovyExpression expr3 = makeHasExpression("age","13");
- GroovyExpression expr4 = makeHasExpression("age","14");
-
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "or", Arrays.asList(expr1,expr2));
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr3, expr4));
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestOrFollowedByOr());
- }
-
- protected abstract String getExpectedGremlinForTestOrFollowedByOr();
-
- @Test
- public void testMassiveOrExpansion() throws AtlasException {
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = makeHasExpression(toOptimize, "h1","h2");
- toOptimize = makeHasExpression(toOptimize, "h3","h4");
- for(int i = 0; i < 5; i++) {
- GroovyExpression expr1 = makeHasExpression("p1" + i,"e1" + i);
- GroovyExpression expr2 = makeHasExpression("p2" + i,"e2" + i);
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr1,expr2));
- toOptimize = makeHasExpression(toOptimize, "ha" + i,"hb" + i);
- toOptimize = makeHasExpression(toOptimize, "hc" + i,"hd" + i);
- }
- toOptimize = makeHasExpression(toOptimize, "h5","h6");
- toOptimize = makeHasExpression(toOptimize, "h7","h8");
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestMassiveOrExpansion());
- }
-
- protected abstract String getExpectedGremlinForTestMassiveOrExpansion();
-
- @Test
- public void testAndFollowedByAnd() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = makeHasExpression("name","George");
- GroovyExpression expr3 = makeHasExpression("age","13");
- GroovyExpression expr4 = makeHasExpression("age","14");
-
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "and", Arrays.asList(expr1,expr2));
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "and", Arrays.asList(expr3, expr4));
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestAndFollowedByAnd());
-
-
- }
-
- protected abstract String getExpectedGremlinForTestAndFollowedByAnd();
-
- @Test
- public void testAndFollowedByOr() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = makeHasExpression("name","George");
- GroovyExpression expr3 = makeHasExpression("age","13");
- GroovyExpression expr4 = makeHasExpression("age","14");
-
- GroovyExpression toOptimize = getFactory().generateLogicalExpression(getVerticesExpression(), "and", Arrays.asList(expr1,expr2));
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr3, expr4));
-
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestAndFollowedByOr());
- }
-
- protected abstract String getExpectedGremlinForTestAndFollowedByOr();
-
- @Test
- public void testInitialAlias() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = makeHasExpression("name","George");
-
-
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = getFactory().generateAliasExpression(toOptimize, "x");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr1, expr2));
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestInitialAlias());
- }
-
- protected abstract String getExpectedGremlinForTestInitialAlias();
-
- @Test
- public void testFinalAlias() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = makeHasExpression("name","George");
-
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr1, expr2));
- toOptimize = getFactory().generateAliasExpression(toOptimize, "x");
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestFinalAlias());
- }
-
- protected abstract String getExpectedGremlinForTestFinalAlias();
-
- @Test
- public void testAliasInMiddle() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = makeHasExpression("name","George");
- GroovyExpression expr3 = makeHasExpression("age","13");
- GroovyExpression expr4 = makeHasExpression("age","14");
-
-
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr1, expr2));
- toOptimize = getFactory().generateAliasExpression(toOptimize, "x");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr3, expr4));
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestAliasInMiddle());
- }
-
- protected abstract String getExpectedGremlinForTestAliasInMiddle();
-
- @Test
- public void testMultipleAliases() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = makeHasExpression("name","George");
- GroovyExpression expr3 = makeHasExpression("age","13");
- GroovyExpression expr4 = makeHasExpression("age","14");
-
-
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr1, expr2));
- toOptimize = getFactory().generateAliasExpression(toOptimize, "x");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr3, expr4));
- toOptimize = getFactory().generateAliasExpression(toOptimize, "y");
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGreminForTestMultipleAliases());
- }
-
- protected abstract String getExpectedGreminForTestMultipleAliases();
-
- @Test
- public void testAliasInOrExpr() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = getFactory().generateAliasExpression(makeHasExpression("name","George"), "george");
-
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr1, expr2));
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestAliasInOrExpr());
- }
-
- protected abstract String getExpectedGremlinForTestAliasInOrExpr();
-
- @Test
- public void testAliasInAndExpr() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = getFactory().generateAliasExpression(makeHasExpression("name","George"), "george");
-
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "and", Arrays.asList(expr1, expr2));
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- //expression with alias cannot currently be pulled out of the and
- assertEquals(optimized.toString(), getExpectedGremlinForTestAliasInAndExpr());
- }
-
-
- protected abstract String getExpectedGremlinForTestAliasInAndExpr();
- @Test
- public void testFlatMapExprInAnd() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = makeHasExpression(makeOutExpression(null,"knows"), "name","George");
-
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "and", Arrays.asList(expr1, expr2));
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestFlatMapExprInAnd());
- }
-
-
- protected abstract String getExpectedGremlinForTestFlatMapExprInAnd();
- @Test
- public void testFlatMapExprInOr() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = makeHasExpression(makeOutExpression(null,"knows"), "name","George");
-
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr1, expr2));
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestFlatMapExprInOr());
- }
-
- protected abstract String getExpectedGremlinForTestFlatMapExprInOr();
-
- @Test
- public void testFieldExpressionPushedToResultExpression() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression("name","Fred");
- GroovyExpression expr2 = makeHasExpression(makeOutExpression(null,"knows"), "name","George");
-
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr1, expr2));
- toOptimize = makeFieldExpression(toOptimize, "name");
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestFieldExpressionPushedToResultExpression());
- }
-
- protected abstract String getExpectedGremlinForTestFieldExpressionPushedToResultExpression();
-
- @Test
- public void testOrWithNoChildren() throws AtlasException {
- GroovyExpression toOptimize = getVerticesExpression();
- GroovyExpression expr1 = makeHasExpression(toOptimize, "name","Fred");
-
- toOptimize = getFactory().generateLogicalExpression(expr1, "or", Collections.<GroovyExpression>emptyList());
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- //or with no children matches no vertices
- assertEquals(optimized.toString(), getExpectedGremlinFortestOrWithNoChildren());
- }
-
- protected abstract String getExpectedGremlinFortestOrWithNoChildren();
-
- @Test
- public void testFinalAliasNeeded() throws AtlasException {
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = makeHasExpression(toOptimize, "name", "Fred");
- toOptimize = getFactory().generateAliasExpression(toOptimize, "person");
- toOptimize = makeOutExpression(toOptimize, "livesIn");
- GroovyExpression isChicago = makeHasExpression(null, "name", "Chicago");
- GroovyExpression isBoston = makeHasExpression(null, "name", "Boston");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(isChicago, isBoston));
- toOptimize = getFactory().generateAliasExpression(toOptimize, "city");
- toOptimize = makeOutExpression(toOptimize, "state");
- toOptimize = makeHasExpression(toOptimize, "name", "Massachusetts");
- toOptimize = getFactory().generatePathExpression(toOptimize);
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestFinalAliasNeeded());
- }
-
- protected abstract String getExpectedGremlinForTestFinalAliasNeeded();
-
- @Test
- public void testSimpleRangeExpression() throws AtlasException {
- GroovyExpression expr1 = makeHasExpression(null, "name","Fred");
- GroovyExpression expr2 = makeHasExpression(null, "name","George");
- GroovyExpression expr3 = makeHasExpression(null, "age","34");
- GroovyExpression expr4 = makeHasExpression(null, "size","small");
-
- GroovyExpression toOptimize = getVerticesExpression();
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr1, expr2));
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "and", Collections.singletonList(expr3));
- toOptimize = getFactory().generateAdjacentVerticesExpression(toOptimize, AtlasEdgeDirection.OUT, "eats");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "and", Collections.singletonList(expr4));
- toOptimize = makeHasExpression(toOptimize, "color","blue");
- toOptimize = getFactory().generateRangeExpression(toOptimize, 0, 10);
- toOptimize = new FunctionCallExpression(TraversalStepType.END, toOptimize, "toList");
- toOptimize = new FunctionCallExpression(toOptimize, "size");
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestSimpleRangeExpression());
- }
-
- protected abstract String getExpectedGremlinForTestSimpleRangeExpression();
-
-
- @Test
- public void testRangeWithNonZeroOffset() throws Exception {
- // g.V().or(has('__typeName','OMAS_OMRSAsset'),has('__superTypeNames','OMAS_OMRSAsset')).range(5,10).as('inst').select('inst')
- GroovyExpression toOptimize = getVerticesExpression();
-
- GroovyExpression expr0 = makeHasExpression("__typeName", "OMAS_OMRSAsset");
- GroovyExpression expr1 = makeHasExpression("__superTypeNames", "OMAS_OMRSAsset");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr0, expr1));
- toOptimize = getFactory().generateRangeExpression(toOptimize, 5, 10);
- toOptimize = getFactory().generateAliasExpression(toOptimize, "inst");
- toOptimize = getFactory().generateSelectExpression(toOptimize, Collections.singletonList(new LiteralExpression("inst")), Collections.<GroovyExpression>emptyList());
- RangeFinder visitor = new RangeFinder(getFactory());
- GremlinQueryOptimizer.visitCallHierarchy(toOptimize, visitor);
- List<AbstractFunctionExpression> rangeExpressions = visitor.getRangeExpressions();
- assertEquals(rangeExpressions.size(), 1);
- int[] rangeParameters = getFactory().getRangeParameters(rangeExpressions.get(0));
- assertNotNull(rangeParameters);
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- // The range optimization is not supported with a non-zero start index, so the optimizer should not add range expressions
- // to the expanded or's.
- assertEquals(optimized.toString(), getExpectedGremlinForTestRangeWithNonZeroOffset());
- }
-
- protected abstract String getExpectedGremlinForTestRangeWithNonZeroOffset();
-
- @Test
- public void testRangeWithOrderBy() throws Exception {
- // The range optimization is not supported with order, so the optimizer should not add range expressions
- // to the expanded or's.
- GroovyExpression toOptimize = getVerticesExpression();
-
- GroovyExpression expr0 = makeHasExpression("__typeName", "OMAS_OMRSAsset");
- GroovyExpression expr1 = makeHasExpression("__superTypeNames", "OMAS_OMRSAsset");
- toOptimize = getFactory().generateLogicalExpression(toOptimize, "or", Arrays.asList(expr0, expr1));
- toOptimize = getFactory().generateRangeExpression(toOptimize, 5, 10);
- toOptimize = getFactory().generateAliasExpression(toOptimize, "inst");
- //toOptimize = getFactory().generateSelectExpression(toOptimize, Collections.singletonList(new LiteralExpression("inst")), Collections.<GroovyExpression>emptyList());
- GroovyExpression orderFielda = makeFieldExpression(getFactory().getCurrentTraverserObject(getFactory().getClosureArgumentValue()), "name");
- GroovyExpression orderFieldb = makeFieldExpression(getFactory().getCurrentTraverserObject(getFactory().getClosureArgumentValue()), "name");
- toOptimize = getFactory().generateOrderByExpression(toOptimize,Arrays.asList(orderFielda, orderFieldb), true);
- RangeFinder visitor = new RangeFinder(getFactory());
- GremlinQueryOptimizer.visitCallHierarchy(toOptimize, visitor);
- List<AbstractFunctionExpression> rangeExpressions = visitor.getRangeExpressions();
- assertEquals(rangeExpressions.size(), 1);
- int[] rangeParameters = getFactory().getRangeParameters(rangeExpressions.get(0));
- assertNotNull(rangeParameters);
- GroovyExpression optimized = GremlinQueryOptimizer.getInstance().optimize(toOptimize);
- assertEquals(optimized.toString(), getExpectedGremlinForTestRangeWithOrderBy());
- }
-
-
-
- protected abstract String getExpectedGremlinForTestRangeWithOrderBy();
-
- @Override
- public AtlasGraph get() throws RepositoryException {
- AtlasGraph graph = mock(AtlasGraph.class);
- when(graph.getSupportedGremlinVersion()).thenReturn(GremlinVersion.THREE);
- when(graph.isPropertyValueConversionNeeded(any(IDataType.class))).thenReturn(false);
- return graph;
- }
-}
[37/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplication of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationBaseMessage.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationBaseMessage.java b/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationBaseMessage.java
deleted file mode 100644
index 3b377de..0000000
--- a/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationBaseMessage.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.notification;
-
-
-import org.apache.atlas.AtlasConfiguration;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.codec.binary.StringUtils;
-import org.apache.commons.compress.utils.IOUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.zip.GZIPInputStream;
-import java.util.zip.GZIPOutputStream;
-
-
-public class AtlasNotificationBaseMessage {
- private static final Logger LOG = LoggerFactory.getLogger(AtlasNotificationBaseMessage.class);
-
- public static final int MESSAGE_MAX_LENGTH_BYTES = AtlasConfiguration.NOTIFICATION_MESSAGE_MAX_LENGTH_BYTES.getInt() - 512; // 512 bytes for envelop;
- public static final boolean MESSAGE_COMPRESSION_ENABLED = AtlasConfiguration.NOTIFICATION_MESSAGE_COMPRESSION_ENABLED.getBoolean();
-
- public enum CompressionKind { NONE, GZIP };
-
- private MessageVersion version = null;
- private String msgId = null;
- private CompressionKind msgCompressionKind = CompressionKind.NONE;
- private int msgSplitIdx = 1;
- private int msgSplitCount = 1;
-
-
- public AtlasNotificationBaseMessage() {
- }
-
- public AtlasNotificationBaseMessage(MessageVersion version) {
- this(version, null, CompressionKind.NONE);
- }
-
- public AtlasNotificationBaseMessage(MessageVersion version, String msgId, CompressionKind msgCompressionKind) {
- this.version = version;
- this.msgId = msgId;
- this.msgCompressionKind = msgCompressionKind;
- }
-
- public AtlasNotificationBaseMessage(MessageVersion version, String msgId, CompressionKind msgCompressionKind, int msgSplitIdx, int msgSplitCount) {
- this.version = version;
- this.msgId = msgId;
- this.msgCompressionKind = msgCompressionKind;
- this.msgSplitIdx = msgSplitIdx;
- this.msgSplitCount = msgSplitCount;
- }
-
- public void setVersion(MessageVersion version) {
- this.version = version;
- }
-
- public MessageVersion getVersion() {
- return version;
- }
-
- public String getMsgId() {
- return msgId;
- }
-
- public void setMsgId(String msgId) {
- this.msgId = msgId;
- }
-
- public CompressionKind getMsgCompressionKind() {
- return msgCompressionKind;
- }
-
- public void setMsgCompressed(CompressionKind msgCompressionKind) {
- this.msgCompressionKind = msgCompressionKind;
- }
-
- public int getMsgSplitIdx() {
- return msgSplitIdx;
- }
-
- public void setMsgSplitIdx(int msgSplitIdx) {
- this.msgSplitIdx = msgSplitIdx;
- }
-
- public int getMsgSplitCount() {
- return msgSplitCount;
- }
-
- public void setMsgSplitCount(int msgSplitCount) {
- this.msgSplitCount = msgSplitCount;
- }
-
- /**
- * Compare the version of this message with the given version.
- *
- * @param compareToVersion the version to compare to
- *
- * @return a negative integer, zero, or a positive integer as this message's version is less than, equal to,
- * or greater than the given version.
- */
- public int compareVersion(MessageVersion compareToVersion) {
- return version.compareTo(compareToVersion);
- }
-
-
- public static byte[] getBytesUtf8(String str) {
- return StringUtils.getBytesUtf8(str);
- }
-
- public static String getStringUtf8(byte[] bytes) {
- return StringUtils.newStringUtf8(bytes);
- }
-
- public static byte[] encodeBase64(byte[] bytes) {
- return Base64.encodeBase64(bytes);
- }
-
- public static byte[] decodeBase64(byte[] bytes) {
- return Base64.decodeBase64(bytes);
- }
-
- public static byte[] gzipCompressAndEncodeBase64(byte[] bytes) {
- return encodeBase64(gzipCompress(bytes));
- }
-
- public static byte[] decodeBase64AndGzipUncompress(byte[] bytes) {
- return gzipUncompress(decodeBase64(bytes));
- }
-
- public static String gzipCompress(String str) {
- byte[] bytes = getBytesUtf8(str);
- byte[] compressedBytes = gzipCompress(bytes);
- byte[] encodedBytes = encodeBase64(compressedBytes);
-
- return getStringUtf8(encodedBytes);
- }
-
- public static String gzipUncompress(String str) {
- byte[] encodedBytes = getBytesUtf8(str);
- byte[] compressedBytes = decodeBase64(encodedBytes);
- byte[] bytes = gzipUncompress(compressedBytes);
-
- return getStringUtf8(bytes);
- }
-
- public static byte[] gzipCompress(byte[] content) {
- ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
-
- try {
- GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream);
-
- gzipOutputStream.write(content);
- gzipOutputStream.close();
- } catch (IOException e) {
- LOG.error("gzipCompress(): error compressing {} bytes", content.length, e);
-
- throw new RuntimeException(e);
- }
-
- return byteArrayOutputStream.toByteArray();
- }
-
- public static byte[] gzipUncompress(byte[] content) {
- ByteArrayOutputStream out = new ByteArrayOutputStream();
-
- try {
- IOUtils.copy(new GZIPInputStream(new ByteArrayInputStream(content)), out);
- } catch (IOException e) {
- LOG.error("gzipUncompress(): error uncompressing {} bytes", content.length, e);
- }
-
- return out.toByteArray();
- }
-}
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationMessage.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationMessage.java b/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationMessage.java
deleted file mode 100644
index 63d93c9..0000000
--- a/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationMessage.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.notification;
-
-import org.joda.time.DateTimeZone;
-import org.joda.time.Instant;
-
-/**
- * Represents a notification message that is associated with a version.
- */
-public class AtlasNotificationMessage<T> extends AtlasNotificationBaseMessage {
- private String msgSourceIP;
- private String msgCreatedBy;
- private long msgCreationTime;
-
- /**
- * The actual message.
- */
- private final T message;
-
-
- // ----- Constructors ----------------------------------------------------
-
- /**
- * Create a notification message.
- *
- * @param version the message version
- * @param message the actual message
- */
- public AtlasNotificationMessage(MessageVersion version, T message) {
- this(version, message, null, null);
- }
-
- public AtlasNotificationMessage(MessageVersion version, T message, String msgSourceIP, String createdBy) {
- super(version);
-
- this.msgSourceIP = msgSourceIP;
- this.msgCreatedBy = createdBy;
- this.msgCreationTime = Instant.now().toDateTime(DateTimeZone.UTC).getMillis();
- this.message = message;
- }
-
-
- public String getMsgSourceIP() {
- return msgSourceIP;
- }
-
- public void setMsgSourceIP(String msgSourceIP) {
- this.msgSourceIP = msgSourceIP;
- }
-
- public String getMsgCreatedBy() {
- return msgCreatedBy;
- }
-
- public void setMsgCreatedBy(String msgCreatedBy) {
- this.msgCreatedBy = msgCreatedBy;
- }
-
- public long getMsgCreationTime() {
- return msgCreationTime;
- }
-
- public void setMsgCreationTime(long msgCreationTime) {
- this.msgCreationTime = msgCreationTime;
- }
-
- public T getMessage() {
- return message;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationMessageDeserializer.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationMessageDeserializer.java b/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationMessageDeserializer.java
index 2a175ba..d6e6878 100644
--- a/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationMessageDeserializer.java
+++ b/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationMessageDeserializer.java
@@ -19,14 +19,17 @@
package org.apache.atlas.notification;
import com.google.common.annotations.VisibleForTesting;
-import com.google.gson.Gson;
-import org.apache.atlas.notification.AtlasNotificationBaseMessage.CompressionKind;
+import org.apache.atlas.model.notification.AtlasNotificationBaseMessage;
+import org.apache.atlas.model.notification.AtlasNotificationBaseMessage.CompressionKind;
+import org.apache.atlas.model.notification.AtlasNotificationMessage;
+import org.apache.atlas.model.notification.AtlasNotificationStringMessage;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.model.notification.MessageVersion;
import org.apache.commons.lang3.StringUtils;
+import org.codehaus.jackson.type.TypeReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.lang.reflect.ParameterizedType;
-import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -47,11 +50,10 @@ public abstract class AtlasNotificationMessageDeserializer<T> implements Message
public static final String VERSION_MISMATCH_MSG =
"Notification message version mismatch. Expected %s but recieved %s. Message %s";
- private final Type notificationMessageType;
- private final Type messageType;
- private final MessageVersion expectedVersion;
- private final Logger notificationLogger;
- private final Gson gson;
+ private final TypeReference<T> messageType;
+ private final TypeReference<AtlasNotificationMessage<T>> notificationMessageType;
+ private final MessageVersion expectedVersion;
+ private final Logger notificationLogger;
private final Map<String, SplitMessageAggregator> splitMsgBuffer = new HashMap<>();
@@ -65,33 +67,40 @@ public abstract class AtlasNotificationMessageDeserializer<T> implements Message
/**
* Create a notification message deserializer.
*
- * @param notificationMessageType the type of the notification message
* @param expectedVersion the expected message version
- * @param gson JSON serialization/deserialization
* @param notificationLogger logger for message version mismatch
*/
- public AtlasNotificationMessageDeserializer(Type notificationMessageType, MessageVersion expectedVersion,
- Gson gson, Logger notificationLogger) {
- this(notificationMessageType, expectedVersion, gson, notificationLogger,
+ public AtlasNotificationMessageDeserializer(TypeReference<T> messageType,
+ TypeReference<AtlasNotificationMessage<T>> notificationMessageType,
+ MessageVersion expectedVersion, Logger notificationLogger) {
+ this(messageType, notificationMessageType, expectedVersion, notificationLogger,
NOTIFICATION_SPLIT_MESSAGE_SEGMENTS_WAIT_TIME_SECONDS.getLong() * 1000,
NOTIFICATION_SPLIT_MESSAGE_BUFFER_PURGE_INTERVAL_SECONDS.getLong() * 1000);
}
- public AtlasNotificationMessageDeserializer(Type notificationMessageType, MessageVersion expectedVersion,
- Gson gson, Logger notificationLogger,
+ public AtlasNotificationMessageDeserializer(TypeReference<T> messageType,
+ TypeReference<AtlasNotificationMessage<T>> notificationMessageType,
+ MessageVersion expectedVersion,
+ Logger notificationLogger,
long splitMessageSegmentsWaitTimeMs,
long splitMessageBufferPurgeIntervalMs) {
+ this.messageType = messageType;
this.notificationMessageType = notificationMessageType;
- this.messageType = ((ParameterizedType) notificationMessageType).getActualTypeArguments()[0];
this.expectedVersion = expectedVersion;
- this.gson = gson;
this.notificationLogger = notificationLogger;
this.splitMessageSegmentsWaitTimeMs = splitMessageSegmentsWaitTimeMs;
this.splitMessageBufferPurgeIntervalMs = splitMessageBufferPurgeIntervalMs;
}
- // ----- MessageDeserializer ---------------------------------------------
+ public TypeReference<T> getMessageType() {
+ return messageType;
+ }
+ public TypeReference<AtlasNotificationMessage<T>> getNotificationMessageType() {
+ return notificationMessageType;
+ }
+
+ // ----- MessageDeserializer ---------------------------------------------
@Override
public T deserialize(String messageJson) {
final T ret;
@@ -99,15 +108,15 @@ public abstract class AtlasNotificationMessageDeserializer<T> implements Message
messageCountTotal.incrementAndGet();
messageCountSinceLastInterval.incrementAndGet();
- AtlasNotificationBaseMessage msg = gson.fromJson(messageJson, AtlasNotificationBaseMessage.class);
+ AtlasNotificationBaseMessage msg = AtlasType.fromV1Json(messageJson, AtlasNotificationBaseMessage.class);
if (msg.getVersion() == null) { // older style messages not wrapped with AtlasNotificationMessage
- ret = gson.fromJson(messageJson, messageType);
+ ret = AtlasType.fromV1Json(messageJson, messageType);
} else {
String msgJson = messageJson;
if (msg.getMsgSplitCount() > 1) { // multi-part message
- AtlasNotificationStringMessage splitMsg = gson.fromJson(msgJson, AtlasNotificationStringMessage.class);
+ AtlasNotificationStringMessage splitMsg = AtlasType.fromV1Json(msgJson, AtlasNotificationStringMessage.class);
checkVersion(splitMsg, msgJson);
@@ -184,7 +193,7 @@ public abstract class AtlasNotificationMessageDeserializer<T> implements Message
LOG.info("Received msgID={}: splitCount={}, length={} bytes", msgId, splitCount, bytes.length);
}
- msg = gson.fromJson(msgJson, AtlasNotificationBaseMessage.class);
+ msg = AtlasType.fromV1Json(msgJson, AtlasNotificationBaseMessage.class);
} else {
msg = null;
}
@@ -197,7 +206,7 @@ public abstract class AtlasNotificationMessageDeserializer<T> implements Message
if (msg != null) {
if (CompressionKind.GZIP.equals(msg.getMsgCompressionKind())) {
- AtlasNotificationStringMessage compressedMsg = gson.fromJson(msgJson, AtlasNotificationStringMessage.class);
+ AtlasNotificationStringMessage compressedMsg = AtlasType.fromV1Json(msgJson, AtlasNotificationStringMessage.class);
byte[] encodedBytes = AtlasNotificationBaseMessage.getBytesUtf8(compressedMsg.getMessage());
byte[] bytes = AtlasNotificationBaseMessage.decodeBase64AndGzipUncompress(encodedBytes);
@@ -207,7 +216,7 @@ public abstract class AtlasNotificationMessageDeserializer<T> implements Message
LOG.info("Received msgID={}: compressed={} bytes, uncompressed={} bytes", compressedMsg.getMsgId(), encodedBytes.length, bytes.length);
}
- AtlasNotificationMessage<T> atlasNotificationMessage = gson.fromJson(msgJson, notificationMessageType);
+ AtlasNotificationMessage<T> atlasNotificationMessage = AtlasType.fromV1Json(msgJson, notificationMessageType);
checkVersion(atlasNotificationMessage, msgJson);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationStringMessage.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationStringMessage.java b/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationStringMessage.java
deleted file mode 100644
index 41485a0..0000000
--- a/notification/src/main/java/org/apache/atlas/notification/AtlasNotificationStringMessage.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.notification;
-
-
-public class AtlasNotificationStringMessage extends AtlasNotificationBaseMessage {
- private String message = null;
-
- public AtlasNotificationStringMessage() {
- super(AbstractNotification.CURRENT_MESSAGE_VERSION);
- }
-
- public AtlasNotificationStringMessage(String message) {
- super(AbstractNotification.CURRENT_MESSAGE_VERSION);
-
- this.message = message;
- }
-
- public AtlasNotificationStringMessage(String message, String msgId, CompressionKind compressionKind) {
- super(AbstractNotification.CURRENT_MESSAGE_VERSION, msgId, compressionKind);
-
- this.message = message;
- }
-
- public AtlasNotificationStringMessage(String message, String msgId, CompressionKind compressionKind, int msgSplitIdx, int msgSplitCount) {
- super(AbstractNotification.CURRENT_MESSAGE_VERSION, msgId, compressionKind, msgSplitIdx, msgSplitCount);
-
- this.message = message;
- }
-
- public AtlasNotificationStringMessage(byte[] encodedBytes, String msgId, CompressionKind compressionKind) {
- super(AbstractNotification.CURRENT_MESSAGE_VERSION, msgId, compressionKind);
-
- this.message = AtlasNotificationBaseMessage.getStringUtf8(encodedBytes);
- }
-
- public AtlasNotificationStringMessage(byte[] encodedBytes, int offset, int length, String msgId, CompressionKind compressionKind, int msgSplitIdx, int msgSplitCount) {
- super(AbstractNotification.CURRENT_MESSAGE_VERSION, msgId, compressionKind, msgSplitIdx, msgSplitCount);
-
- this.message = new String(encodedBytes, offset, length);
- }
-
- public void setMessage(String message) {
- this.message = message;
- }
-
- public String getMessage() {
- return message;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/MessageVersion.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/MessageVersion.java b/notification/src/main/java/org/apache/atlas/notification/MessageVersion.java
deleted file mode 100644
index 7f96638..0000000
--- a/notification/src/main/java/org/apache/atlas/notification/MessageVersion.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.notification;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-
-/**
- * Represents the version of a notification message.
- */
-public class MessageVersion implements Comparable<MessageVersion> {
- /**
- * Used for message with no version (old format).
- */
- public static final MessageVersion NO_VERSION = new MessageVersion("0");
- public static final MessageVersion VERSION_1 = new MessageVersion("1.0.0");
-
- public static final MessageVersion CURRENT_VERSION = VERSION_1;
-
- private final String version;
-
-
- // ----- Constructors ----------------------------------------------------
-
- /**
- * Create a message version.
- *
- * @param version the version string
- */
- public MessageVersion(String version) {
- this.version = version;
-
- try {
- getVersionParts();
- } catch (NumberFormatException e) {
- throw new IllegalArgumentException(String.format("Invalid version string : %s.", version), e);
- }
- }
-
-
- // ----- Comparable ------------------------------------------------------
-
- @Override
- public int compareTo(MessageVersion that) {
- if (that == null) {
- return 1;
- }
-
- Integer[] thisParts = getVersionParts();
- Integer[] thatParts = that.getVersionParts();
-
- int length = Math.max(thisParts.length, thatParts.length);
-
- for (int i = 0; i < length; i++) {
-
- int comp = getVersionPart(thisParts, i) - getVersionPart(thatParts, i);
-
- if (comp != 0) {
- return comp;
- }
- }
- return 0;
- }
-
-
- // ----- Object overrides ------------------------------------------------
-
- @Override
- public boolean equals(Object that) {
- if (this == that){
- return true;
- }
-
- if (that == null || getClass() != that.getClass()) {
- return false;
- }
-
- return compareTo((MessageVersion) that) == 0;
- }
-
- @Override
- public int hashCode() {
- return Arrays.hashCode(getVersionParts());
- }
-
-
- @Override
- public String toString() {
- return "MessageVersion[version=" + version + "]";
- }
-
- // ----- helper methods --------------------------------------------------
-
- /**
- * Get the version parts array by splitting the version string.
- * Strip the trailing zeros (i.e. '1.0.0' equals '1').
- *
- * @return the version parts array
- */
- protected Integer[] getVersionParts() {
-
- String[] sParts = version.split("\\.");
- ArrayList<Integer> iParts = new ArrayList<>();
- int trailingZeros = 0;
-
- for (String sPart : sParts) {
- Integer iPart = new Integer(sPart);
-
- if (iPart == 0) {
- ++trailingZeros;
- } else {
- for (int i = 0; i < trailingZeros; ++i) {
- iParts.add(0);
- }
- trailingZeros = 0;
- iParts.add(iPart);
- }
- }
- return iParts.toArray(new Integer[iParts.size()]);
- }
-
- private Integer getVersionPart(Integer[] versionParts, int i) {
- return i < versionParts.length ? versionParts[i] : 0;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/NotificationInterface.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/NotificationInterface.java b/notification/src/main/java/org/apache/atlas/notification/NotificationInterface.java
index 8809225..6caf7e2 100644
--- a/notification/src/main/java/org/apache/atlas/notification/NotificationInterface.java
+++ b/notification/src/main/java/org/apache/atlas/notification/NotificationInterface.java
@@ -17,13 +17,9 @@
*/
package org.apache.atlas.notification;
-import com.google.gson.reflect.TypeToken;
import org.apache.atlas.notification.entity.EntityMessageDeserializer;
-import org.apache.atlas.notification.entity.EntityNotification;
import org.apache.atlas.notification.hook.HookMessageDeserializer;
-import org.apache.atlas.notification.hook.HookNotification;
-import java.lang.reflect.Type;
import java.util.List;
/**
@@ -43,57 +39,22 @@ public interface NotificationInterface {
String PROPERTY_PREFIX = "atlas.notification";
/**
- * Notification message class types.
- */
- Class<HookNotification.HookNotificationMessage> HOOK_NOTIFICATION_CLASS =
- HookNotification.HookNotificationMessage.class;
-
- Class<EntityNotification> ENTITY_NOTIFICATION_CLASS = EntityNotification.class;
-
- /**
- * Versioned notification message class types.
- */
- Type HOOK_VERSIONED_MESSAGE_TYPE =
- new TypeToken<AtlasNotificationMessage<HookNotification.HookNotificationMessage>>(){}.getType();
-
- Type ENTITY_VERSIONED_MESSAGE_TYPE = new TypeToken<AtlasNotificationMessage<EntityNotification>>(){}.getType();
-
- /**
* Atlas notification types.
*/
enum NotificationType {
-
// Notifications from the Atlas integration hooks.
- HOOK(HOOK_NOTIFICATION_CLASS, new HookMessageDeserializer()),
+ HOOK(new HookMessageDeserializer()),
// Notifications to entity change consumers.
- ENTITIES(ENTITY_NOTIFICATION_CLASS, new EntityMessageDeserializer());
-
-
- /**
- * The notification class associated with this type.
- */
- private final Class classType;
-
- /**
- * The message deserializer for this type.
- */
- private final MessageDeserializer deserializer;
+ ENTITIES(new EntityMessageDeserializer());
+ private final AtlasNotificationMessageDeserializer deserializer;
- NotificationType(Class classType, MessageDeserializer<?> deserializer) {
- this.classType = classType;
+ NotificationType(AtlasNotificationMessageDeserializer deserializer) {
this.deserializer = deserializer;
}
-
- // ----- accessors ---------------------------------------------------
-
- public Class getClassType() {
- return classType;
- }
-
- public MessageDeserializer getDeserializer() {
+ public AtlasNotificationMessageDeserializer getDeserializer() {
return deserializer;
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/SplitMessageAggregator.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/SplitMessageAggregator.java b/notification/src/main/java/org/apache/atlas/notification/SplitMessageAggregator.java
index 148b57f..10df121 100644
--- a/notification/src/main/java/org/apache/atlas/notification/SplitMessageAggregator.java
+++ b/notification/src/main/java/org/apache/atlas/notification/SplitMessageAggregator.java
@@ -18,6 +18,8 @@
package org.apache.atlas.notification;
+import org.apache.atlas.model.notification.AtlasNotificationStringMessage;
+
public class SplitMessageAggregator {
private final String msgId;
private final AtlasNotificationStringMessage[] splitMessagesBuffer;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/entity/EntityMessageDeserializer.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/entity/EntityMessageDeserializer.java b/notification/src/main/java/org/apache/atlas/notification/entity/EntityMessageDeserializer.java
index a6f7e64..fa160cf 100644
--- a/notification/src/main/java/org/apache/atlas/notification/entity/EntityMessageDeserializer.java
+++ b/notification/src/main/java/org/apache/atlas/notification/entity/EntityMessageDeserializer.java
@@ -18,19 +18,14 @@
package org.apache.atlas.notification.entity;
-import com.google.gson.JsonDeserializationContext;
-import com.google.gson.JsonDeserializer;
-import com.google.gson.JsonElement;
+import org.apache.atlas.model.notification.AtlasNotificationMessage;
+import org.apache.atlas.model.notification.EntityNotification;
import org.apache.atlas.notification.AbstractMessageDeserializer;
import org.apache.atlas.notification.AbstractNotification;
-import org.apache.atlas.notification.NotificationInterface;
+import org.codehaus.jackson.type.TypeReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.lang.reflect.Type;
-import java.util.Collections;
-import java.util.Map;
-
/**
* Entity notification message deserializer.
*/
@@ -48,29 +43,19 @@ public class EntityMessageDeserializer extends AbstractMessageDeserializer<Entit
* Create an entity notification message deserializer.
*/
public EntityMessageDeserializer() {
- super(NotificationInterface.ENTITY_VERSIONED_MESSAGE_TYPE,
- AbstractNotification.CURRENT_MESSAGE_VERSION, getDeserializerMap(), NOTIFICATION_LOGGER);
- }
-
-
- // ----- helper methods --------------------------------------------------
-
- private static Map<Type, JsonDeserializer> getDeserializerMap() {
- return Collections.<Type, JsonDeserializer>singletonMap(
- NotificationInterface.ENTITY_NOTIFICATION_CLASS, new EntityNotificationDeserializer());
+ super(new TypeReference<EntityNotification>() {},
+ new TypeReference<AtlasNotificationMessage<EntityNotification>>() {},
+ AbstractNotification.CURRENT_MESSAGE_VERSION, NOTIFICATION_LOGGER);
}
+ @Override
+ public EntityNotification deserialize(String messageJson) {
+ final EntityNotification ret = super.deserialize(messageJson);
- // ----- deserializer classes --------------------------------------------
-
- /**
- * Deserializer for EntityNotification.
- */
- protected static final class EntityNotificationDeserializer implements JsonDeserializer<EntityNotification> {
- @Override
- public EntityNotification deserialize(final JsonElement json, final Type type,
- final JsonDeserializationContext context) {
- return context.deserialize(json, EntityNotificationImpl.class);
+ if (ret != null) {
+ ret.normalize();
}
+
+ return ret;
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/entity/EntityNotification.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/entity/EntityNotification.java b/notification/src/main/java/org/apache/atlas/notification/entity/EntityNotification.java
deleted file mode 100644
index 379e815..0000000
--- a/notification/src/main/java/org/apache/atlas/notification/entity/EntityNotification.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.notification.entity;
-
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-
-import java.util.List;
-
-/**
- * Notification of entity changes.
- */
-public interface EntityNotification {
-
- /**
- * Operations that result in an entity notification.
- */
- enum OperationType {
- ENTITY_CREATE,
- ENTITY_UPDATE,
- ENTITY_DELETE,
- TRAIT_ADD,
- TRAIT_DELETE,
- TRAIT_UPDATE
- }
-
-
- // ----- EntityNotification ------------------------------------------------
-
- /**
- * Get the entity that is associated with this notification.
- *
- * @return the associated entity
- */
- IReferenceableInstance getEntity();
-
- /**
- * Get flattened list of traits that are associated with this entity (includes super traits).
- *
- * @return the list of all traits
- */
- List<IStruct> getAllTraits();
-
- /**
- * Get the type of operation that triggered this notification.
- *
- * @return the operation type
- */
- OperationType getOperationType();
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/entity/EntityNotificationImpl.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/entity/EntityNotificationImpl.java b/notification/src/main/java/org/apache/atlas/notification/entity/EntityNotificationImpl.java
deleted file mode 100644
index 6a9b362..0000000
--- a/notification/src/main/java/org/apache/atlas/notification/entity/EntityNotificationImpl.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.notification.entity;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.types.FieldMapping;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-
-/**
- * Entity notification implementation.
- */
-public class EntityNotificationImpl implements EntityNotification {
-
- private final Referenceable entity;
- private final OperationType operationType;
- private final List<IStruct> traits;
-
-
- // ----- Constructors ------------------------------------------------------
-
- /**
- * No-arg constructor for serialization.
- */
- @SuppressWarnings("unused")
- private EntityNotificationImpl() throws AtlasException {
- this(null, OperationType.ENTITY_CREATE, Collections.<IStruct>emptyList());
- }
-
- /**
- * Construct an EntityNotification.
- *
- * @param entity the entity subject of the notification
- * @param operationType the type of operation that caused the notification
- * @param traits the traits for the given entity
- *
- * @throws AtlasException if the entity notification can not be created
- */
- public EntityNotificationImpl(Referenceable entity, OperationType operationType, List<IStruct> traits)
- throws AtlasException {
- this.entity = entity;
- this.operationType = operationType;
- this.traits = traits;
- }
-
- /**
- * Construct an EntityNotification.
- *
- * @param entity the entity subject of the notification
- * @param operationType the type of operation that caused the notification
- * @param typeSystem the Atlas type system
- *
- * @throws AtlasException if the entity notification can not be created
- */
- public EntityNotificationImpl(Referenceable entity, OperationType operationType, TypeSystem typeSystem)
- throws AtlasException {
- this(entity, operationType, getAllTraits(entity, typeSystem));
- }
-
-
- // ----- EntityNotification ------------------------------------------------
-
- @Override
- public IReferenceableInstance getEntity() {
- return entity;
- }
-
- @Override
- public List<IStruct> getAllTraits() {
- return traits;
- }
-
- @Override
- public OperationType getOperationType() {
- return operationType;
- }
-
-
- // ----- Object overrides --------------------------------------------------
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- EntityNotificationImpl that = (EntityNotificationImpl) o;
- return Objects.equals(entity, that.entity) &&
- operationType == that.operationType &&
- Objects.equals(traits, that.traits);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(entity, operationType, traits);
- }
-
-
- // ----- helper methods ----------------------------------------------------
-
- private static List<IStruct> getAllTraits(IReferenceableInstance entityDefinition,
- TypeSystem typeSystem) throws AtlasException {
- List<IStruct> traitInfo = new LinkedList<>();
- for (String traitName : entityDefinition.getTraits()) {
- IStruct trait = entityDefinition.getTrait(traitName);
- String typeName = trait.getTypeName();
- Map<String, Object> valuesMap = trait.getValuesMap();
- traitInfo.add(new Struct(typeName, valuesMap));
- traitInfo.addAll(getSuperTraits(typeName, valuesMap, typeSystem));
- }
- return traitInfo;
- }
-
- private static List<IStruct> getSuperTraits(
- String typeName, Map<String, Object> values, TypeSystem typeSystem) throws AtlasException {
-
- List<IStruct> superTypes = new LinkedList<>();
-
- TraitType traitDef = typeSystem.getDataType(TraitType.class, typeName);
- Set<String> superTypeNames = traitDef.getAllSuperTypeNames();
-
- for (String superTypeName : superTypeNames) {
- TraitType superTraitDef = typeSystem.getDataType(TraitType.class, superTypeName);
-
- Map<String, Object> superTypeValues = new HashMap<>();
-
- FieldMapping fieldMapping = superTraitDef.fieldMapping();
-
- if (fieldMapping != null) {
- Set<String> superTypeAttributeNames = fieldMapping.fields.keySet();
-
- for (String superTypeAttributeName : superTypeAttributeNames) {
- if (values.containsKey(superTypeAttributeName)) {
- superTypeValues.put(superTypeAttributeName, values.get(superTypeAttributeName));
- }
- }
- }
- IStruct superTrait = new Struct(superTypeName, superTypeValues);
- superTypes.add(superTrait);
- superTypes.addAll(getSuperTraits(superTypeName, values, typeSystem));
- }
-
- return superTypes;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/hook/HookMessageDeserializer.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/hook/HookMessageDeserializer.java b/notification/src/main/java/org/apache/atlas/notification/hook/HookMessageDeserializer.java
index 8337de0..cab442f 100644
--- a/notification/src/main/java/org/apache/atlas/notification/hook/HookMessageDeserializer.java
+++ b/notification/src/main/java/org/apache/atlas/notification/hook/HookMessageDeserializer.java
@@ -18,21 +18,20 @@
package org.apache.atlas.notification.hook;
-import com.google.gson.JsonDeserializer;
+import org.apache.atlas.model.notification.AtlasNotificationMessage;
+import org.apache.atlas.model.notification.HookNotification;
import org.apache.atlas.notification.AbstractMessageDeserializer;
import org.apache.atlas.notification.AbstractNotification;
-import org.apache.atlas.notification.NotificationInterface;
+import org.codehaus.jackson.type.TypeReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import java.lang.reflect.Type;
-import java.util.Collections;
-import java.util.Map;
+
/**
* Hook notification message deserializer.
*/
-public class HookMessageDeserializer extends AbstractMessageDeserializer<HookNotification.HookNotificationMessage> {
+public class HookMessageDeserializer extends AbstractMessageDeserializer<HookNotification> {
/**
* Logger for hook notification messages.
@@ -46,15 +45,19 @@ public class HookMessageDeserializer extends AbstractMessageDeserializer<HookNot
* Create a hook notification message deserializer.
*/
public HookMessageDeserializer() {
- super(NotificationInterface.HOOK_VERSIONED_MESSAGE_TYPE,
- AbstractNotification.CURRENT_MESSAGE_VERSION, getDeserializerMap(), NOTIFICATION_LOGGER);
+ super(new TypeReference<HookNotification>() {},
+ new TypeReference<AtlasNotificationMessage<HookNotification>>() {},
+ AbstractNotification.CURRENT_MESSAGE_VERSION, NOTIFICATION_LOGGER);
}
+ @Override
+ public HookNotification deserialize(String messageJson) {
+ final HookNotification ret = super.deserialize(messageJson);
- // ----- helper methods --------------------------------------------------
+ if (ret != null) {
+ ret.normalize();
+ }
- private static Map<Type, JsonDeserializer> getDeserializerMap() {
- return Collections.<Type, JsonDeserializer>singletonMap(
- NotificationInterface.HOOK_NOTIFICATION_CLASS, new HookNotification());
+ return ret;
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/main/java/org/apache/atlas/notification/hook/HookNotification.java
----------------------------------------------------------------------
diff --git a/notification/src/main/java/org/apache/atlas/notification/hook/HookNotification.java b/notification/src/main/java/org/apache/atlas/notification/hook/HookNotification.java
deleted file mode 100644
index a25aa52..0000000
--- a/notification/src/main/java/org/apache/atlas/notification/hook/HookNotification.java
+++ /dev/null
@@ -1,275 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.notification.hook;
-
-import com.google.gson.JsonDeserializationContext;
-import com.google.gson.JsonDeserializer;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParseException;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.commons.lang.StringUtils;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-
-import java.lang.reflect.Type;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * Contains the structure of messages transferred from hooks to atlas.
- */
-public class HookNotification implements JsonDeserializer<HookNotification.HookNotificationMessage> {
-
- @Override
- public HookNotificationMessage deserialize(JsonElement json, Type typeOfT,
- JsonDeserializationContext context) {
- HookNotificationType type =
- context.deserialize(((JsonObject) json).get("type"), HookNotificationType.class);
- switch (type) {
- case ENTITY_CREATE:
- return context.deserialize(json, EntityCreateRequest.class);
-
- case ENTITY_FULL_UPDATE:
- return context.deserialize(json, EntityUpdateRequest.class);
-
- case ENTITY_PARTIAL_UPDATE:
- return context.deserialize(json, EntityPartialUpdateRequest.class);
-
- case ENTITY_DELETE:
- return context.deserialize(json, EntityDeleteRequest.class);
-
- case TYPE_CREATE:
- case TYPE_UPDATE:
- return context.deserialize(json, TypeRequest.class);
-
- default:
- throw new IllegalStateException("Unhandled type " + type);
- }
- }
-
- /**
- * Type of the hook message.
- */
- public enum HookNotificationType {
- TYPE_CREATE, TYPE_UPDATE, ENTITY_CREATE, ENTITY_PARTIAL_UPDATE, ENTITY_FULL_UPDATE, ENTITY_DELETE
- }
-
- /**
- * Base type of hook message.
- */
- public static class HookNotificationMessage {
- public static final String UNKNOW_USER = "UNKNOWN";
- protected HookNotificationType type;
- protected String user;
-
- private HookNotificationMessage() {
- }
-
- public HookNotificationMessage(HookNotificationType type, String user) {
- this.type = type;
- this.user = user;
- }
-
- public HookNotificationType getType() {
- return type;
- }
-
- public String getUser() {
- if (StringUtils.isEmpty(user)) {
- return UNKNOW_USER;
- }
- return user;
- }
-
-
- }
-
- /**
- * Hook message for create type definitions.
- */
- public static class TypeRequest extends HookNotificationMessage {
- private TypesDef typesDef;
-
- private TypeRequest() {
- }
-
- public TypeRequest(HookNotificationType type, TypesDef typesDef, String user) {
- super(type, user);
- this.typesDef = typesDef;
- }
-
- public TypesDef getTypesDef() {
- return typesDef;
- }
- }
-
- /**
- * Hook message for creating new entities.
- */
- public static class EntityCreateRequest extends HookNotificationMessage {
- private List<Referenceable> entities;
-
- private EntityCreateRequest() {
- }
-
- public EntityCreateRequest(String user, Referenceable... entities) {
- this(HookNotificationType.ENTITY_CREATE, Arrays.asList(entities), user);
- }
-
- public EntityCreateRequest(String user, List<Referenceable> entities) {
- this(HookNotificationType.ENTITY_CREATE, entities, user);
- }
-
- protected EntityCreateRequest(HookNotificationType type, List<Referenceable> entities, String user) {
- super(type, user);
- this.entities = entities;
- }
-
- public EntityCreateRequest(String user, JSONArray jsonArray) {
- super(HookNotificationType.ENTITY_CREATE, user);
- entities = new ArrayList<>();
- for (int index = 0; index < jsonArray.length(); index++) {
- try {
- entities.add(InstanceSerialization.fromJsonReferenceable(jsonArray.getString(index), true));
- } catch (JSONException e) {
- throw new JsonParseException(e);
- }
- }
- }
-
- public List<Referenceable> getEntities() {
- return entities;
- }
-
- @Override
- public String toString() {
- return entities.toString();
- }
- }
-
- /**
- * Hook message for updating entities(full update).
- */
- public static class EntityUpdateRequest extends EntityCreateRequest {
- public EntityUpdateRequest(String user, Referenceable... entities) {
- this(user, Arrays.asList(entities));
- }
-
- public EntityUpdateRequest(String user, List<Referenceable> entities) {
- super(HookNotificationType.ENTITY_FULL_UPDATE, entities, user);
- }
- }
-
- /**
- * Hook message for updating entities(partial update).
- */
- public static class EntityPartialUpdateRequest extends HookNotificationMessage {
- private String typeName;
- private String attribute;
- private Referenceable entity;
- private String attributeValue;
-
- private EntityPartialUpdateRequest() {
- }
-
- public EntityPartialUpdateRequest(String user, String typeName, String attribute, String attributeValue,
- Referenceable entity) {
- super(HookNotificationType.ENTITY_PARTIAL_UPDATE, user);
- this.typeName = typeName;
- this.attribute = attribute;
- this.attributeValue = attributeValue;
- this.entity = entity;
- }
-
- public String getTypeName() {
- return typeName;
- }
-
- public String getAttribute() {
- return attribute;
- }
-
- public Referenceable getEntity() {
- return entity;
- }
-
- public String getAttributeValue() {
- return attributeValue;
- }
-
- @Override
- public String toString() {
- return "{"
- + "entityType='" + typeName + '\''
- + ", attribute=" + attribute
- + ", value=" + attributeValue
- + ", entity=" + entity
- + '}';
- }
- }
-
- /**
- * Hook message for creating new entities.
- */
- public static class EntityDeleteRequest extends HookNotificationMessage {
-
- private String typeName;
- private String attribute;
- private String attributeValue;
-
- private EntityDeleteRequest() {
- }
-
- public EntityDeleteRequest(String user, String typeName, String attribute, String attributeValue) {
- this(HookNotificationType.ENTITY_DELETE, user, typeName, attribute, attributeValue);
- }
-
- protected EntityDeleteRequest(HookNotificationType type,
- String user, String typeName, String attribute, String attributeValue) {
- super(type, user);
- this.typeName = typeName;
- this.attribute = attribute;
- this.attributeValue = attributeValue;
- }
-
- public String getTypeName() {
- return typeName;
- }
-
- public String getAttribute() {
- return attribute;
- }
-
- public String getAttributeValue() {
- return attributeValue;
- }
-
- @Override
- public String toString() {
- return "{"
- + "entityType='" + typeName + '\''
- + ", attribute=" + attribute
- + ", value=" + attributeValue
- + '}';
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/hook/AtlasHookTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/hook/AtlasHookTest.java b/notification/src/test/java/org/apache/atlas/hook/AtlasHookTest.java
index d59cb1c..0a0620f 100644
--- a/notification/src/test/java/org/apache/atlas/hook/AtlasHookTest.java
+++ b/notification/src/test/java/org/apache/atlas/hook/AtlasHookTest.java
@@ -18,9 +18,10 @@
package org.apache.atlas.hook;
+import org.apache.atlas.model.notification.HookNotification;
import org.apache.atlas.notification.NotificationException;
import org.apache.atlas.notification.NotificationInterface;
-import org.apache.atlas.notification.hook.HookNotification;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.testng.annotations.BeforeMethod;
@@ -51,41 +52,41 @@ public class AtlasHookTest {
@Test (timeOut = 10000)
public void testNotifyEntitiesDoesNotHangOnException() throws Exception {
- List<HookNotification.HookNotificationMessage> hookNotificationMessages = new ArrayList<>();
+ List<HookNotification> hookNotifications = new ArrayList<>();
doThrow(new NotificationException(new Exception())).when(notificationInterface)
- .send(NotificationInterface.NotificationType.HOOK, hookNotificationMessages);
- AtlasHook.notifyEntitiesInternal(hookNotificationMessages, 0, notificationInterface, false,
+ .send(NotificationInterface.NotificationType.HOOK, hookNotifications);
+ AtlasHook.notifyEntitiesInternal(hookNotifications, 0, notificationInterface, false,
failedMessagesLogger);
// if we've reached here, the method finished OK.
}
@Test
public void testNotifyEntitiesRetriesOnException() throws NotificationException {
- List<HookNotification.HookNotificationMessage> hookNotificationMessages =
- new ArrayList<HookNotification.HookNotificationMessage>() {{
- add(new HookNotification.EntityCreateRequest("user"));
+ List<HookNotification> hookNotifications =
+ new ArrayList<HookNotification>() {{
+ add(new EntityCreateRequest("user"));
}
};
doThrow(new NotificationException(new Exception())).when(notificationInterface)
- .send(NotificationInterface.NotificationType.HOOK, hookNotificationMessages);
- AtlasHook.notifyEntitiesInternal(hookNotificationMessages, 2, notificationInterface, false,
+ .send(NotificationInterface.NotificationType.HOOK, hookNotifications);
+ AtlasHook.notifyEntitiesInternal(hookNotifications, 2, notificationInterface, false,
failedMessagesLogger);
verify(notificationInterface, times(2)).
- send(NotificationInterface.NotificationType.HOOK, hookNotificationMessages);
+ send(NotificationInterface.NotificationType.HOOK, hookNotifications);
}
@Test
public void testFailedMessageIsLoggedIfRequired() throws NotificationException {
- List<HookNotification.HookNotificationMessage> hookNotificationMessages =
- new ArrayList<HookNotification.HookNotificationMessage>() {{
- add(new HookNotification.EntityCreateRequest("user"));
+ List<HookNotification> hookNotifications =
+ new ArrayList<HookNotification>() {{
+ add(new EntityCreateRequest("user"));
}
};
doThrow(new NotificationException(new Exception(), Arrays.asList("test message")))
.when(notificationInterface)
- .send(NotificationInterface.NotificationType.HOOK, hookNotificationMessages);
- AtlasHook.notifyEntitiesInternal(hookNotificationMessages, 2, notificationInterface, true,
+ .send(NotificationInterface.NotificationType.HOOK, hookNotifications);
+ AtlasHook.notifyEntitiesInternal(hookNotifications, 2, notificationInterface, true,
failedMessagesLogger);
verify(failedMessagesLogger, times(1)).log("test message");
@@ -93,11 +94,11 @@ public class AtlasHookTest {
@Test
public void testFailedMessageIsNotLoggedIfNotRequired() throws NotificationException {
- List<HookNotification.HookNotificationMessage> hookNotificationMessages = new ArrayList<>();
+ List<HookNotification> hookNotifications = new ArrayList<>();
doThrow(new NotificationException(new Exception(), Arrays.asList("test message")))
.when(notificationInterface)
- .send(NotificationInterface.NotificationType.HOOK, hookNotificationMessages);
- AtlasHook.notifyEntitiesInternal(hookNotificationMessages, 2, notificationInterface, false,
+ .send(NotificationInterface.NotificationType.HOOK, hookNotifications);
+ AtlasHook.notifyEntitiesInternal(hookNotifications, 2, notificationInterface, false,
failedMessagesLogger);
verifyZeroInteractions(failedMessagesLogger);
@@ -105,15 +106,15 @@ public class AtlasHookTest {
@Test
public void testAllFailedMessagesAreLogged() throws NotificationException {
- List<HookNotification.HookNotificationMessage> hookNotificationMessages =
- new ArrayList<HookNotification.HookNotificationMessage>() {{
- add(new HookNotification.EntityCreateRequest("user"));
+ List<HookNotification> hookNotifications =
+ new ArrayList<HookNotification>() {{
+ add(new EntityCreateRequest("user"));
}
};
doThrow(new NotificationException(new Exception(), Arrays.asList("test message1", "test message2")))
.when(notificationInterface)
- .send(NotificationInterface.NotificationType.HOOK, hookNotificationMessages);
- AtlasHook.notifyEntitiesInternal(hookNotificationMessages, 2, notificationInterface, true,
+ .send(NotificationInterface.NotificationType.HOOK, hookNotifications);
+ AtlasHook.notifyEntitiesInternal(hookNotifications, 2, notificationInterface, true,
failedMessagesLogger);
verify(failedMessagesLogger, times(1)).log("test message1");
@@ -122,10 +123,10 @@ public class AtlasHookTest {
@Test
public void testFailedMessageIsNotLoggedIfNotANotificationException() throws Exception {
- List<HookNotification.HookNotificationMessage> hookNotificationMessages = new ArrayList<>();
+ List<HookNotification> hookNotifications = new ArrayList<>();
doThrow(new RuntimeException("test message")).when(notificationInterface)
- .send(NotificationInterface.NotificationType.HOOK, hookNotificationMessages);
- AtlasHook.notifyEntitiesInternal(hookNotificationMessages, 2, notificationInterface, true,
+ .send(NotificationInterface.NotificationType.HOOK, hookNotifications);
+ AtlasHook.notifyEntitiesInternal(hookNotifications, 2, notificationInterface, true,
failedMessagesLogger);
verifyZeroInteractions(failedMessagesLogger);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/kafka/KafkaConsumerTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/kafka/KafkaConsumerTest.java b/notification/src/test/java/org/apache/atlas/kafka/KafkaConsumerTest.java
index 08a20bd..2e8abd7 100644
--- a/notification/src/test/java/org/apache/atlas/kafka/KafkaConsumerTest.java
+++ b/notification/src/test/java/org/apache/atlas/kafka/KafkaConsumerTest.java
@@ -18,34 +18,30 @@
package org.apache.atlas.kafka;
-import kafka.message.MessageAndMetadata;
-import org.apache.atlas.notification.*;
-import org.apache.atlas.notification.AtlasNotificationMessage;
-import org.apache.atlas.notification.entity.EntityNotificationImplTest;
-import org.apache.atlas.notification.hook.HookNotification;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.notification.IncompatibleVersionException;
+import org.apache.atlas.notification.NotificationInterface.NotificationType;
+import org.apache.atlas.model.notification.AtlasNotificationMessage;
+import org.apache.atlas.notification.entity.EntityNotificationTest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityUpdateRequest;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.model.notification.MessageVersion;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
-import org.codehaus.jettison.json.JSONException;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.Collections;
-import java.util.LinkedList;
import java.util.List;
-import java.util.Arrays;
-import java.util.ArrayList;
-import java.util.HashMap;
import java.util.Map;
-import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@@ -55,7 +51,6 @@ import static org.testng.Assert.*;
* KafkaConsumer tests.
*/
public class KafkaConsumerTest {
-
private static final String TRAIT_NAME = "MyTrait";
@@ -70,88 +65,62 @@ public class KafkaConsumerTest {
@Test
public void testReceive() throws Exception {
-
-
- MessageAndMetadata<String, String> messageAndMetadata = mock(MessageAndMetadata.class);
-
- Referenceable entity = getEntity(TRAIT_NAME);
-
- HookNotification.EntityUpdateRequest message =
- new HookNotification.EntityUpdateRequest("user1", entity);
-
- String json = AbstractNotification.GSON.toJson(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), message));
-
- kafkaConsumer.assign(Arrays.asList(new TopicPartition("ATLAS_HOOK", 0)));
- List<ConsumerRecord> klist = new ArrayList<>();
- klist.add(new ConsumerRecord<String, String>("ATLAS_HOOK",
- 0, 0L, "mykey", json));
-
- TopicPartition tp = new TopicPartition("ATLAS_HOOK",0);
- Map mp = new HashMap();
- mp.put(tp,klist);
- ConsumerRecords records = new ConsumerRecords(mp);
-
+ Referenceable entity = getEntity(TRAIT_NAME);
+ EntityUpdateRequest message = new EntityUpdateRequest("user1", entity);
+ String json = AtlasType.toV1Json(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), message));
+ TopicPartition tp = new TopicPartition("ATLAS_HOOK", 0);
+ List<ConsumerRecord<String, String>> klist = Collections.singletonList(new ConsumerRecord<>("ATLAS_HOOK", 0, 0L, "mykey", json));
+ Map mp = Collections.singletonMap(tp, klist);
+ ConsumerRecords records = new ConsumerRecords(mp);
when(kafkaConsumer.poll(100)).thenReturn(records);
- when(messageAndMetadata.message()).thenReturn(json);
+ kafkaConsumer.assign(Collections.singletonList(tp));
+
+ AtlasKafkaConsumer consumer = new AtlasKafkaConsumer(NotificationType.HOOK, kafkaConsumer, false, 100L);
+ List<AtlasKafkaMessage<HookNotification>> messageList = consumer.receive();
- AtlasKafkaConsumer consumer = new AtlasKafkaConsumer(NotificationInterface.NotificationType.HOOK.getDeserializer(), kafkaConsumer, false, 100L);
- List<AtlasKafkaMessage<HookNotification.HookNotificationMessage>> messageList = consumer.receive();
assertTrue(messageList.size() > 0);
- HookNotification.HookNotificationMessage consumedMessage = messageList.get(0).getMessage();
+ HookNotification consumedMessage = messageList.get(0).getMessage();
assertMessagesEqual(message, consumedMessage, entity);
-
}
@Test
public void testNextVersionMismatch() throws Exception {
+ Referenceable entity = getEntity(TRAIT_NAME);
+ EntityUpdateRequest message = new EntityUpdateRequest("user1", entity);
+ String json = AtlasType.toV1Json(new AtlasNotificationMessage<>(new MessageVersion("2.0.0"), message));
+ TopicPartition tp = new TopicPartition("ATLAS_HOOK",0);
+ List<ConsumerRecord<String, String>> klist = Collections.singletonList(new ConsumerRecord<>("ATLAS_HOOK", 0, 0L, "mykey", json));
+ Map mp = Collections.singletonMap(tp,klist);
+ ConsumerRecords records = new ConsumerRecords(mp);
- MessageAndMetadata<String, String> messageAndMetadata = mock(MessageAndMetadata.class);
-
- Referenceable entity = getEntity(TRAIT_NAME);
-
- HookNotification.EntityUpdateRequest message =
- new HookNotification.EntityUpdateRequest("user1", entity);
-
- String json = AbstractNotification.GSON.toJson(new AtlasNotificationMessage<>(new MessageVersion("2.0.0"), message));
-
- kafkaConsumer.assign(Arrays.asList(new TopicPartition("ATLAS_HOOK", 0)));
- List<ConsumerRecord> klist = new ArrayList<>();
- klist.add(new ConsumerRecord<String, String>("ATLAS_HOOK",
- 0, 0L, "mykey", json));
-
- TopicPartition tp = new TopicPartition("ATLAS_HOOK",0);
- Map mp = new HashMap();
- mp.put(tp,klist);
- ConsumerRecords records = new ConsumerRecords(mp);
+ kafkaConsumer.assign(Collections.singletonList(tp));
when(kafkaConsumer.poll(100L)).thenReturn(records);
- when(messageAndMetadata.message()).thenReturn(json);
- AtlasKafkaConsumer consumer =new AtlasKafkaConsumer(NotificationInterface.NotificationType.HOOK.getDeserializer(), kafkaConsumer ,false, 100L);
+ AtlasKafkaConsumer consumer =new AtlasKafkaConsumer(NotificationType.HOOK, kafkaConsumer ,false, 100L);
+
try {
- List<AtlasKafkaMessage<HookNotification.HookNotificationMessage>> messageList = consumer.receive();
+ List<AtlasKafkaMessage<HookNotification>> messageList = consumer.receive();
+
assertTrue(messageList.size() > 0);
- HookNotification.HookNotificationMessage consumedMessage = messageList.get(0).getMessage();
+ HookNotification consumedMessage = messageList.get(0).getMessage();
fail("Expected VersionMismatchException!");
} catch (IncompatibleVersionException e) {
e.printStackTrace();
}
-
}
@Test
public void testCommitIsCalledIfAutoCommitDisabled() {
-
- TopicPartition tp = new TopicPartition("ATLAS_HOOK",0);
-
- AtlasKafkaConsumer consumer =new AtlasKafkaConsumer(NotificationInterface.NotificationType.HOOK.getDeserializer(), kafkaConsumer, false, 100L);
+ TopicPartition tp = new TopicPartition("ATLAS_HOOK",0);
+ AtlasKafkaConsumer consumer = new AtlasKafkaConsumer(NotificationType.HOOK, kafkaConsumer, false, 100L);
consumer.commit(tp, 1);
@@ -160,10 +129,8 @@ public class KafkaConsumerTest {
@Test
public void testCommitIsNotCalledIfAutoCommitEnabled() {
-
- TopicPartition tp = new TopicPartition("ATLAS_HOOK",0);
-
- AtlasKafkaConsumer consumer =new AtlasKafkaConsumer(NotificationInterface.NotificationType.HOOK.getDeserializer(), kafkaConsumer, true , 100L);
+ TopicPartition tp = new TopicPartition("ATLAS_HOOK",0);
+ AtlasKafkaConsumer consumer = new AtlasKafkaConsumer(NotificationType.HOOK, kafkaConsumer, true , 100L);
consumer.commit(tp, 1);
@@ -171,26 +138,21 @@ public class KafkaConsumerTest {
}
private Referenceable getEntity(String traitName) {
- Referenceable entity = EntityNotificationImplTest.getEntity("id");
- List<IStruct> traitInfo = new LinkedList<>();
- IStruct trait = new Struct(traitName, Collections.<String, Object>emptyMap());
- traitInfo.add(trait);
- return entity;
+ return EntityNotificationTest.getEntity("id", new Struct(traitName, Collections.<String, Object>emptyMap()));
}
- private void assertMessagesEqual(HookNotification.EntityUpdateRequest message,
- HookNotification.HookNotificationMessage consumedMessage,
- Referenceable entity) throws JSONException {
-
+ private void assertMessagesEqual(EntityUpdateRequest message,
+ HookNotification consumedMessage,
+ Referenceable entity) {
assertEquals(consumedMessage.getType(), message.getType());
assertEquals(consumedMessage.getUser(), message.getUser());
- assertTrue(consumedMessage instanceof HookNotification.EntityUpdateRequest);
+ assertTrue(consumedMessage instanceof EntityUpdateRequest);
- HookNotification.EntityUpdateRequest deserializedEntityUpdateRequest =
- (HookNotification.EntityUpdateRequest) consumedMessage;
+ EntityUpdateRequest deserializedEntityUpdateRequest = (EntityUpdateRequest) consumedMessage;
Referenceable deserializedEntity = deserializedEntityUpdateRequest.getEntities().get(0);
+
assertEquals(deserializedEntity.getId(), entity.getId());
assertEquals(deserializedEntity.getTypeName(), entity.getTypeName());
assertEquals(deserializedEntity.getTraits(), entity.getTraits());
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/kafka/KafkaNotificationMockTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/kafka/KafkaNotificationMockTest.java b/notification/src/test/java/org/apache/atlas/kafka/KafkaNotificationMockTest.java
index 09e2e43..78d2a90 100644
--- a/notification/src/test/java/org/apache/atlas/kafka/KafkaNotificationMockTest.java
+++ b/notification/src/test/java/org/apache/atlas/kafka/KafkaNotificationMockTest.java
@@ -25,6 +25,7 @@ import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.TopicPartition;
import org.testng.annotations.Test;
+import java.util.Arrays;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -32,7 +33,6 @@ import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
-import scala.actors.threadpool.Arrays;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/kafka/KafkaNotificationTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/kafka/KafkaNotificationTest.java b/notification/src/test/java/org/apache/atlas/kafka/KafkaNotificationTest.java
index a1e13b9..e0655f3 100644
--- a/notification/src/test/java/org/apache/atlas/kafka/KafkaNotificationTest.java
+++ b/notification/src/test/java/org/apache/atlas/kafka/KafkaNotificationTest.java
@@ -19,29 +19,28 @@
package org.apache.atlas.kafka;
import org.apache.atlas.ApplicationProperties;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.atlas.notification.NotificationConsumer;
import org.apache.atlas.notification.NotificationInterface;
-import org.apache.atlas.notification.hook.HookNotification;
-import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.RandomStringUtils;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
-import static org.apache.atlas.notification.hook.HookNotification.HookNotificationMessage;
+import org.apache.atlas.model.notification.HookNotification;
import java.util.List;
import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
public class KafkaNotificationTest {
-
private KafkaNotification kafkaNotification;
@BeforeClass
public void setup() throws Exception {
Configuration properties = ApplicationProperties.get();
+
properties.setProperty("atlas.kafka.data", "target/" + RandomStringUtils.randomAlphanumeric(5));
kafkaNotification = new KafkaNotification(properties);
@@ -56,29 +55,27 @@ public class KafkaNotificationTest {
@Test
public void testReceiveKafkaMessages() throws Exception {
- kafkaNotification.send(NotificationInterface.NotificationType.HOOK,
- new HookNotification.EntityCreateRequest("u1", new Referenceable("type")));
- kafkaNotification.send(NotificationInterface.NotificationType.HOOK,
- new HookNotification.EntityCreateRequest("u2", new Referenceable("type")));
- kafkaNotification.send(NotificationInterface.NotificationType.HOOK,
- new HookNotification.EntityCreateRequest("u3", new Referenceable("type")));
- kafkaNotification.send(NotificationInterface.NotificationType.HOOK,
- new HookNotification.EntityCreateRequest("u4", new Referenceable("type")));
-
- NotificationConsumer<Object> consumer =
- kafkaNotification.createConsumers(NotificationInterface.NotificationType.HOOK, 1).get(0);
- List<AtlasKafkaMessage<Object>> messages = null ;
- long startTime = System.currentTimeMillis(); //fetch starting time
+ kafkaNotification.send(NotificationInterface.NotificationType.HOOK, new EntityCreateRequest("u1", new Referenceable("type")));
+ kafkaNotification.send(NotificationInterface.NotificationType.HOOK, new EntityCreateRequest("u2", new Referenceable("type")));
+ kafkaNotification.send(NotificationInterface.NotificationType.HOOK, new EntityCreateRequest("u3", new Referenceable("type")));
+ kafkaNotification.send(NotificationInterface.NotificationType.HOOK, new EntityCreateRequest("u4", new Referenceable("type")));
+
+ NotificationConsumer<Object> consumer = kafkaNotification.createConsumers(NotificationInterface.NotificationType.HOOK, 1).get(0);
+ List<AtlasKafkaMessage<Object>> messages = null ;
+ long startTime = System.currentTimeMillis(); //fetch starting time
+
while ((System.currentTimeMillis() - startTime) < 10000) {
messages = consumer.receive();
+
if (messages.size() > 0) {
break;
}
}
- int i=1;
+ int i = 1;
for (AtlasKafkaMessage<Object> msg : messages){
- HookNotification.HookNotificationMessage message = (HookNotificationMessage) msg.getMessage();
+ HookNotification message = (HookNotification) msg.getMessage();
+
assertEquals(message.getUser(), "u"+i++);
}
[41/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplication of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
index f7404ae..773a1e8 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
@@ -19,18 +19,16 @@
package org.apache.atlas.hive.hook;
import com.google.common.base.Joiner;
-import com.google.common.collect.ImmutableList;
import com.sun.jersey.api.client.ClientResponse;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.hive.HiveITBase;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataTypes;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.TableType;
@@ -182,7 +180,7 @@ public class HiveHookIT extends HiveITBase {
private void verifyTimestamps(Referenceable ref, String property, long expectedTime) throws ParseException {
//Verify timestamps.
String createTimeStr = (String) ref.get(property);
- Date createDate = TypeSystem.getInstance().getDateFormat().parse(createTimeStr);
+ Date createDate = AtlasBaseTypeDef.DATE_FORMATTER.parse(createTimeStr);
Assert.assertNotNull(createTimeStr);
if (expectedTime > 0) {
@@ -1262,7 +1260,7 @@ public class HiveHookIT extends HiveITBase {
String guid2 = assertColumnIsRegistered(HiveMetaStoreBridge.getColumnQualifiedName(tbqn, "id_new"));
assertEquals(guid2, guid);
- assertTrue(atlasClient.getEntity(guid2).getTraits().contains(trait));
+ assertTrue(atlasClient.getEntity(guid2).getTraitNames().contains(trait));
}
@Test
@@ -1349,15 +1347,15 @@ public class HiveHookIT extends HiveITBase {
@Test
public void testAlterTableBucketingClusterSort() throws Exception {
String tableName = createTable();
- ImmutableList<String> cols = ImmutableList.of("id");
+ List<String> cols = Collections.singletonList("id");
runBucketSortQuery(tableName, 5, cols, cols);
- cols = ImmutableList.of("id", NAME);
+ cols = Arrays.asList("id", NAME);
runBucketSortQuery(tableName, 2, cols, cols);
}
- private void runBucketSortQuery(String tableName, final int numBuckets, final ImmutableList<String> bucketCols,
- final ImmutableList<String> sortCols) throws Exception {
+ private void runBucketSortQuery(String tableName, final int numBuckets, final List<String> bucketCols,
+ final List<String> sortCols) throws Exception {
final String fmtQuery = "alter table %s CLUSTERED BY (%s) SORTED BY (%s) INTO %s BUCKETS";
String query = String.format(fmtQuery, tableName, stripListBrackets(bucketCols.toString()),
stripListBrackets(sortCols.toString()), numBuckets);
@@ -1375,8 +1373,8 @@ public class HiveHookIT extends HiveITBase {
}
private void verifyBucketSortingProperties(Referenceable tableRef, int numBuckets,
- ImmutableList<String> bucketColNames,
- ImmutableList<String> sortcolNames) throws Exception {
+ List<String> bucketColNames,
+ List<String> sortcolNames) throws Exception {
Referenceable sdRef = (Referenceable) tableRef.get(HiveMetaStoreBridge.STORAGE_DESC);
Assert.assertEquals(((scala.math.BigInt) sdRef.get(HiveMetaStoreBridge.STORAGE_NUM_BUCKETS)).intValue(),
numBuckets);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/sqoop-bridge/pom.xml
----------------------------------------------------------------------
diff --git a/addons/sqoop-bridge/pom.xml b/addons/sqoop-bridge/pom.xml
index d46d8a9..5a9305e 100644
--- a/addons/sqoop-bridge/pom.xml
+++ b/addons/sqoop-bridge/pom.xml
@@ -94,11 +94,6 @@
<scope>compile</scope>
</dependency>
- <dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- </dependency>
-
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-client-v1</artifactId>
@@ -155,13 +150,6 @@
<type>pom</type>
<scope>test</scope>
</dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <classifier>tests</classifier>
- <scope>test</scope>
- </dependency>
</dependencies>
<profiles>
@@ -206,21 +194,6 @@
<version>${project.version}</version>
</artifactItem>
<artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-native_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-core_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-ast_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>${project.groupId}</groupId>
<artifactId>atlas-client-common</artifactId>
<version>${project.version}</version>
@@ -237,11 +210,6 @@
</artifactItem>
<artifactItem>
<groupId>${project.groupId}</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <version>${project.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>${project.groupId}</groupId>
<artifactId>atlas-intg</artifactId>
<version>${project.version}</version>
</artifactItem>
@@ -256,28 +224,8 @@
<version>${project.version}</version>
</artifactItem>
<artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>org.apache.kafka</groupId>
- <artifactId>kafka_${scala.binary.version}</artifactId>
+ <artifactId>kafka_${kafka.scala.binary.version}</artifactId>
<version>${kafka.version}</version>
</artifactItem>
<artifactItem>
@@ -285,11 +233,6 @@
<artifactId>kafka-clients</artifactId>
<version>${kafka.version}</version>
</artifactItem>
- <artifactItem>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>${gson.version}</version>
- </artifactItem>
</artifactItems>
</configuration>
</execution>
@@ -363,7 +306,7 @@
</systemProperty>
<systemProperty>
<key>atlas.conf</key>
- <value>${project.build.directory}/../../../typesystem/target/test-classes</value>
+ <value>${project.build.directory}/../../../intg/target/test-classes</value>
</systemProperty>
<systemProperty>
<key>atlas.home</key>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java
----------------------------------------------------------------------
diff --git a/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java b/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java
index 50e20fa..5ded92c 100644
--- a/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java
+++ b/addons/sqoop-bridge/src/main/java/org/apache/atlas/sqoop/hook/SqoopHook.java
@@ -26,9 +26,10 @@ import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.hook.AtlasHook;
import org.apache.atlas.hook.AtlasHookException;
-import org.apache.atlas.notification.hook.HookNotification;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest;
import org.apache.atlas.sqoop.model.SqoopDataTypes;
-import org.apache.atlas.typesystem.Referenceable;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang3.StringUtils;
import org.apache.sqoop.SqoopJobDataPublisher;
@@ -185,8 +186,8 @@ public class SqoopHook extends SqoopJobDataPublisher {
Referenceable procRef = createSqoopProcessInstance(dbStoreRef, hiveTableRef, data, clusterName);
int maxRetries = atlasProperties.getInt(HOOK_NUM_RETRIES, 3);
- HookNotification.HookNotificationMessage message =
- new HookNotification.EntityCreateRequest(AtlasHook.getUser(), dbStoreRef, dbRef, hiveTableRef, procRef);
+ HookNotification message =
+ new EntityCreateRequest(AtlasHook.getUser(), dbStoreRef, dbRef, hiveTableRef, procRef);
AtlasHook.notifyEntities(Arrays.asList(message), maxRetries);
}
catch(Exception e) {
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/storm-bridge/pom.xml
----------------------------------------------------------------------
diff --git a/addons/storm-bridge/pom.xml b/addons/storm-bridge/pom.xml
index 16104ca..e57c021 100644
--- a/addons/storm-bridge/pom.xml
+++ b/addons/storm-bridge/pom.xml
@@ -38,11 +38,6 @@
<!-- apache atlas core dependencies -->
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
<artifactId>atlas-client-v1</artifactId>
</dependency>
@@ -124,13 +119,6 @@
<type>war</type>
<scope>test</scope>
</dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <classifier>tests</classifier>
- <scope>test</scope>
- </dependency>
</dependencies>
<profiles>
@@ -160,21 +148,6 @@
<version>${project.version}</version>
</artifactItem>
<artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-native_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-core_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-ast_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>${project.groupId}</groupId>
<artifactId>atlas-client-common</artifactId>
<version>${project.version}</version>
@@ -191,11 +164,6 @@
</artifactItem>
<artifactItem>
<groupId>${project.groupId}</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <version>${project.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>${project.groupId}</groupId>
<artifactId>hive-bridge</artifactId>
<version>${project.version}</version>
</artifactItem>
@@ -220,28 +188,8 @@
<version>${project.version}</version>
</artifactItem>
<artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>org.apache.kafka</groupId>
- <artifactId>kafka_${scala.binary.version}</artifactId>
+ <artifactId>kafka_${kafka.scala.binary.version}</artifactId>
<version>${kafka.version}</version>
</artifactItem>
<artifactItem>
@@ -300,11 +248,6 @@
<version>${hadoop.version}</version>
</artifactItem>
<artifactItem>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>${gson.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${log4j.version}</version>
@@ -347,60 +290,6 @@
<build>
<plugins>
<plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- <version>3.2.0</version>
- <executions>
- <execution>
- <id>scala-compile-first</id>
- <phase>process-resources</phase>
- <goals>
- <goal>compile</goal>
- </goals>
- </execution>
- <execution>
- <id>scala-test-compile-first</id>
- <phase>process-test-resources</phase>
- <goals>
- <goal>testCompile</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <scalaVersion>${scala.version}</scalaVersion>
- <recompileMode>incremental</recompileMode>
- <useZincServer>true</useZincServer>
- <source>1.7</source>
- <target>1.7</target>
- <args>
- <arg>-unchecked</arg>
- <arg>-deprecation</arg>
- <arg>-feature</arg>
- </args>
- <jvmArgs>
- <jvmArg>-Xmx512m</jvmArg>
- </jvmArgs>
- <!--
- <javacArgs>
- <javacArg>-source</javacArg>
- <javacArg>${java.version}</javacArg>
- <javacArg>-target</javacArg>
- <javacArg>${java.version}</javacArg>
- </javacArgs>
- -->
- <!-- The following plugin is required to use quasiquotes in Scala 2.10 and is used
- by Spark SQL for code generation. -->
- <!--<compilerPlugins>
- <compilerPlugin>
- <groupId>org.scalamacros</groupId>
- <artifactId>paradise_${scala.version}</artifactId>
- <version>${scala.macros.version}</version>
- </compilerPlugin>
- </compilerPlugins>-->
- </configuration>
- </plugin>
-
- <plugin>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-maven-plugin</artifactId>
<configuration>
@@ -437,7 +326,7 @@
</systemProperty>
<systemProperty>
<key>atlas.conf</key>
- <value>${project.build.directory}/../../../typesystem/target/test-classes</value>
+ <value>${project.build.directory}/../../../intg/target/test-classes</value>
</systemProperty>
<systemProperty>
<key>atlas.home</key>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java
----------------------------------------------------------------------
diff --git a/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java b/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java
index c3df8fc..57fc7a1 100644
--- a/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java
+++ b/addons/storm-bridge/src/main/java/org/apache/atlas/storm/hook/StormAtlasHook.java
@@ -18,6 +18,7 @@
package org.apache.atlas.storm.hook;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.storm.ISubmitterHook;
import org.apache.storm.generated.Bolt;
import org.apache.storm.generated.SpoutSpec;
@@ -29,7 +30,6 @@ import org.apache.atlas.AtlasConstants;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hook.AtlasHook;
import org.apache.atlas.storm.model.StormDataTypes;
-import org.apache.atlas.typesystem.Referenceable;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/storm-bridge/src/test/java/org/apache/atlas/storm/hook/StormAtlasHookIT.java
----------------------------------------------------------------------
diff --git a/addons/storm-bridge/src/test/java/org/apache/atlas/storm/hook/StormAtlasHookIT.java b/addons/storm-bridge/src/test/java/org/apache/atlas/storm/hook/StormAtlasHookIT.java
index e0800b8..5fef38d 100644
--- a/addons/storm-bridge/src/test/java/org/apache/atlas/storm/hook/StormAtlasHookIT.java
+++ b/addons/storm-bridge/src/test/java/org/apache/atlas/storm/hook/StormAtlasHookIT.java
@@ -21,8 +21,8 @@ package org.apache.atlas.storm.hook;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.atlas.storm.model.StormDataTypes;
-import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.commons.configuration.Configuration;
import org.apache.storm.ILocalCluster;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/authorization/pom.xml
----------------------------------------------------------------------
diff --git a/authorization/pom.xml b/authorization/pom.xml
index 143c4e4..f210a2f 100644
--- a/authorization/pom.xml
+++ b/authorization/pom.xml
@@ -42,8 +42,15 @@
</dependency>
<dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>${guava.version}</version>
+ </dependency>
+
+ <dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
+ <scope>test</scope>
</dependency>
</dependencies>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/client/client-v1/pom.xml
----------------------------------------------------------------------
diff --git a/client/client-v1/pom.xml b/client/client-v1/pom.xml
index 2b492bb..e25a308 100644
--- a/client/client-v1/pom.xml
+++ b/client/client-v1/pom.xml
@@ -31,18 +31,13 @@
<dependencies>
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.atlas</groupId>
<artifactId>atlas-client-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <classifier>tests</classifier>
- <scope>test</scope>
+ <artifactId>atlas-common</artifactId>
+ <version>${project.version}</version>
</dependency>
</dependencies>
</project>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/client/client-v1/src/main/java/org/apache/atlas/AtlasClient.java
----------------------------------------------------------------------
diff --git a/client/client-v1/src/main/java/org/apache/atlas/AtlasClient.java b/client/client-v1/src/main/java/org/apache/atlas/AtlasClient.java
index 8bbc89b..bcdec71 100644
--- a/client/client-v1/src/main/java/org/apache/atlas/AtlasClient.java
+++ b/client/client-v1/src/main/java/org/apache/atlas/AtlasClient.java
@@ -19,19 +19,16 @@
package org.apache.atlas;
import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableSet;
import com.sun.jersey.api.client.WebResource;
import org.apache.atlas.model.legacy.EntityResult;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.v1.model.typedef.AttributeDefinition;
+import org.apache.atlas.v1.model.typedef.TraitTypeDefinition;
+import org.apache.atlas.v1.model.typedef.TypesDef;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.type.AtlasType;
import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.security.UserGroupInformation;
@@ -45,10 +42,7 @@ import javax.ws.rs.HttpMethod;
import javax.ws.rs.core.Cookie;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
+import java.util.*;
/**
* Client for metadata.
@@ -276,7 +270,7 @@ public class AtlasClient extends AtlasBaseClient {
* @throws AtlasServiceException
*/
public List<String> createType(TypesDef typeDef) throws AtlasServiceException {
- return createType(TypesSerialization.toJson(typeDef));
+ return createType(AtlasType.toV1Json(typeDef));
}
/**
@@ -287,11 +281,11 @@ public class AtlasClient extends AtlasBaseClient {
* @return the list of types created
* @throws AtlasServiceException
*/
- public List<String> createTraitType(String traitName, ImmutableSet<String> superTraits, AttributeDefinition... attributeDefinitions) throws AtlasServiceException {
- HierarchicalTypeDefinition<TraitType> piiTrait =
- TypesUtil.createTraitTypeDef(traitName, superTraits, attributeDefinitions);
+ public List<String> createTraitType(String traitName, Set<String> superTraits, AttributeDefinition... attributeDefinitions) throws AtlasServiceException {
+ TraitTypeDefinition piiTrait =
+ TypesUtil.createTraitTypeDef(traitName, null, superTraits, Arrays.asList(attributeDefinitions));
- String traitDefinitionAsJSON = TypesSerialization.toJson(piiTrait, true);
+ String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait);
LOG.debug("Creating trait type {} {}", traitName, traitDefinitionAsJSON);
return createType(traitDefinitionAsJSON);
}
@@ -332,7 +326,7 @@ public class AtlasClient extends AtlasBaseClient {
* @throws AtlasServiceException
*/
public List<String> updateType(TypesDef typeDef) throws AtlasServiceException {
- return updateType(TypesSerialization.toJson(typeDef));
+ return updateType(AtlasType.toV1Json(typeDef));
}
/**
@@ -396,7 +390,7 @@ public class AtlasClient extends AtlasBaseClient {
try {
JSONObject response = callAPIWithBodyAndParams(API_V1.GET_TYPE, null, typeName);
String typeJson = response.getString(DEFINITION);
- return TypesSerialization.fromJson(typeJson);
+ return AtlasType.fromV1Json(typeJson, TypesDef.class);
} catch (JSONException e) {
throw new AtlasServiceException(e);
}
@@ -442,7 +436,7 @@ public class AtlasClient extends AtlasBaseClient {
private JSONArray getEntitiesArray(Collection<Referenceable> entities) {
JSONArray entityArray = new JSONArray(entities.size());
for (Referenceable entity : entities) {
- entityArray.put(InstanceSerialization.toJson(entity, true));
+ entityArray.put(AtlasType.toV1Json(entity));
}
return entityArray;
}
@@ -500,7 +494,7 @@ public class AtlasClient extends AtlasBaseClient {
* @param entity entity definition
*/
public EntityResult updateEntity(String guid, Referenceable entity) throws AtlasServiceException {
- String entityJson = InstanceSerialization.toJson(entity, true);
+ String entityJson = AtlasType.toV1Json(entity);
LOG.debug("Updating entity id {} with {}", guid, entityJson);
JSONObject response = callAPIWithBodyAndParams(API_V1.UPDATE_ENTITY_PARTIAL, entityJson, guid);
return extractEntityResult(response);
@@ -513,7 +507,7 @@ public class AtlasClient extends AtlasBaseClient {
* @param traitDefinition trait definition
*/
public void addTrait(String guid, Struct traitDefinition) throws AtlasServiceException {
- String traitJson = InstanceSerialization.toJson(traitDefinition, true);
+ String traitJson = AtlasType.toV1Json(traitDefinition);
LOG.debug("Adding trait to entity with id {} {}", guid, traitJson);
callAPIWithBodyAndParams(API_V1.ADD_TRAITS, traitJson, guid, URI_TRAITS);
}
@@ -540,7 +534,7 @@ public class AtlasClient extends AtlasBaseClient {
final String uniqueAttributeValue,
Referenceable entity) throws AtlasServiceException {
final API api = API_V1.UPDATE_ENTITY_PARTIAL;
- String entityJson = InstanceSerialization.toJson(entity, true);
+ String entityJson = AtlasType.toV1Json(entity);
LOG.debug("Updating entity type: {}, attributeName: {}, attributeValue: {}, entity: {}", entityType,
uniqueAttributeName, uniqueAttributeValue, entityJson);
JSONObject response = callAPIWithRetries(api, entityJson, new ResourceCreator() {
@@ -623,7 +617,7 @@ public class AtlasClient extends AtlasBaseClient {
JSONObject jsonResponse = callAPIWithBodyAndParams(API_V1.GET_ENTITY, null, guid);
try {
String entityInstanceDefinition = jsonResponse.getString(AtlasClient.DEFINITION);
- return InstanceSerialization.fromJsonReferenceable(entityInstanceDefinition, true);
+ return AtlasType.fromV1Json(entityInstanceDefinition, Referenceable.class);
} catch (JSONException e) {
throw new AtlasServiceException(API_V1.GET_ENTITY, e);
}
@@ -660,7 +654,7 @@ public class AtlasClient extends AtlasBaseClient {
});
try {
String entityInstanceDefinition = jsonResponse.getString(AtlasClient.DEFINITION);
- return InstanceSerialization.fromJsonReferenceable(entityInstanceDefinition, true);
+ return AtlasType.fromV1Json(entityInstanceDefinition, Referenceable.class);
} catch (JSONException e) {
throw new AtlasServiceException(api, e);
}
@@ -706,7 +700,7 @@ public class AtlasClient extends AtlasBaseClient {
List<JSONObject> traitDefList = extractResults(jsonResponse, AtlasClient.RESULTS, new ExtractOperation<JSONObject, JSONObject>());
ArrayList<Struct> traitStructList = new ArrayList<>();
for (JSONObject traitDef : traitDefList) {
- Struct traitStruct = InstanceSerialization.fromJsonStruct(traitDef.toString(), true);
+ Struct traitStruct = AtlasType.fromV1Json(traitDef.toString(), Struct.class);
traitStructList.add(traitStruct);
}
return traitStructList;
@@ -723,7 +717,7 @@ public class AtlasClient extends AtlasBaseClient {
JSONObject jsonResponse = callAPIWithBodyAndParams(API_V1.GET_TRAIT_DEFINITION, null, guid, TRAIT_DEFINITIONS, traitName);
try {
- return InstanceSerialization.fromJsonStruct(jsonResponse.getString(AtlasClient.RESULTS), false);
+ return AtlasType.fromV1Json(jsonResponse.getString(AtlasClient.RESULTS), Struct.class);
} catch (JSONException e) {
throw new AtlasServiceException(API_V1.GET_TRAIT_DEFINITION, e);
}
@@ -782,7 +776,7 @@ public class AtlasClient extends AtlasBaseClient {
return extractResults(jsonResponse, AtlasClient.EVENTS, new ExtractOperation<EntityAuditEvent, JSONObject>() {
@Override
EntityAuditEvent extractElement(JSONObject element) throws JSONException {
- return SerDe.GSON.fromJson(element.toString(), EntityAuditEvent.class);
+ return AtlasType.fromV1Json(element.toString(), EntityAuditEvent.class);
}
});
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/client/client-v1/src/main/java/org/apache/atlas/EntityAuditEvent.java
----------------------------------------------------------------------
diff --git a/client/client-v1/src/main/java/org/apache/atlas/EntityAuditEvent.java b/client/client-v1/src/main/java/org/apache/atlas/EntityAuditEvent.java
index 904674d..567205f 100644
--- a/client/client-v1/src/main/java/org/apache/atlas/EntityAuditEvent.java
+++ b/client/client-v1/src/main/java/org/apache/atlas/EntityAuditEvent.java
@@ -18,8 +18,9 @@
package org.apache.atlas;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
+
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.type.AtlasType;
import java.util.Objects;
@@ -38,13 +39,13 @@ public class EntityAuditEvent {
private EntityAuditAction action;
private String details;
private String eventKey;
- private IReferenceableInstance entityDefinition;
+ private Referenceable entityDefinition;
public EntityAuditEvent() {
}
public EntityAuditEvent(String entityId, Long ts, String user, EntityAuditAction action, String details,
- IReferenceableInstance entityDefinition) throws AtlasException {
+ Referenceable entityDefinition) throws AtlasException {
this.entityId = entityId;
this.timestamp = ts;
this.user = user;
@@ -74,11 +75,11 @@ public class EntityAuditEvent {
@Override
public String toString() {
- return SerDe.GSON.toJson(this);
+ return AtlasType.toV1Json(this);
}
public static EntityAuditEvent fromString(String eventString) {
- return SerDe.GSON.fromJson(eventString, EntityAuditEvent.class);
+ return AtlasType.fromV1Json(eventString, EntityAuditEvent.class);
}
public String getEntityId() {
@@ -129,18 +130,18 @@ public class EntityAuditEvent {
this.eventKey = eventKey;
}
- public IReferenceableInstance getEntityDefinition() {
+ public Referenceable getEntityDefinition() {
return entityDefinition;
}
public String getEntityDefinitionString() {
if (entityDefinition != null) {
- return InstanceSerialization.toJson(entityDefinition, true);
+ return AtlasType.toV1Json(entityDefinition);
}
return null;
}
public void setEntityDefinition(String entityDefinition) {
- this.entityDefinition = InstanceSerialization.fromJsonReferenceable(entityDefinition, true);
+ this.entityDefinition = AtlasType.fromV1Json(entityDefinition, Referenceable.class);
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/client/client-v1/src/main/java/org/apache/atlas/SerDe.java
----------------------------------------------------------------------
diff --git a/client/client-v1/src/main/java/org/apache/atlas/SerDe.java b/client/client-v1/src/main/java/org/apache/atlas/SerDe.java
deleted file mode 100644
index cdc3509..0000000
--- a/client/client-v1/src/main/java/org/apache/atlas/SerDe.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas;
-
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.google.gson.JsonDeserializationContext;
-import com.google.gson.JsonDeserializer;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonParser;
-import com.google.gson.JsonSerializationContext;
-import com.google.gson.JsonSerializer;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-
-import java.lang.reflect.Type;
-
-@Deprecated
-public class SerDe {
- public static final Gson GSON = new GsonBuilder().
- registerTypeAdapter(IStruct.class, new StructDeserializer()).
- registerTypeAdapter(IReferenceableInstance.class, new ReferenceableSerializerDeserializer()).
- registerTypeAdapter(Referenceable.class, new ReferenceableSerializerDeserializer()).
- create();
-
- /**
- * Serde for Struct used by AbstractNotificationConsumer.GSON.
- */
- public static final class StructDeserializer implements JsonDeserializer<IStruct>, JsonSerializer<IStruct> {
- @Override
- public IStruct deserialize(final JsonElement json, final Type type,
- final JsonDeserializationContext context) {
- return context.deserialize(json, Struct.class);
- }
-
- @Override
- public JsonElement serialize(IStruct src, Type typeOfSrc, JsonSerializationContext context) {
- String instanceJson = InstanceSerialization.toJson(src, true);
- return new JsonParser().parse(instanceJson).getAsJsonObject();
- }
- }
-
- /**
- * Serde for Referenceable used by AbstractNotificationConsumer.GSON.
- */
- public static final class ReferenceableSerializerDeserializer implements JsonDeserializer<IStruct>,
- JsonSerializer<IReferenceableInstance> {
- @Override
- public IReferenceableInstance deserialize(final JsonElement json, final Type type,
- final JsonDeserializationContext context) {
-
- return InstanceSerialization.fromJsonReferenceable(json.toString(), true);
- }
-
- @Override
- public JsonElement serialize(IReferenceableInstance src, Type typeOfSrc, JsonSerializationContext context) {
- String instanceJson = InstanceSerialization.toJson(src, true);
- return new JsonParser().parse(instanceJson).getAsJsonObject();
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/client/client-v1/src/test/java/org/apache/atlas/AtlasClientTest.java
----------------------------------------------------------------------
diff --git a/client/client-v1/src/test/java/org/apache/atlas/AtlasClientTest.java b/client/client-v1/src/test/java/org/apache/atlas/AtlasClientTest.java
index c387be1..5287af4 100644
--- a/client/client-v1/src/test/java/org/apache/atlas/AtlasClientTest.java
+++ b/client/client-v1/src/test/java/org/apache/atlas/AtlasClientTest.java
@@ -22,8 +22,8 @@ import com.sun.jersey.api.client.ClientHandlerException;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.apache.atlas.model.legacy.EntityResult;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.type.AtlasType;
import org.apache.commons.configuration.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.codehaus.jettison.json.JSONObject;
@@ -99,7 +99,7 @@ public class AtlasClientTest {
JSONObject jsonResponse = new JSONObject(new EntityResult(Arrays.asList("id"), null, null).toString());
when(response.getEntity(String.class)).thenReturn(jsonResponse.toString());
when(response.getLength()).thenReturn(jsonResponse.length());
- String entityJson = InstanceSerialization.toJson(new Referenceable("type"), true);
+ String entityJson = AtlasType.toV1Json(new Referenceable("type"));
when(builder.method(anyString(), Matchers.<Class>any(), anyString())).thenReturn(response);
List<String> ids = atlasClient.createEntity(entityJson);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/client/pom.xml
----------------------------------------------------------------------
diff --git a/client/pom.xml b/client/pom.xml
index b69c077..728c706 100755
--- a/client/pom.xml
+++ b/client/pom.xml
@@ -39,16 +39,22 @@
<dependencies>
<dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-all</artifactId>
- <scope>test</scope>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
</dependency>
+
<dependency>
<groupId>com.sun.jersey.contribs</groupId>
<artifactId>jersey-multipart</artifactId>
</dependency>
<dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<scope>test</scope>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/common/pom.xml
----------------------------------------------------------------------
diff --git a/common/pom.xml b/common/pom.xml
index c2e5da6..6d7515b 100644
--- a/common/pom.xml
+++ b/common/pom.xml
@@ -76,6 +76,7 @@
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>${guava.version}</version>
+ <scope>test</scope>
</dependency>
<dependency>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/common/src/main/java/org/apache/atlas/AtlasConfiguration.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/atlas/AtlasConfiguration.java b/common/src/main/java/org/apache/atlas/AtlasConfiguration.java
deleted file mode 100644
index bd2bf7f..0000000
--- a/common/src/main/java/org/apache/atlas/AtlasConfiguration.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas;
-
-import org.apache.commons.configuration.Configuration;
-
-/**
- * Enum that encapsulated each property name and its default value.
- */
-public enum AtlasConfiguration {
- //web server configuration
- WEBSERVER_MIN_THREADS("atlas.webserver.minthreads", 10),
- WEBSERVER_MAX_THREADS("atlas.webserver.maxthreads", 100),
- WEBSERVER_KEEPALIVE_SECONDS("atlas.webserver.keepalivetimesecs", 60),
- WEBSERVER_QUEUE_SIZE("atlas.webserver.queuesize", 100),
- WEBSERVER_REQUEST_BUFFER_SIZE("atlas.jetty.request.buffer.size", 16192),
-
- QUERY_PARAM_MAX_LENGTH("atlas.query.param.max.length", 4*1024),
-
- NOTIFICATION_MESSAGE_MAX_LENGTH_BYTES("atlas.notification.message.max.length.bytes", (1000 * 1000)),
- NOTIFICATION_MESSAGE_COMPRESSION_ENABLED("atlas.notification.message.compression.enabled", true),
- NOTIFICATION_SPLIT_MESSAGE_SEGMENTS_WAIT_TIME_SECONDS("atlas.notification.split.message.segments.wait.time.seconds", 15 * 60),
- NOTIFICATION_SPLIT_MESSAGE_BUFFER_PURGE_INTERVAL_SECONDS("atlas.notification.split.message.buffer.purge.interval.seconds", 5 * 60),
-
- //search configuration
- SEARCH_MAX_LIMIT("atlas.search.maxlimit", 10000),
- SEARCH_DEFAULT_LIMIT("atlas.search.defaultlimit", 100);
-
- private static final Configuration APPLICATION_PROPERTIES;
-
- static {
- try {
- APPLICATION_PROPERTIES = ApplicationProperties.get();
- } catch (AtlasException e) {
- throw new RuntimeException(e);
- }
- }
-
- private final String propertyName;
- private final Object defaultValue;
-
- AtlasConfiguration(String propertyName, Object defaultValue) {
- this.propertyName = propertyName;
- this.defaultValue = defaultValue;
- }
-
- public int getInt() {
- return APPLICATION_PROPERTIES.getInt(propertyName, Integer.valueOf(defaultValue.toString()).intValue());
- }
-
- public long getLong() {
- return APPLICATION_PROPERTIES.getLong(propertyName, Long.valueOf(defaultValue.toString()).longValue());
- }
-
- public boolean getBoolean() {
- return APPLICATION_PROPERTIES.getBoolean(propertyName, Boolean.valueOf(defaultValue.toString()).booleanValue());
- }
-
- public String getString() {
- return APPLICATION_PROPERTIES.getString(propertyName, defaultValue.toString());
- }
-
- public Object get() {
- Object value = APPLICATION_PROPERTIES.getProperty(propertyName);
- return value == null ? defaultValue : value;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/common/src/main/java/org/apache/atlas/repository/Constants.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/atlas/repository/Constants.java b/common/src/main/java/org/apache/atlas/repository/Constants.java
index 5475514..8eda22d 100644
--- a/common/src/main/java/org/apache/atlas/repository/Constants.java
+++ b/common/src/main/java/org/apache/atlas/repository/Constants.java
@@ -104,6 +104,13 @@ public final class Constants {
public static final String MAX_FULLTEXT_QUERY_STR_LENGTH = "atlas.graph.fulltext-max-query-str-length";
public static final String MAX_DSL_QUERY_STR_LENGTH = "atlas.graph.dsl-max-query-str-length";
+ public static final String ATTRIBUTE_NAME_GUID = "guid";
+ public static final String ATTRIBUTE_NAME_TYPENAME = "typeName";
+ public static final String ATTRIBUTE_NAME_SUPERTYPENAMES = "superTypeNames";
+ public static final String ATTRIBUTE_NAME_STATE = "state";
+ public static final String ATTRIBUTE_NAME_VERSION = "version";
+ public static final String TEMP_STRUCT_NAME_PREFIX = "__tempQueryResultStruct";
+
private Constants() {
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/graphdb/api/pom.xml
----------------------------------------------------------------------
diff --git a/graphdb/api/pom.xml b/graphdb/api/pom.xml
index c949a98..d0462b4 100644
--- a/graphdb/api/pom.xml
+++ b/graphdb/api/pom.xml
@@ -32,10 +32,14 @@
<packaging>jar</packaging>
<dependencies>
+ <dependency>
+ <groupId>org.apache.atlas</groupId>
+ <artifactId>atlas-common</artifactId>
+ </dependency>
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
+ <artifactId>atlas-intg</artifactId>
</dependency>
<dependency>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasGraph.java
----------------------------------------------------------------------
diff --git a/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasGraph.java b/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasGraph.java
index dded76f..31d2085 100644
--- a/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasGraph.java
+++ b/graphdb/api/src/main/java/org/apache/atlas/repository/graphdb/AtlasGraph.java
@@ -27,7 +27,7 @@ import javax.script.ScriptException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.typesystem.types.IDataType;
+import org.apache.atlas.type.AtlasType;
/**
* Represents a graph.
@@ -110,7 +110,6 @@ public interface AtlasGraph<V, E> {
* Gets the names of the indexes on edges
* type.
*
- * @param type
* @return
*/
Set<String> getEdgeIndexKeys();
@@ -120,7 +119,6 @@ public interface AtlasGraph<V, E> {
* Gets the names of the indexes on vertices.
* type.
*
- * @param type
* @return
*/
Set<String> getVertexIndexKeys();
@@ -218,11 +216,11 @@ public interface AtlasGraph<V, E> {
* convert property values from the value that is stored in the graph
* to the value/type that the user expects to get back.
*
- * @param expr - gremlin expr that represents the persistent property value
+ * @param valueExpr - gremlin expr that represents the persistent property value
* @param type
* @return
*/
- GroovyExpression generatePersisentToLogicalConversionExpression(GroovyExpression valueExpr, IDataType<?> type);
+ GroovyExpression generatePersisentToLogicalConversionExpression(GroovyExpression valueExpr, AtlasType type);
/**
* Indicates whether or not stored values with the specified type need to be converted
@@ -234,7 +232,7 @@ public interface AtlasGraph<V, E> {
* gremlin expression with the converted value. In addition, this cause the gremlin
* 'filter' step to be used to compare the values instead of a 'has' step.
*/
- boolean isPropertyValueConversionNeeded(IDataType<?> type);
+ boolean isPropertyValueConversionNeeded(AtlasType type);
/**
* Gets the version of Gremlin that this graph uses.
@@ -286,7 +284,7 @@ public interface AtlasGraph<V, E> {
/**
* Executes a Gremlin script, returns an object with the result.
*
- * @param gremlinQuery
+ * @param query
* @param isPath whether this is a path query
*
* @return the result from executing the script
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/graphdb/common/pom.xml
----------------------------------------------------------------------
diff --git a/graphdb/common/pom.xml b/graphdb/common/pom.xml
index d290b92..71e3bbc 100644
--- a/graphdb/common/pom.xml
+++ b/graphdb/common/pom.xml
@@ -34,6 +34,12 @@ under the License. -->
</dependency>
<dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>${guava.version}</version>
+ </dependency>
+
+ <dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<version>6.9.4</version>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraph.java
----------------------------------------------------------------------
diff --git a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraph.java b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraph.java
index bc63a69..7c96806 100644
--- a/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraph.java
+++ b/graphdb/janus/src/main/java/org/apache/atlas/repository/graphdb/janus/AtlasJanusGraph.java
@@ -21,6 +21,7 @@ import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+import org.apache.atlas.type.AtlasType;
import org.janusgraph.core.Cardinality;
import org.janusgraph.core.PropertyKey;
import org.janusgraph.core.SchemaViolationException;
@@ -42,7 +43,6 @@ import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.repository.graphdb.GremlinVersion;
import org.apache.atlas.repository.graphdb.janus.query.AtlasJanusGraphQuery;
import org.apache.atlas.repository.graphdb.utils.IteratorToIterableAdapter;
-import org.apache.atlas.typesystem.types.IDataType;
import org.apache.tinkerpop.gremlin.groovy.CompilerCustomizerProvider;
import org.apache.tinkerpop.gremlin.groovy.DefaultImportCustomizerProvider;
import org.apache.tinkerpop.gremlin.groovy.jsr223.GremlinGroovyScriptEngine;
@@ -359,13 +359,13 @@ public class AtlasJanusGraph implements AtlasGraph<AtlasJanusVertex, AtlasJanusE
}
@Override
- public GroovyExpression generatePersisentToLogicalConversionExpression(GroovyExpression expr, IDataType<?> type) {
+ public GroovyExpression generatePersisentToLogicalConversionExpression(GroovyExpression expr, AtlasType type) {
//nothing special needed, value is stored in required type
return expr;
}
@Override
- public boolean isPropertyValueConversionNeeded(IDataType<?> type) {
+ public boolean isPropertyValueConversionNeeded(AtlasType type) {
return false;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/graphdb/titan0/src/main/java/org/apache/atlas/repository/graphdb/titan0/Titan0Graph.java
----------------------------------------------------------------------
diff --git a/graphdb/titan0/src/main/java/org/apache/atlas/repository/graphdb/titan0/Titan0Graph.java b/graphdb/titan0/src/main/java/org/apache/atlas/repository/graphdb/titan0/Titan0Graph.java
index 2408287..4409009 100644
--- a/graphdb/titan0/src/main/java/org/apache/atlas/repository/graphdb/titan0/Titan0Graph.java
+++ b/graphdb/titan0/src/main/java/org/apache/atlas/repository/graphdb/titan0/Titan0Graph.java
@@ -45,7 +45,7 @@ import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.repository.graphdb.GremlinVersion;
import org.apache.atlas.repository.graphdb.titan0.query.Titan0GraphQuery;
import org.apache.atlas.repository.graphdb.utils.IteratorToIterableAdapter;
-import org.apache.atlas.typesystem.types.IDataType;
+import org.apache.atlas.type.AtlasType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -348,14 +348,14 @@ public class Titan0Graph implements AtlasGraph<Titan0Vertex, Titan0Edge> {
}
@Override
- public GroovyExpression generatePersisentToLogicalConversionExpression(GroovyExpression expr, IDataType<?> type) {
+ public GroovyExpression generatePersisentToLogicalConversionExpression(GroovyExpression expr, AtlasType type) {
//nothing special needed, value is stored in required type
return expr;
}
@Override
- public boolean isPropertyValueConversionNeeded(IDataType<?> type) {
+ public boolean isPropertyValueConversionNeeded(AtlasType type) {
return false;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/graphdb/titan1/src/main/java/org/apache/atlas/repository/graphdb/titan1/Titan1Graph.java
----------------------------------------------------------------------
diff --git a/graphdb/titan1/src/main/java/org/apache/atlas/repository/graphdb/titan1/Titan1Graph.java b/graphdb/titan1/src/main/java/org/apache/atlas/repository/graphdb/titan1/Titan1Graph.java
index ffb6b37..d94289d 100644
--- a/graphdb/titan1/src/main/java/org/apache/atlas/repository/graphdb/titan1/Titan1Graph.java
+++ b/graphdb/titan1/src/main/java/org/apache/atlas/repository/graphdb/titan1/Titan1Graph.java
@@ -42,7 +42,7 @@ import org.apache.atlas.repository.graphdb.AtlasVertex;
import org.apache.atlas.repository.graphdb.GremlinVersion;
import org.apache.atlas.repository.graphdb.titan1.query.Titan1GraphQuery;
import org.apache.atlas.repository.graphdb.utils.IteratorToIterableAdapter;
-import org.apache.atlas.typesystem.types.IDataType;
+import org.apache.atlas.type.AtlasType;
import org.apache.tinkerpop.gremlin.groovy.CompilerCustomizerProvider;
import org.apache.tinkerpop.gremlin.groovy.DefaultImportCustomizerProvider;
import org.apache.tinkerpop.gremlin.groovy.jsr223.GremlinGroovyScriptEngine;
@@ -359,13 +359,13 @@ public class Titan1Graph implements AtlasGraph<Titan1Vertex, Titan1Edge> {
}
@Override
- public GroovyExpression generatePersisentToLogicalConversionExpression(GroovyExpression expr, IDataType<?> type) {
+ public GroovyExpression generatePersisentToLogicalConversionExpression(GroovyExpression expr, AtlasType type) {
//nothing special needed, value is stored in required type
return expr;
}
@Override
- public boolean isPropertyValueConversionNeeded(IDataType<?> type) {
+ public boolean isPropertyValueConversionNeeded(AtlasType type) {
return false;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/pom.xml
----------------------------------------------------------------------
diff --git a/intg/pom.xml b/intg/pom.xml
index 2f801e7..22895df 100644
--- a/intg/pom.xml
+++ b/intg/pom.xml
@@ -36,11 +36,6 @@
</dependency>
<dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- </dependency>
-
- <dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
</dependency>
@@ -58,20 +53,23 @@
</dependency>
<dependency>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- </dependency>
-
- <dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
<scope>test</scope>
</dependency>
+
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>${spring.version}</version>
</dependency>
+
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ <version>${guava.version}</version>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
@@ -88,6 +86,14 @@
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <configuration>
+ <source>1.8</source>
+ <target>1.8</target>
+ </configuration>
+ </plugin>
</plugins>
</build>
</project>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/AtlasConfiguration.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/AtlasConfiguration.java b/intg/src/main/java/org/apache/atlas/AtlasConfiguration.java
new file mode 100644
index 0000000..bd2bf7f
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/AtlasConfiguration.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas;
+
+import org.apache.commons.configuration.Configuration;
+
+/**
+ * Enum that encapsulated each property name and its default value.
+ */
+public enum AtlasConfiguration {
+ //web server configuration
+ WEBSERVER_MIN_THREADS("atlas.webserver.minthreads", 10),
+ WEBSERVER_MAX_THREADS("atlas.webserver.maxthreads", 100),
+ WEBSERVER_KEEPALIVE_SECONDS("atlas.webserver.keepalivetimesecs", 60),
+ WEBSERVER_QUEUE_SIZE("atlas.webserver.queuesize", 100),
+ WEBSERVER_REQUEST_BUFFER_SIZE("atlas.jetty.request.buffer.size", 16192),
+
+ QUERY_PARAM_MAX_LENGTH("atlas.query.param.max.length", 4*1024),
+
+ NOTIFICATION_MESSAGE_MAX_LENGTH_BYTES("atlas.notification.message.max.length.bytes", (1000 * 1000)),
+ NOTIFICATION_MESSAGE_COMPRESSION_ENABLED("atlas.notification.message.compression.enabled", true),
+ NOTIFICATION_SPLIT_MESSAGE_SEGMENTS_WAIT_TIME_SECONDS("atlas.notification.split.message.segments.wait.time.seconds", 15 * 60),
+ NOTIFICATION_SPLIT_MESSAGE_BUFFER_PURGE_INTERVAL_SECONDS("atlas.notification.split.message.buffer.purge.interval.seconds", 5 * 60),
+
+ //search configuration
+ SEARCH_MAX_LIMIT("atlas.search.maxlimit", 10000),
+ SEARCH_DEFAULT_LIMIT("atlas.search.defaultlimit", 100);
+
+ private static final Configuration APPLICATION_PROPERTIES;
+
+ static {
+ try {
+ APPLICATION_PROPERTIES = ApplicationProperties.get();
+ } catch (AtlasException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private final String propertyName;
+ private final Object defaultValue;
+
+ AtlasConfiguration(String propertyName, Object defaultValue) {
+ this.propertyName = propertyName;
+ this.defaultValue = defaultValue;
+ }
+
+ public int getInt() {
+ return APPLICATION_PROPERTIES.getInt(propertyName, Integer.valueOf(defaultValue.toString()).intValue());
+ }
+
+ public long getLong() {
+ return APPLICATION_PROPERTIES.getLong(propertyName, Long.valueOf(defaultValue.toString()).longValue());
+ }
+
+ public boolean getBoolean() {
+ return APPLICATION_PROPERTIES.getBoolean(propertyName, Boolean.valueOf(defaultValue.toString()).booleanValue());
+ }
+
+ public String getString() {
+ return APPLICATION_PROPERTIES.getString(propertyName, defaultValue.toString());
+ }
+
+ public Object get() {
+ Object value = APPLICATION_PROPERTIES.getProperty(propertyName);
+ return value == null ? defaultValue : value;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationBaseMessage.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationBaseMessage.java b/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationBaseMessage.java
new file mode 100644
index 0000000..2411808
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationBaseMessage.java
@@ -0,0 +1,208 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.model.notification;
+
+
+import org.apache.atlas.AtlasConfiguration;
+import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.codec.binary.StringUtils;
+import org.apache.commons.compress.utils.IOUtils;
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.GZIPOutputStream;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class AtlasNotificationBaseMessage {
+ private static final Logger LOG = LoggerFactory.getLogger(AtlasNotificationBaseMessage.class);
+
+ public static final int MESSAGE_MAX_LENGTH_BYTES = AtlasConfiguration.NOTIFICATION_MESSAGE_MAX_LENGTH_BYTES.getInt() - 512; // 512 bytes for envelop;
+ public static final boolean MESSAGE_COMPRESSION_ENABLED = AtlasConfiguration.NOTIFICATION_MESSAGE_COMPRESSION_ENABLED.getBoolean();
+
+ public enum CompressionKind { NONE, GZIP };
+
+ private MessageVersion version = null;
+ private String msgId = null;
+ private CompressionKind msgCompressionKind = CompressionKind.NONE;
+ private int msgSplitIdx = 1;
+ private int msgSplitCount = 1;
+
+
+ public AtlasNotificationBaseMessage() {
+ }
+
+ public AtlasNotificationBaseMessage(MessageVersion version) {
+ this(version, null, CompressionKind.NONE);
+ }
+
+ public AtlasNotificationBaseMessage(MessageVersion version, String msgId, CompressionKind msgCompressionKind) {
+ this.version = version;
+ this.msgId = msgId;
+ this.msgCompressionKind = msgCompressionKind;
+ }
+
+ public AtlasNotificationBaseMessage(MessageVersion version, String msgId, CompressionKind msgCompressionKind, int msgSplitIdx, int msgSplitCount) {
+ this.version = version;
+ this.msgId = msgId;
+ this.msgCompressionKind = msgCompressionKind;
+ this.msgSplitIdx = msgSplitIdx;
+ this.msgSplitCount = msgSplitCount;
+ }
+
+ public void setVersion(MessageVersion version) {
+ this.version = version;
+ }
+
+ public MessageVersion getVersion() {
+ return version;
+ }
+
+ public String getMsgId() {
+ return msgId;
+ }
+
+ public void setMsgId(String msgId) {
+ this.msgId = msgId;
+ }
+
+ public CompressionKind getMsgCompressionKind() {
+ return msgCompressionKind;
+ }
+
+ public void setMsgCompressed(CompressionKind msgCompressionKind) {
+ this.msgCompressionKind = msgCompressionKind;
+ }
+
+ public int getMsgSplitIdx() {
+ return msgSplitIdx;
+ }
+
+ public void setMsgSplitIdx(int msgSplitIdx) {
+ this.msgSplitIdx = msgSplitIdx;
+ }
+
+ public int getMsgSplitCount() {
+ return msgSplitCount;
+ }
+
+ public void setMsgSplitCount(int msgSplitCount) {
+ this.msgSplitCount = msgSplitCount;
+ }
+
+ /**
+ * Compare the version of this message with the given version.
+ *
+ * @param compareToVersion the version to compare to
+ *
+ * @return a negative integer, zero, or a positive integer as this message's version is less than, equal to,
+ * or greater than the given version.
+ */
+ public int compareVersion(MessageVersion compareToVersion) {
+ return version.compareTo(compareToVersion);
+ }
+
+
+ public static byte[] getBytesUtf8(String str) {
+ return StringUtils.getBytesUtf8(str);
+ }
+
+ public static String getStringUtf8(byte[] bytes) {
+ return StringUtils.newStringUtf8(bytes);
+ }
+
+ public static byte[] encodeBase64(byte[] bytes) {
+ return Base64.encodeBase64(bytes);
+ }
+
+ public static byte[] decodeBase64(byte[] bytes) {
+ return Base64.decodeBase64(bytes);
+ }
+
+ public static byte[] gzipCompressAndEncodeBase64(byte[] bytes) {
+ return encodeBase64(gzipCompress(bytes));
+ }
+
+ public static byte[] decodeBase64AndGzipUncompress(byte[] bytes) {
+ return gzipUncompress(decodeBase64(bytes));
+ }
+
+ public static String gzipCompress(String str) {
+ byte[] bytes = getBytesUtf8(str);
+ byte[] compressedBytes = gzipCompress(bytes);
+ byte[] encodedBytes = encodeBase64(compressedBytes);
+
+ return getStringUtf8(encodedBytes);
+ }
+
+ public static String gzipUncompress(String str) {
+ byte[] encodedBytes = getBytesUtf8(str);
+ byte[] compressedBytes = decodeBase64(encodedBytes);
+ byte[] bytes = gzipUncompress(compressedBytes);
+
+ return getStringUtf8(bytes);
+ }
+
+ public static byte[] gzipCompress(byte[] content) {
+ ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
+
+ try {
+ GZIPOutputStream gzipOutputStream = new GZIPOutputStream(byteArrayOutputStream);
+
+ gzipOutputStream.write(content);
+ gzipOutputStream.close();
+ } catch (IOException e) {
+ LOG.error("gzipCompress(): error compressing {} bytes", content.length, e);
+
+ throw new RuntimeException(e);
+ }
+
+ return byteArrayOutputStream.toByteArray();
+ }
+
+ public static byte[] gzipUncompress(byte[] content) {
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
+
+ try {
+ IOUtils.copy(new GZIPInputStream(new ByteArrayInputStream(content)), out);
+ } catch (IOException e) {
+ LOG.error("gzipUncompress(): error uncompressing {} bytes", content.length, e);
+ }
+
+ return out.toByteArray();
+ }
+}
+
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationMessage.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationMessage.java b/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationMessage.java
new file mode 100644
index 0000000..5a5b63f
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationMessage.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.model.notification;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.util.Date;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+/**
+ * Represents a notification message that is associated with a version.
+ */
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class AtlasNotificationMessage<T> extends AtlasNotificationBaseMessage {
+ private String msgSourceIP;
+ private String msgCreatedBy;
+ private long msgCreationTime;
+
+ /**
+ * The actual message.
+ */
+ private T message;
+
+
+ // ----- Constructors ----------------------------------------------------
+ public AtlasNotificationMessage() {
+ }
+
+ public AtlasNotificationMessage(MessageVersion version, T message) {
+ this(version, message, null, null);
+ }
+
+ public AtlasNotificationMessage(MessageVersion version, T message, String msgSourceIP, String createdBy) {
+ super(version);
+
+ this.msgSourceIP = msgSourceIP;
+ this.msgCreatedBy = createdBy;
+ this.msgCreationTime = (new Date()).getTime();
+ this.message = message;
+ }
+
+
+ public String getMsgSourceIP() {
+ return msgSourceIP;
+ }
+
+ public void setMsgSourceIP(String msgSourceIP) {
+ this.msgSourceIP = msgSourceIP;
+ }
+
+ public String getMsgCreatedBy() {
+ return msgCreatedBy;
+ }
+
+ public void setMsgCreatedBy(String msgCreatedBy) {
+ this.msgCreatedBy = msgCreatedBy;
+ }
+
+ public long getMsgCreationTime() {
+ return msgCreationTime;
+ }
+
+ public void setMsgCreationTime(long msgCreationTime) {
+ this.msgCreationTime = msgCreationTime;
+ }
+
+ public T getMessage() {
+ return message;
+ }
+
+ public void setMessage(T message) {
+ this.message = message;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationStringMessage.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationStringMessage.java b/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationStringMessage.java
new file mode 100644
index 0000000..9064b6c
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/model/notification/AtlasNotificationStringMessage.java
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.model.notification;
+
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import java.nio.charset.StandardCharsets;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+/**
+ * Notification message that carries a plain String payload; supports optional
+ * compression and splitting into multiple parts (msgSplitIdx/msgSplitCount).
+ */
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class AtlasNotificationStringMessage extends AtlasNotificationBaseMessage {
+ /** The string payload; null until set. */
+ private String message = null;
+
+ /** No-arg constructor for deserialization frameworks. */
+ public AtlasNotificationStringMessage() {
+ super(MessageVersion.CURRENT_VERSION);
+ }
+
+ /** Creates a single-part message with the given payload. */
+ public AtlasNotificationStringMessage(String message) {
+ super(MessageVersion.CURRENT_VERSION);
+
+ this.message = message;
+ }
+
+ /** Creates a single-part message with the given id and compression kind. */
+ public AtlasNotificationStringMessage(String message, String msgId, CompressionKind compressionKind) {
+ super(MessageVersion.CURRENT_VERSION, msgId, compressionKind);
+
+ this.message = message;
+ }
+
+ /** Creates one part (msgSplitIdx of msgSplitCount) of a split message. */
+ public AtlasNotificationStringMessage(String message, String msgId, CompressionKind compressionKind, int msgSplitIdx, int msgSplitCount) {
+ super(MessageVersion.CURRENT_VERSION, msgId, compressionKind, msgSplitIdx, msgSplitCount);
+
+ this.message = message;
+ }
+
+ /** Creates a single-part message by decoding the given UTF-8 encoded bytes. */
+ public AtlasNotificationStringMessage(byte[] encodedBytes, String msgId, CompressionKind compressionKind) {
+ super(MessageVersion.CURRENT_VERSION, msgId, compressionKind);
+
+ this.message = AtlasNotificationBaseMessage.getStringUtf8(encodedBytes);
+ }
+
+ /** Creates one part of a split message by decoding a slice of the given bytes. */
+ public AtlasNotificationStringMessage(byte[] encodedBytes, int offset, int length, String msgId, CompressionKind compressionKind, int msgSplitIdx, int msgSplitCount) {
+ super(MessageVersion.CURRENT_VERSION, msgId, compressionKind, msgSplitIdx, msgSplitCount);
+
+ // Decode explicitly as UTF-8, matching getStringUtf8() used by the
+ // single-part byte[] constructor above. Previously this used the
+ // platform default charset, which could corrupt split messages on
+ // platforms whose default charset is not UTF-8.
+ this.message = new String(encodedBytes, offset, length, StandardCharsets.UTF_8);
+ }
+
+ public void setMessage(String message) {
+ this.message = message;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/model/notification/EntityNotification.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/model/notification/EntityNotification.java b/intg/src/main/java/org/apache/atlas/model/notification/EntityNotification.java
new file mode 100644
index 0000000..3d03457
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/model/notification/EntityNotification.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.model.notification;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+/**
+ * Base class for entity notification messages.
+ */
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class EntityNotification implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * Version/format discriminator for entity notifications.
+ */
+ public enum EntityNotificationType {
+ ENTITY_NOTIFICATION_V1
+ }
+
+ // Notification type; defaults to ENTITY_NOTIFICATION_V1 in the no-arg constructor.
+ protected EntityNotificationType type;
+
+ /** Creates a notification of type ENTITY_NOTIFICATION_V1. */
+ public EntityNotification() {
+ this.type = EntityNotificationType.ENTITY_NOTIFICATION_V1;
+ }
+
+ /** Creates a notification of the given type. */
+ public EntityNotification(EntityNotificationType type) {
+ this.type = type;
+ }
+
+ public EntityNotificationType getType() {
+ return type;
+ }
+
+ public void setType(EntityNotificationType type) {
+ this.type = type;
+ }
+
+ /** Hook for subclasses to normalize their state; no-op in this base class. */
+ public void normalize() { }
+
+ @Override
+ public String toString() {
+ return toString(new StringBuilder()).toString();
+ }
+
+ /**
+ * Appends a string representation of this notification to the given builder.
+ *
+ * @param sb builder to append to; a new one is allocated if null
+ * @return the builder containing the representation
+ */
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("EntityNotification{");
+ sb.append("type=").append(type);
+ sb.append("}");
+
+ return sb;
+ }
+}
[34/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/GremlinExpressionFactory.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/GremlinExpressionFactory.java b/repository/src/main/java/org/apache/atlas/gremlin/GremlinExpressionFactory.java
deleted file mode 100644
index d603150..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/GremlinExpressionFactory.java
+++ /dev/null
@@ -1,658 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.ArithmeticExpression;
-import org.apache.atlas.groovy.ArithmeticExpression.ArithmeticOperator;
-import org.apache.atlas.groovy.CastExpression;
-import org.apache.atlas.groovy.ClosureExpression;
-import org.apache.atlas.groovy.FieldExpression;
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.IdentifierExpression;
-import org.apache.atlas.groovy.ListExpression;
-import org.apache.atlas.groovy.LiteralExpression;
-import org.apache.atlas.groovy.TraversalStepType;
-import org.apache.atlas.groovy.TypeCoersionExpression;
-import org.apache.atlas.groovy.VariableAssignmentExpression;
-import org.apache.atlas.query.GraphPersistenceStrategies;
-import org.apache.atlas.query.IntSequence;
-import org.apache.atlas.query.TypeUtils.FieldInfo;
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
-import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
-import org.apache.atlas.repository.graphdb.GremlinVersion;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.cache.TypeCache.TYPE_FILTER;
-import org.apache.atlas.util.AtlasRepositoryConfiguration;
-
-import com.google.common.collect.ImmutableList;
-
-/**
- * Factory to generate Groovy expressions representing Gremlin syntax that
- * are independent of the specific version of Gremlin that is being used.
- *
- */
-public abstract class GremlinExpressionFactory {
-
- private static final String G_VARIABLE = "g";
- private static final String IT_VARIABLE = "it";
-
- protected static final String SET_CLASS = "Set";
-
-
- private static final String OBJECT_FIELD = "object";
-
- protected static final String V_METHOD = "V";
- protected static final String FILTER_METHOD = "filter";
- private static final String PATH_METHOD = "path";
- private static final String AS_METHOD = "as";
- private static final String IN_OPERATOR = "in";
- protected static final String HAS_METHOD = "has";
- protected static final String TO_LOWER_CASE_METHOD = "toLowerCase";
- protected static final String SELECT_METHOD = "select";
- protected static final String ORDER_METHOD = "order";
- protected static final String FILL_METHOD = "fill";
- protected static final String MATCHES = "matches";
-
- public static final GremlinExpressionFactory INSTANCE = AtlasGraphProvider.getGraphInstance()
- .getSupportedGremlinVersion() == GremlinVersion.THREE ? new Gremlin3ExpressionFactory()
- : new Gremlin2ExpressionFactory();
-
- /**
- * Returns the unqualified name of the class used in this version of gremlin to
- * represent Gremlin queries as they are being generated.
- * @return
- */
- public abstract String getTraversalExpressionClass();
-
- /**
- * Gets the expression to use as the parent when translating the loop
- * expression in a loop
- *
- * @param inputQry
- * the
- * @return
- */
- public abstract GroovyExpression getLoopExpressionParent(GroovyExpression inputQry);
-
- /**
- * Generates a loop expression.
- *
- * @param parent
- * the parent of the loop expression
- * @param emitExpr
- * Expression with the value that should be emitted by the loop
- * expression.
- * @param loopExpr
- * the query expression that is being executed repeatedly
- * executed in a loop
- * @param alias
- * The alias of the expression being looped over
- * @param times
- * the number of times to repeat, or null if a times condition
- * should not be used.
- * @return
- */
- public abstract GroovyExpression generateLoopExpression(GroovyExpression parent, GraphPersistenceStrategies s, IDataType dataType,
- GroovyExpression loopExpr, String alias, Integer times);
-
-
- /**
- * Generates a logical (and/or) expression with the given operands.
- * @param parent
- * @param operator
- * @param operands
- * @return
- */
- public abstract GroovyExpression generateLogicalExpression(GroovyExpression parent, String operator,
- List<GroovyExpression> operands);
-
- /**
- * Generates a back reference expression that refers to the given alias.
- *
- * @param parent
- * @param inSelect
- * @param alias
- * @return
- */
- public abstract GroovyExpression generateBackReferenceExpression(GroovyExpression parent, boolean inSelect,
- String alias);
-
- /**
- * Generates a select expression
- *
- * @param parent
- * @param sourceNames
- * the names of the select fields
- * @param srcExprs
- * the corresponding values to return
- * @return
- */
- public abstract GroovyExpression generateSelectExpression(GroovyExpression parent,
- List<LiteralExpression> sourceNames, List<GroovyExpression> srcExprs);
-
- /**
- * Generates a an expression that gets the value of the given property from the
- * vertex presented by the parent.
- *
- * @param parent
- * @param fInfo
- * @param propertyName
- * @param inSelect
- * @return
- */
- public abstract GroovyExpression generateFieldExpression(GroovyExpression parent, FieldInfo fInfo,
- String propertyName, boolean inSelect);
-
- /**
- * Generates a has expression that checks whether the vertices match a specific condition
- *
- * @param s
- * @param parent the object that we should call apply the "has" condition to.
- * @param propertyName the name of the property whose value we are comparing
- * @param symbol comparsion operator symbol ('=','<', etc.)
- * @param requiredValue the value to compare against
- * @param fInfo info about the field whose value we are checking
- * @return
- * @throws AtlasException
- */
- public abstract GroovyExpression generateHasExpression(GraphPersistenceStrategies s, GroovyExpression parent,
- String propertyName, String symbol, GroovyExpression requiredValue, FieldInfo fInfo) throws AtlasException;
-
- public abstract GroovyExpression generateLikeExpressionUsingFilter(GroovyExpression parent, String propertyName,
- GroovyExpression propertyValue) throws AtlasException;
-
- /**
- * Generates a range expression
- *
- * @param parent
- * @param startIndex
- * @param endIndex
- * @return
- */
- public abstract GroovyExpression generateRangeExpression(GroovyExpression parent, int startIndex, int endIndex);
-
- /**
- * Determines if the specified expression is a range method call.
- *
- * @param expr
- * @return
- */
- public abstract boolean isRangeExpression(GroovyExpression expr);
-
- /**
- * Set the start index and end index of a range expression
- *
- * @param expr
- * @param startIndex
- * @param endIndex
- */
- public abstract void setRangeParameters(GroovyExpression expr, int startIndex, int endIndex);
-
- /**
- * If the specified function expression is a range expression, returns the start and end index parameters
- * otherwise returns null.
- *
- * @param expr
- * @return int array with two elements - element 0 is start index, element 1 is end index
- */
- public abstract int[] getRangeParameters(AbstractFunctionExpression expr);
-
- /**
- * Generates an order by expression
- *
- * @param parent
- * @param translatedOrderBy
- * @param isAscending
- * @return
- */
- public abstract GroovyExpression generateOrderByExpression(GroovyExpression parent,
- List<GroovyExpression> translatedOrderBy, boolean isAscending);
-
- /**
- * Determines if specified expression is an order method call
- *
- * @param expr
- * @return
- */
- public boolean isOrderExpression(GroovyExpression expr) {
- if (expr instanceof FunctionCallExpression) {
- FunctionCallExpression functionCallExpression = (FunctionCallExpression) expr;
- if (functionCallExpression.getFunctionName().equals(ORDER_METHOD)) {
- return true;
- }
- }
- return false;
- }
-
- /**
- * Returns the Groovy expressions that should be used as the parents when
- * translating an order by expression. This is needed because Gremlin 2 and
- * 3 handle order by expressions very differently.
- *
- */
- public abstract List<GroovyExpression> getOrderFieldParents();
-
- /**
- * Returns the expression that represents an anonymous graph traversal.
- *
- * @return
- */
- public abstract GroovyExpression getAnonymousTraversalExpression();
-
- public boolean isLeafAnonymousTraversalExpression(GroovyExpression expr) {
- if(!(expr instanceof FunctionCallExpression)) {
- return false;
- }
- FunctionCallExpression functionCallExpr = (FunctionCallExpression)expr;
- if(functionCallExpr.getCaller() != null) {
- return false;
- }
- return functionCallExpr.getFunctionName().equals("_") & functionCallExpr.getArguments().size() == 0;
- }
-
- /**
- * Returns an expression representing
- *
- * @return
- */
- public abstract GroovyExpression getFieldInSelect();
-
- /**
- * Generates the expression the serves as the root of the Gremlin query.
- * @param varExpr variable containing the vertices to traverse
- * @return
- */
- protected abstract GroovyExpression initialExpression(GroovyExpression varExpr, GraphPersistenceStrategies s);
-
-
- /**
- * Generates an expression that tests whether the vertex represented by the 'toTest'
- * expression represents an instance of the specified type, checking both the type
- * and super type names.
- *
- * @param s
- * @param typeName
- * @param itRef
- * @return
- */
- protected abstract GroovyExpression typeTestExpression(GraphPersistenceStrategies s, String typeName,
- GroovyExpression vertexExpr);
-
- /**
- /**
- * Generates a sequence of groovy expressions that filter the vertices to only
- * those that match the specified type. If GraphPersistenceStrategies.collectTypeInstancesIntoVar()
- * is set and the gremlin optimizer is disabled, the vertices are put into a variable whose name is generated
- * from the specified IntSequence. The last item in the result will be a graph traversal restricted to only
- * the matching vertices.
- */
- public List<GroovyExpression> generateTypeTestExpression(GraphPersistenceStrategies s, GroovyExpression parent,
- String typeName, IntSequence intSeq) throws AtlasException {
-
- if(AtlasRepositoryConfiguration.isGremlinOptimizerEnabled()) {
- GroovyExpression superTypeAttributeNameExpr = new LiteralExpression(s.superTypeAttributeName());
- GroovyExpression typeNameExpr = new LiteralExpression(typeName);
- GroovyExpression superTypeMatchesExpr = new FunctionCallExpression(TraversalStepType.FILTER, HAS_METHOD, superTypeAttributeNameExpr,
- typeNameExpr);
-
- GroovyExpression typeAttributeNameExpr = new LiteralExpression(s.typeAttributeName());
-
- GroovyExpression typeMatchesExpr = new FunctionCallExpression(TraversalStepType.FILTER, HAS_METHOD, typeAttributeNameExpr,
- typeNameExpr);
- GroovyExpression result = new FunctionCallExpression(TraversalStepType.FILTER, parent, "or", typeMatchesExpr, superTypeMatchesExpr);
- return Collections.singletonList(result);
- }
- else {
- if (s.filterBySubTypes()) {
- return typeTestExpressionUsingInFilter(s, parent, typeName);
- } else if (s.collectTypeInstancesIntoVar()) {
- return typeTestExpressionMultiStep(s, typeName, intSeq);
- } else {
- return typeTestExpressionUsingFilter(s, parent, typeName);
- }
- }
- }
-
- private List<GroovyExpression> typeTestExpressionUsingInFilter(GraphPersistenceStrategies s, GroovyExpression parent,
- final String typeName) throws AtlasException {
- List<GroovyExpression> typeNames = new ArrayList<>();
- typeNames.add(new LiteralExpression(typeName));
-
- Map<TYPE_FILTER, String> filters = new HashMap<TYPE_FILTER, String>() {{
- put(TYPE_FILTER.SUPERTYPE, typeName);
- }};
-
- ImmutableList<String> subTypes = TypeSystem.getInstance().getTypeNames(filters);
-
- if (!subTypes.isEmpty()) {
- for (String subType : subTypes) {
- typeNames.add(new LiteralExpression(subType));
- }
- }
-
- GroovyExpression inFilterExpr = generateHasExpression(s, parent, s.typeAttributeName(), IN_OPERATOR,
- new ListExpression(typeNames), null);
-
- return Collections.singletonList(inFilterExpr);
- }
-
- private List<GroovyExpression> typeTestExpressionMultiStep(GraphPersistenceStrategies s, String typeName,
- IntSequence intSeq) {
-
- String varName = "_var_" + intSeq.next();
- GroovyExpression varExpr = new IdentifierExpression(varName);
- List<GroovyExpression> result = new ArrayList<>();
-
- result.add(newSetVar(varName));
- result.add(fillVarWithTypeInstances(s, typeName, varName));
- result.add(fillVarWithSubTypeInstances(s, typeName, varName));
- result.add(initialExpression(varExpr, s));
-
- return result;
- }
-
- private GroovyExpression newSetVar(String varName) {
- GroovyExpression castExpr = new TypeCoersionExpression(new ListExpression(), SET_CLASS);
- return new VariableAssignmentExpression(varName, castExpr);
- }
-
- private GroovyExpression fillVarWithTypeInstances(GraphPersistenceStrategies s, String typeName, String fillVar) {
- GroovyExpression graphExpr = getAllVerticesExpr();
- GroovyExpression typeAttributeNameExpr = new LiteralExpression(s.typeAttributeName());
- GroovyExpression typeNameExpr = new LiteralExpression(typeName);
- GroovyExpression hasExpr = new FunctionCallExpression(graphExpr, HAS_METHOD, typeAttributeNameExpr, typeNameExpr);
- GroovyExpression fillExpr = new FunctionCallExpression(hasExpr, FILL_METHOD, new IdentifierExpression(fillVar));
- return fillExpr;
- }
-
- private GroovyExpression fillVarWithSubTypeInstances(GraphPersistenceStrategies s, String typeName,
- String fillVar) {
- GroovyExpression graphExpr = getAllVerticesExpr();
- GroovyExpression superTypeAttributeNameExpr = new LiteralExpression(s.superTypeAttributeName());
- GroovyExpression typeNameExpr = new LiteralExpression(typeName);
- GroovyExpression hasExpr = new FunctionCallExpression(graphExpr, HAS_METHOD, superTypeAttributeNameExpr, typeNameExpr);
- GroovyExpression fillExpr = new FunctionCallExpression(hasExpr, FILL_METHOD, new IdentifierExpression(fillVar));
- return fillExpr;
- }
-
-
- private List<GroovyExpression> typeTestExpressionUsingFilter(GraphPersistenceStrategies s, GroovyExpression parent,
- String typeName) {
- GroovyExpression itExpr = getItVariable();
- GroovyExpression typeTestExpr = typeTestExpression(s, typeName, itExpr);
- GroovyExpression closureExpr = new ClosureExpression(typeTestExpr);
- GroovyExpression filterExpr = new FunctionCallExpression(parent, FILTER_METHOD, closureExpr);
- return Collections.singletonList(filterExpr);
- }
-
- /**
- * Generates an expression which checks whether the vertices in the query have
- * a field with the given name.
- *
- * @param parent
- * @param fieldName
- * @return
- */
- public GroovyExpression generateUnaryHasExpression(GroovyExpression parent, String fieldName) {
- return new FunctionCallExpression(TraversalStepType.FILTER, parent, HAS_METHOD, new LiteralExpression(fieldName));
- }
-
- /**
- * Generates a path expression
- *
- * @param parent
- * @return
- */
- public GroovyExpression generatePathExpression(GroovyExpression parent) {
- return new FunctionCallExpression(TraversalStepType.MAP_TO_VALUE, parent, PATH_METHOD);
- }
-
- /**
- * Generates the emit expression used in loop expressions.
- * @param s
- * @param dataType
- * @return
- */
- protected GroovyExpression generateLoopEmitExpression(GraphPersistenceStrategies s, IDataType dataType) {
- return typeTestExpression(s, dataType.getName(), getCurrentObjectExpression());
- }
-
- /**
- * Generates an alias expression
- *
- * @param parent
- * @param alias
- * @return
- */
- public GroovyExpression generateAliasExpression(GroovyExpression parent, String alias) {
- return new FunctionCallExpression(TraversalStepType.SIDE_EFFECT, parent, AS_METHOD, new LiteralExpression(alias));
- }
-
- /**
- * Generates an expression that gets the vertices adjacent to the vertex in 'parent'
- * in the specified direction.
- *
- * @param parent
- * @param dir
- * @return
- */
- public GroovyExpression generateAdjacentVerticesExpression(GroovyExpression parent, AtlasEdgeDirection dir) {
- return new FunctionCallExpression(TraversalStepType.FLAT_MAP_TO_ELEMENTS, parent, getGremlinFunctionName(dir));
- }
-
- private String getGremlinFunctionName(AtlasEdgeDirection dir) {
- switch(dir) {
- case IN:
- return "in";
- case OUT:
- return "out";
- case BOTH:
- return "both";
- default:
- throw new RuntimeException("Unknown Atlas Edge Direction: " + dir);
- }
- }
-
- /**
- * Generates an expression that gets the vertices adjacent to the vertex in 'parent'
- * in the specified direction, following only edges with the given label.
- *
- * @param parent
- * @param dir
- * @return
- */
- public GroovyExpression generateAdjacentVerticesExpression(GroovyExpression parent, AtlasEdgeDirection dir,
- String label) {
- return new FunctionCallExpression(TraversalStepType.FLAT_MAP_TO_ELEMENTS, parent, getGremlinFunctionName(dir), new LiteralExpression(label));
- }
-
- /**
- * Generates an arithmetic expression, e.g. a + b
- *
- */
- public GroovyExpression generateArithmeticExpression(GroovyExpression left, String operator,
- GroovyExpression right) throws AtlasException {
- ArithmeticOperator op = ArithmeticOperator.lookup(operator);
- return new ArithmeticExpression(left, op, right);
- }
-
- public abstract GroovyExpression generateGroupByExpression(GroovyExpression parent, GroovyExpression groupByExpression, GroovyExpression aggregationFunction);
-
- protected GroovyExpression getItVariable() {
- return new IdentifierExpression(IT_VARIABLE);
- }
-
- protected GroovyExpression getAllVerticesExpr() {
- GroovyExpression gExpr = getGraphExpression();
- return new FunctionCallExpression(TraversalStepType.START, gExpr, V_METHOD);
- }
-
- protected IdentifierExpression getGraphExpression() {
- return new IdentifierExpression(TraversalStepType.SOURCE, G_VARIABLE);
- }
-
-
- protected GroovyExpression getCurrentObjectExpression() {
- return new FieldExpression(getItVariable(), OBJECT_FIELD);
- }
-
- //assumes cast already performed
- public GroovyExpression generateCountExpression(GroovyExpression itExpr) {
- GroovyExpression collectionExpr = new CastExpression(itExpr,"Collection");
- return new FunctionCallExpression(collectionExpr, "size");
- }
-
- public GroovyExpression generateMinExpression(GroovyExpression itExpr, GroovyExpression mapFunction) {
- return getAggregrationExpression(itExpr, mapFunction, "min");
- }
-
- public GroovyExpression generateMaxExpression(GroovyExpression itExpr, GroovyExpression mapFunction) {
- return getAggregrationExpression(itExpr, mapFunction, "max");
- }
-
- public GroovyExpression generateSumExpression(GroovyExpression itExpr, GroovyExpression mapFunction) {
- return getAggregrationExpression(itExpr, mapFunction, "sum");
- }
-
- private GroovyExpression getAggregrationExpression(GroovyExpression itExpr,
- GroovyExpression mapFunction, String functionName) {
- GroovyExpression collectionExpr = new CastExpression(itExpr,"Collection");
- ClosureExpression collectFunction = new ClosureExpression(mapFunction);
- GroovyExpression transformedList = new FunctionCallExpression(collectionExpr, "collect", collectFunction);
- return new FunctionCallExpression(transformedList, functionName);
- }
-
- public GroovyExpression getClosureArgumentValue() {
- return getItVariable();
- }
-
- /**
- * Specifies the parent to use when translating the select list in
- * a group by statement.
- *
- * @return
- */
- public abstract GroovyExpression getGroupBySelectFieldParent();
-
- public GroovyExpression generateFillExpression(GroovyExpression parent, GroovyExpression variable) {
- return new FunctionCallExpression(TraversalStepType.END,parent , "fill", variable);
- }
-
- /**
- * Generates an anonymous graph traversal initialized with the specified value. In Gremlin 3, we need
- * to use a different syntax for this when the object is a map, so that information needs to be provided
- * to this method so that the correct syntax is used.
- *
- * @param isMap true if the value contains Map instances, false if it contains Vertex instances
- * @param valueCollection the source objects to start the traversal from.
- */
- public abstract GroovyExpression generateSeededTraversalExpresssion(boolean isMap, GroovyExpression valueCollection);
-
- /**
- * Returns the current value of the traverser. This is used when generating closure expressions that
- * need to operate on the current value in the graph graversal.
- *
- * @param traverser
- * @return
- */
- public abstract GroovyExpression getCurrentTraverserObject(GroovyExpression traverser);
-
- /**
- * Generates an expression that transforms the current value of the traverser by
- * applying the function specified
- *
- * @param parent
- * @param closureExpression
- * @return
- */
- public abstract GroovyExpression generateMapExpression(GroovyExpression parent, ClosureExpression closureExpression);
-
- /**
- * Returns whether a select statement generates a map (or Gremlin 2 "Row") when it contains the specified
- * number of aliases.
- *
- */
- public abstract boolean isSelectGeneratesMap(int aliasCount);
-
- /**
- * Generates an expression to get the value of the value from the row map
- * generated by select() with the specified key.
- *
- */
- public abstract GroovyExpression generateGetSelectedValueExpression(LiteralExpression key,
- GroovyExpression rowMapExpr);
-
- public GroovyExpression removeExtraMapFromPathInResult(GroovyExpression parent) {
- GroovyExpression listItem = getItVariable();
- GroovyExpression tailExpr = new FunctionCallExpression(listItem, "tail");
- return new FunctionCallExpression(parent, "collect", new ClosureExpression(tailExpr));
-
- }
-
- /**
- * Generates a toList expression to execute the gremlin query and
- * store the result in a new list.
- *
- * @param expr
- * @return
- */
- public GroovyExpression generateToListExpression(GroovyExpression expr) {
- return new FunctionCallExpression(TraversalStepType.END, expr, "toList");
- }
-
- /**
- * Finds aliases that absolutely must be brought along with this expression into
- * the output expression and cannot just be recreated there. For example, in the
- * Gremlin 2 loop expression, the loop semantics break of the alias is simply recreated
- * in the output expression.
- * @param expr
- * @return
- */
- public abstract List<String> getAliasesRequiredByExpression(GroovyExpression expr);
-
-
- /**
- * Checks if the given expression is an alias expression, and if so
- * returns the alias from the expression. Otherwise, null is
- * returned.
- */
- public String getAliasNameIfRelevant(GroovyExpression expr) {
- if(!(expr instanceof FunctionCallExpression)) {
- return null;
- }
- FunctionCallExpression fc = (FunctionCallExpression)expr;
- if(! fc.getFunctionName().equals(AS_METHOD)) {
- return null;
- }
- LiteralExpression aliasName = (LiteralExpression)fc.getArguments().get(0);
- return aliasName.getValue().toString();
-
- }
-
- public abstract boolean isRepeatExpression(GroovyExpression expr);
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/AliasFinder.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/AliasFinder.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/AliasFinder.java
deleted file mode 100644
index 3e6c39a..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/AliasFinder.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.LiteralExpression;
-import org.apache.atlas.groovy.TraversalStepType;
-
-/**
- * Finds all aliases in the expression.
- */
-public class AliasFinder implements CallHierarchyVisitor {
-
- private List<LiteralExpression> foundAliases = new ArrayList<>();
-
- //Whether a final alias is needed. A final alias is needed
- //if there are transformation steps after the last alias in
- //the expression. We initialize this to false since a final
- //alias is not needed if there are no aliases.
- private boolean finalAliasNeeded = false;
-
- @Override
- public boolean preVisitFunctionCaller(AbstractFunctionExpression expr) {
- return true;
- }
-
- @Override
- public void visitNonFunctionCaller(GroovyExpression expr) {
-
- }
-
- @Override
- public void visitNullCaller() {
-
- }
-
- private static final Set<TraversalStepType> TRANSFORMATION_STEP_TYPES = new HashSet<>(Arrays.asList(
- TraversalStepType.MAP_TO_ELEMENT,
- TraversalStepType.MAP_TO_VALUE,
- TraversalStepType.FLAT_MAP_TO_ELEMENTS,
- TraversalStepType.FLAT_MAP_TO_VALUES,
- TraversalStepType.BARRIER,
- TraversalStepType.NONE));
-
-
- @Override
- public boolean postVisitFunctionCaller(AbstractFunctionExpression functionCall) {
-
- if (functionCall instanceof FunctionCallExpression) {
- FunctionCallExpression expr = (FunctionCallExpression)functionCall;
- if (expr.getType() == TraversalStepType.SIDE_EFFECT && expr.getFunctionName().equals("as")) {
- //We found an alias. This is currently the last expression we've seen
- //in our traversal back up the expression tree, so at this point a final
- //alias is not needed.
- LiteralExpression aliasNameExpr = (LiteralExpression)expr.getArguments().get(0);
- foundAliases.add(aliasNameExpr);
- finalAliasNeeded=false;
- }
- }
-
- if(TRANSFORMATION_STEP_TYPES.contains(functionCall.getType())) {
- //This step changes the value of the traverser. Now, a final alias
- //needs to be added.
- if(!foundAliases.isEmpty()) {
- finalAliasNeeded = true;
- }
- }
-
- return true;
- }
-
- public List<LiteralExpression> getAliases() {
- return foundAliases;
- }
-
- public boolean isFinalAliasNeeded() {
-
- return finalAliasNeeded;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/CallHierarchyVisitor.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/CallHierarchyVisitor.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/CallHierarchyVisitor.java
deleted file mode 100644
index 6089353..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/CallHierarchyVisitor.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-
-/**
- * Call back interface for visiting the call hierarchy of a function call.
- */
-public interface CallHierarchyVisitor {
-
- /**
- * Visits a function expression before the visit to its caller.
- *
- * @param expr
- *
- * @return false to terminate the recursion
- */
- boolean preVisitFunctionCaller(AbstractFunctionExpression expr);
-
- /**
- * Called when a caller that is not an instance of
- * AbstractFunctionExpression is found. This indicates that the deepest
- * point in the call hierarchy has been reached.
- *
- *
- */
- void visitNonFunctionCaller(GroovyExpression expr);
-
- /**
- * Called when a null caller is found (this happens for static/user-defined
- * functions). This indicates that the deepest point in the call hierarchy
- * has been reached.
- *
- */
- void visitNullCaller();
-
- /**
- * Visits a function expression after the visit to its caller.
- *
- * @param expr
- *
- * @return false to terminate the recursion
- */
- boolean postVisitFunctionCaller(AbstractFunctionExpression functionCall);
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpandAndsOptimization.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpandAndsOptimization.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpandAndsOptimization.java
deleted file mode 100644
index d8ecd07..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpandAndsOptimization.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Optimizer that pulls has expressions out of an 'and' expression.
- * <p>
- * For example:
- * <pre class=code>
- * g.V().and(has('x'),has('y') </pre>
- * <p>
- * is optimized to:
- * <pre class=code>
- * g.V().has('x').has('y') </pre>
- * <p>
- * There are certain cases where it is not safe to move an expression out
- * of the 'and'. For example, in the expression
- * <pre class=code>
- * g.V().and(has('x').out('y'),has('z')) </pre>
- * <p>
- * has('x').out('y') cannot be moved out of the 'and', since it changes the value of the traverser.
- * <p>
- * At this time, the ExpandAndsOptimizer is not able to handle this scenario, so we don't extract
- * that expression. In this case, the result is:
- * <pre class=code>
- * g.V().has('z').and(has('x').out('y')) </pre>
- * <p>
- * The optimizer will call ExpandAndsOptimization recursively on the children, so
- * there is no need to recursively update the children here.
- *
- */
-public class ExpandAndsOptimization implements GremlinOptimization {
-
- private static final Logger logger_ = LoggerFactory.getLogger(ExpandAndsOptimization.class);
-
-
- private final GremlinExpressionFactory factory;
-
- public ExpandAndsOptimization(GremlinExpressionFactory factory) {
- this.factory = factory;
- }
-
- @Override
- public boolean appliesTo(GroovyExpression expr, OptimizationContext contxt) {
- return expr instanceof FunctionCallExpression && ((FunctionCallExpression)expr).getFunctionName().equals("and");
- }
-
- /**
- * Expands the given and expression. There is no need to recursively
- * expand the children here. This method is called recursively by
- * GremlinQueryOptimier on the children.
- *
- */
- @Override
- public GroovyExpression apply(GroovyExpression expr, OptimizationContext context) {
-
- FunctionCallExpression exprAsFunction = (FunctionCallExpression)expr;
- GroovyExpression result = exprAsFunction.getCaller();
-
- List<GroovyExpression> nonExtractableArguments = new ArrayList<>();
- for(GroovyExpression argument : exprAsFunction.getArguments()) {
-
- if (GremlinQueryOptimizer.isExtractable(argument)) {
- //Set the caller of the deepest expression in the call hierarchy
- //of the argument to point to the current result.
- //For example, if result is "g.V()" and the updatedArgument is "has('x').has('y')",
- //updatedArgument would be a tree like this:
- //
- // has('y')
- // /
- // / caller
- // |/_
- // has('x')
- // /
- // / caller
- // |/_
- // (null)
- //
- //We would set the caller of has('x') to be g.V(), so result would become g.V().has('x').has('y').
- //
- // Note: This operation is currently done by making a copy of the argument tree. That should
- // be changed.
- result = GremlinQueryOptimizer.copyWithNewLeafNode(
- (AbstractFunctionExpression) argument, result);
- } else {
- logger_.warn("Found non-extractable argument '{}' in the 'and' expression '{}'",argument.toString(), expr.toString());
- nonExtractableArguments.add(argument);
- }
- }
-
- if (!nonExtractableArguments.isEmpty()) {
- //add a final 'and' call with the arguments that could not be extracted
- result = factory.generateLogicalExpression(result, "and", nonExtractableArguments);
- }
- return result;
- }
-
- @Override
- public boolean isApplyRecursively() {
- return true;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpandOrsOptimization.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpandOrsOptimization.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpandOrsOptimization.java
deleted file mode 100644
index a48a007..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpandOrsOptimization.java
+++ /dev/null
@@ -1,588 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.ClosureExpression;
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.LiteralExpression;
-import org.apache.atlas.groovy.StatementListExpression;
-import org.apache.atlas.groovy.TraversalStepType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.Lists;
-
-
-
-/**
- * Optimization that removes 'or' expressions from a graph traversal when possible
- * and replaces them with separate calls that are combined using a logical union operation.
- * Unfortunately, Titan does not use indices when executing the child graph traversals associated
- * with an 'or' call. In order to make the index be used, we split queries with
- * or expressions into multiple queries. These queries are executed individually,
- * using indices, and then the results are combined back together. Here is a
- * simple example to illustrate this:
- *
- * <h4>Original Query</h4>
- *
- * <pre>
- * g.V().or(has('name','Fred'),has('age','17'))
- * </pre>
- *
- *<h4>Optimized Query</h4>
- *
- * <pre>
- * def r = [] as Set;
- * g.V().has('name','Fred').fill(r);
- * g.V().has('age','17').fill(r);
- * r;
- * </pre>
- *
- * Here, we introduce an intermediate variable "r" which is declared as a Set. The Set is performing
- * the union for us. If there are vertices that happen to both have "Fred" as the name and "17" as the age,
- * the Set will prevent the second query execution from adding a duplicate vertex to the result. Recall that
- * in Groovy scripts, the last expression is the one that will be returned back to the caller. We refer to
- * that expression is the "result expression". For this example, the result expression is simply "r", which
- * contains the vertices that matched the query.
- * <p/>
- * If the query does any kind of transformation of the vertices to produce the query result, that needs
- * to be done in the result expression. To understand why that is, let's take a look at another example:
- *
- * <h4>Original Query</h4>
- *
- * <pre>
- * g.V().or(has('name','Fred'),has('age','17')).as('person').select('person').by('gender')
- * </pre>
- *
- * <h4>Incorrect Optimized Query</h4>
- *
- * <pre>
- * def r = [] as Set;
- * g.V().has('name','Fred').as('person').select('person').by('gender').fill(r)
- * g.V().has('age','17').as('person').select('person').by('gender').fill(r)
- * r;
- * </pre>
- *
- * The problem with this query is that now 'r' contains Strings (the gender of the person). Suppose
- * that there is one person named Fred and there are 3 people whose age is 17 (let's say Fred's age is 16).
- * The original query would have produced 4 rows, one corresponding to each of those people. The new
- * query would produce at most 2 rows - one for 'male' and one for 'female'. This is happening because
- * we are now performing the union on the Strings, not on the vertices. To fix this, we need to split
- * the original query and put the end portion into the result expression:
- *
- * <h4>Correct Optimized Query</h4>
- *
- * <pre>
- * def r = [] as Set;
- * g.V().has('name','Fred').fill(r)
- * g.V().has('age','17').fill(r)
- * __.inject(r as Object[]).as('person').select('person').by('gender')
- * </pre>
- *
- * The logic for doing this splitting is described in more detail in
- * {@link #moveTransformationsToResultExpression(GroovyExpression, OptimizationContext)}.
- * <p/>
- * There is one more problematic case that this optimizer is able to handle. Let's look at the following example:
- *
- * <h4>Original Query</h4>
- *
- * <pre>
- * g.V().or(has('type','Person'),has('superType','Person')).as('x').has('qualifiedName','Fred').as('y').select('x','y').by('name').by('name')
- * </pre>
- *
- * Queries of this form appear often when translating DSL queries.
- *
- * If we were to optimize this query using the logic described above, we would get something like this:
- *
- * <h4>Incorrect Optimized Query</h4>
- *
- * <pre>
- * def r = [] as Set;
- * g.V().has('type','Person').fill(r);
- * g.V().has('superType','Person').fill(r);
- * __.inject(r as Object[]).as('x').has('qualifiedName','Fred').as('y').select('x','y');
- * </pre>
- *
- * While not strictly incorrect, this query will not perform well since the index on qualifiedName will
- * not be used. In order for that index to be used, the 'has' expression needs to be part of the original
- * query. However, if we do that alone, the query will be broken, since the select
- * will now refer to an undefined label:
- *
- * <h4>Incorrect Optimized Query</h4>
- *
- * <pre>
- * def r = [] as Set;
- * g.V().has('type','Person').as('x').has('qualifiedName','Fred').fill(r);
- * g.V().has('superType','Person').as('x').has('qualifiedName','Fred').fill(r);
- * __.inject(r as Object[]).as('y').select('x','y')
- * </pre>
- *
- * To fix this, we need to save the values of the aliased vertices in the original
- * query, and create labels in the result expression that refer to them. We do this
- * as follows:
- *
- * <h4>Correct Optimized Query</h4>
- *
- * <pre>
- * def r = [] as Set;
- * g.V().has('type','Person').as('x').has('qualifiedName','Fred').as('y').select('x','y').fill(r);
- * g.V().has('superType','Person').as('x').has('qualifiedName','Fred').select('x','y').fill(r);
- * __.inject(r as Object[]).as('__tmp').map({((Map)it.get()).get('x')}).as('x').select('__tmp').map({((Map)it.get()).get('x')}).as('y').select('x','y').by('name').by('name')
- * </pre>
- *
- * This is not pretty, but is the best solution we've found so far for supporting expressions that contain aliases in this optimization.
- * What ends up happening is that r gets populated with alias->Vertex maps. In the result expression, we make 'x' point
- * to a step where the value in the traverser is the vertex for 'x', and we do the same thing for y. The <code>select('_tmp')</code> step in the middle restores the value of
- * the traverser back to the map.
- * <p/>
- * The one known issue with the alias rearrangement is that it breaks loop expressions. As a result, expressions containing loops are currently excluded
- * from this optimization.
- *
- * ExpandOrsOptimization expands the entire expression tree recursively, so it is not invoked
- * recursively by GremlinQueryOptimizer.
- *
- */
-public class ExpandOrsOptimization implements GremlinOptimization {
-
- private static final Logger logger_ = LoggerFactory.getLogger(ExpandOrsOptimization.class);
-
- private final GremlinExpressionFactory factory;
-
- public ExpandOrsOptimization(GremlinExpressionFactory factory) {
- this.factory = factory;
- }
-
- @Override
- public boolean appliesTo(GroovyExpression expr, OptimizationContext contxt) {
-
- ExpressionFinder finder = new ExpressionFinder(IsOr.INSTANCE);
- GremlinQueryOptimizer.visitCallHierarchy(expr, finder);
- return finder.isExpressionFound();
- }
-
- @Override
- public GroovyExpression apply(GroovyExpression expr, OptimizationContext context) {
-
- setupRangeOptimization(expr, context);
- GroovyExpression traveralExpression = moveTransformationsToResultExpression(expr, context);
-
- FunctionGenerator functionGenerator = new FunctionGenerator(factory, context);
- GremlinQueryOptimizer.visitCallHierarchy(traveralExpression, functionGenerator);
- traveralExpression = functionGenerator.getNewRootExpression();
- List<GroovyExpression> bodyExpressions = expandOrs(traveralExpression, context);
-
-
- //Adds a statement to define the result variable 'v' in the
- //groovy script. The variable is declared as a Set. The type
- //of the objects in the Set depend on the number of aliases in the Groovy
- // expression:
- // - 0 or 1 alias : Vertex
- // - multiple aliases: Map<String,Vertex>
- StatementListExpression result = new StatementListExpression();
- context.prependStatement(context.getDefineResultVariableStmt());
-
-
- for (GroovyExpression bodyExpression : bodyExpressions) {
- result.addStatement(bodyExpression);
- }
- result.addStatement(context.getResultExpression());
- return result;
- }
-
- private void setupRangeOptimization(GroovyExpression expr, OptimizationContext context) {
-
- // Find any range expressions in the expression tree.
- RangeFinder rangeFinder = new RangeFinder(factory);
- GremlinQueryOptimizer.visitCallHierarchy(expr, rangeFinder);
- List<AbstractFunctionExpression> rangeExpressions = rangeFinder.getRangeExpressions();
- if (rangeExpressions.size() == 1) {
- OrderFinder orderFinder = new OrderFinder(factory);
- GremlinQueryOptimizer.visitCallHierarchy(expr, orderFinder);
- if (!orderFinder.hasOrderExpression()) {
- // If there is one range expression and no order expression in the unoptimized gremlin,
- // save the range parameters to use for adding a range expression to
- // each expanded "or" expression result, such that it will only contain the specified range of vertices.
- // For now, apply this optimization only if the range start index is zero.
- AbstractFunctionExpression rangeExpression = rangeExpressions.get(0);
- int[] rangeParameters = factory.getRangeParameters(rangeExpression);
- if (rangeParameters[0] == 0) {
- context.setRangeExpression(rangeExpression);
- }
- }
- }
- }
-
- private GroovyExpression moveTransformationsToResultExpression(GroovyExpression expr, OptimizationContext context) {
- GroovyExpression traveralExpression = expr;
-
- // Determine the 'split point'. This is the expression that will become
- // the deepest function call in the result expression. If a split
- // point is found, its caller is changed. The new caller is
- // set to the graph traversal expression in the result expression.
- // The original caller becomes the new traversal expression that
- // will be carried through the rest of the 'or' expansion processing.
- //
- // Example: g.V().has('x').as('x').select('x')
- // Here, select('x') is the split expression
- // so :
- // 1) the result expression in OptimizationContext becomes [base result expression].select('x')
- // 2) we return g.V().has('x').as('x')
-
- SplitPointFinder finder = new SplitPointFinder(factory);
- GremlinQueryOptimizer.visitCallHierarchy(traveralExpression, finder);
- AbstractFunctionExpression splitPoint = finder.getSplitPoint();
-
-
- List<LiteralExpression> aliases = new ArrayList<>();
-
- //If we're not splitting the query, there is no need to save/restore
- //the aliases.
- if(splitPoint != null) {
-
- traveralExpression = splitPoint.getCaller();
-
- AliasFinder aliasFinder = new AliasFinder();
- GremlinQueryOptimizer.visitCallHierarchy(traveralExpression, aliasFinder);
- aliases.addAll(aliasFinder.getAliases());
- if(aliasFinder.isFinalAliasNeeded()) {
- //The last alias in the expression does not capture the final vertex in the traverser,
- //so we need to create an alias to record that.
- traveralExpression = factory.generateAliasExpression(traveralExpression, context.getFinalAliasName());
- aliases.add(new LiteralExpression(context.getFinalAliasName()));
- }
-
- GroovyExpression resultExpr = getBaseResultExpression(context, aliases);
- splitPoint.setCaller(resultExpr);
- expr = removeMapFromPathsIfNeeded(expr, aliases);
- context.setResultExpression(expr);
- }
-
- //Add expression(s) to the end of the traversal expression to add the vertices
- //that were found into the intermediate variable ('r')
- traveralExpression = addCallToUpdateResultVariable(traveralExpression, aliases, context);
- return traveralExpression;
- }
-
- private GroovyExpression removeMapFromPathsIfNeeded(GroovyExpression expr, List<LiteralExpression> aliases) {
- if(aliases.size() > 0 && factory.isSelectGeneratesMap(aliases.size())) {
- RepeatExpressionFinder repeatExprFinder = new RepeatExpressionFinder(factory);
- GremlinQueryOptimizer.visitCallHierarchy(expr, repeatExprFinder);
- boolean hasRepeat = repeatExprFinder.isRepeatExpressionFound();
-
- PathExpressionFinder pathExprFinder = new PathExpressionFinder();
- GremlinQueryOptimizer.visitCallHierarchy(expr, pathExprFinder);
- boolean hasPath = pathExprFinder.isPathExpressionFound();
- if(! hasRepeat && hasPath) {
- //the path will now start with the map that we added. That is an artifact
- //of the optimization process and must be removed.
- if(expr.getType() != TraversalStepType.END && expr.getType() != TraversalStepType.NONE) {
- //we're still in the pipeline, need to execute the query before we can
- //modify the result
- expr = factory.generateToListExpression(expr);
- }
- expr = factory.removeExtraMapFromPathInResult(expr);
- }
-
- }
- return expr;
- }
-
- /**
- * This method adds steps to the end of the initial traversal to add the vertices
- * that were found into an intermediate variable (defined as a Set). If there is one alias,
- * this set will contain the vertices associated with that Alias. If there are multiple
- * aliases, the values in the set will be alias->vertex maps that have the vertex
- * associated with the alias for each result.
-
- * @param expr
- * @param aliasNames
- * @param context
- * @return
- */
- private GroovyExpression addCallToUpdateResultVariable(GroovyExpression expr,List<LiteralExpression> aliasNames, OptimizationContext context) {
-
- GroovyExpression result = expr;
- // If there is one range expression in the unoptimized gremlin,
- // add a range expression here so that the intermediate variable will only contain
- // the specified range of vertices.
- AbstractFunctionExpression rangeExpression = context.getRangeExpression();
- if (rangeExpression != null) {
- int[] rangeParameters = factory.getRangeParameters(rangeExpression);
- result = factory.generateRangeExpression(result, rangeParameters[0], rangeParameters[1]);
- }
- if( ! aliasNames.isEmpty()) {
- result = factory.generateSelectExpression(result, aliasNames, Collections.<GroovyExpression>emptyList());
- }
- return factory.generateFillExpression(result, context.getResultVariable());
- }
-
- /**
- * Recursively traverses the given expression, expanding or expressions
- * wherever they are found.
- *
- * @param expr
- * @param context
- * @return expressions that should be unioned together to get the query result
- */
- private List<GroovyExpression> expandOrs(GroovyExpression expr, OptimizationContext context) {
-
- if (GremlinQueryOptimizer.isOrExpression(expr)) {
- return expandOrFunction(expr, context);
- }
- return processOtherExpression(expr, context);
- }
-
- /**
- * This method takes an 'or' expression and expands it into multiple expressions.
- *
- * For example:
- *
- * g.V().or(has('x'),has('y')
- *
- * is expanded to:
- *
- * g.V().has('x')
- * g.V().has('y')
- *
- * There are certain cases where it is not safe to move an expression out
- * of the 'or'. For example, in the expression
- *
- * g.V().or(has('x').out('y'),has('z'))
- *
- * has('x').out('y') cannot be moved out of the 'or', since it changes the value of the traverser.
- *
- * At this time, the ExpandOrsOptimizer is not able to handle this scenario, so we don't remove
- * that expression. In cases like this, a final expression is created that ors together
- * all of the expressions that could not be extracted. In this case that would be:
- *
- * g.V().has('z')
- * g.V().or(has('y').out('z'))
- *
- * This processing is done recursively.
- *
- *
- * @param expr
- * @param context
- * @return the expressions that should be unioned together to get the query result
- */
- private List<GroovyExpression> expandOrFunction(GroovyExpression expr, OptimizationContext context) {
- FunctionCallExpression functionCall = (FunctionCallExpression) expr;
- GroovyExpression caller = functionCall.getCaller();
- List<GroovyExpression> updatedCallers = null;
- if (caller != null) {
- updatedCallers = expandOrs(caller, context);
- } else {
- updatedCallers = Collections.singletonList(null);
- }
- UpdatedExpressions newArguments = getUpdatedChildren(functionCall.getArguments(), context);
- List<GroovyExpression> allUpdatedArguments = new ArrayList<>();
- for (List<GroovyExpression> exprs : newArguments.getUpdatedChildren()) {
- allUpdatedArguments.addAll(exprs);
- }
- List<AbstractFunctionExpression> extractableArguments = new ArrayList<>();
- List<GroovyExpression> nonExtractableArguments = new ArrayList<>();
- for (GroovyExpression argument : allUpdatedArguments) {
-
- if (GremlinQueryOptimizer.isExtractable(argument)) {
- extractableArguments.add((AbstractFunctionExpression) argument);
- } else {
- logger_.warn("Found non-extractable argument '{}; in the 'or' expression '{}'",argument.toString(), expr.toString());
- nonExtractableArguments.add(argument);
- }
- }
-
- List<GroovyExpression> result = new ArrayList<>();
- for (GroovyExpression updatedCaller : updatedCallers) {
-
- for (AbstractFunctionExpression arg : extractableArguments) {
- GroovyExpression updated = GremlinQueryOptimizer.copyWithNewLeafNode(arg, updatedCaller);
- result.add(updated);
- }
- if (!nonExtractableArguments.isEmpty()) {
- result.add(factory.generateLogicalExpression(updatedCaller, "or", nonExtractableArguments));
- }
-
- }
- return result;
- }
-
- private UpdatedExpressions getUpdatedChildren(List<GroovyExpression> children, OptimizationContext context) {
- List<List<GroovyExpression>> updatedChildren = new ArrayList<>();
- boolean changed = false;
- for (GroovyExpression child : children) {
- List<GroovyExpression> childChoices = expandOrs(child, context);
- if (childChoices.size() != 1 || childChoices.iterator().next() != child) {
- changed = true;
- }
- updatedChildren.add(childChoices);
- }
- return new UpdatedExpressions(changed, updatedChildren);
- }
-
- private UpdatedExpressions getUpdatedChildren(GroovyExpression expr, OptimizationContext context) {
- return getUpdatedChildren(expr.getChildren(), context);
- }
-
- /**
- * This is called when we encounter an expression that is not an "or", for example an "and" expressio. For these
- * expressions, we process the children and create copies with the cartesian product of the updated
- * arguments.
- *
- * Example:
- *
- * g.V().and(or(has('x),has('y'), or(has('a'),has('b')))
- *
- * Here, we have an "and" expression with two children:
- *
- * 1) or(has('x),has('y')
- * 2) or(has('a'),has('b'))
- *
- * We first process these children. They each yield 2 expressions:
- *
- * 1 -> [ has('x'), has('y') ]
- * 2 -> [ has('a'), has('b') ]
- *
- * The cartesian product of these gives this:
- *
- * [ has('x'), has('a') ]
- * [ has('x'), has('b') ]
- * [ has('y'), has('a') ]
- * [ has('y'), has('b') ]
- *
- * So the overall result is:
- *
- * g.V().and(has('x'), has('a'))
- * g.V().and(has('x'), has('b'))
- * g.V().and(has('y'), has('a'))
- * g.V().and(has('y'), has('b'))
- *
- *
- * @param source
- * @param context
- * @return expressions that should be unioned together to get the query result
- */
- private List<GroovyExpression> processOtherExpression(GroovyExpression source, OptimizationContext context) {
- UpdatedExpressions updatedChildren = getUpdatedChildren(source, context);
- if (!updatedChildren.hasChanges()) {
- return Collections.singletonList(source);
- }
- List<GroovyExpression> result = new ArrayList<GroovyExpression>();
-
- //The updated children list we get back has the possible values for each child
- //in the expression. We compute a cartesian product to get all possible
- //combinations of child values.
- List<List<GroovyExpression>> updateChildLists = Lists.cartesianProduct(updatedChildren.getUpdatedChildren());
-
- for (List<GroovyExpression> updatedChildList : updateChildLists) {
- result.add(source.copy(updatedChildList));
- }
- return result;
- }
-
- @Override
- public boolean isApplyRecursively() {
- return false;
- }
-
- /**
- *
- * This method creates a base result expression that recreates the state of the
- * graph traverser at start of the result expression to what it would have been
- * if we had been executing one Gremlin query (instead of many and doing a union).
- *
- * To do this, we start with an anonymous graph traversal that will iterate
- * through the values in the intermediate Set that was created. We then need
- * to set things up so that the aliases that were in the original gremlin query
- * refer to steps with the correct traverser value.
- *
- * The way we do this depends on the number of aliases. If there are 0 or 1 alias,
- * the intermediate variable already contains Vertices, so we just create the alias.
- *
- * If there are multiple aliases, the intermediate variable contains a String->Vertex
- * map. We first create a temporary alias that refers to that map. For each alias,
- * we use a MapStep to map the map to the Vertex for that alias. We then add back
- * the alias, making it refer to the MapStep. Between the alias restorations, we restore the
- * traverser object back to the map.
- *
- * @param context
- * @param aliases
- * @return
- */
- private GroovyExpression getBaseResultExpression(OptimizationContext context,
- List<LiteralExpression> aliases) {
-
- //Start with an anonymous traversal that gets its objects from the intermediate result variable.
- GroovyExpression parent = factory.generateSeededTraversalExpresssion(aliases.size() > 1, context.getResultVariable());
-
- if(aliases.isEmpty()) {
- return parent;
- }
-
- //The expression we will return.
- GroovyExpression result = parent;
-
- //We use a temporary alias to save/restore the original value of the traverser
- //at the start of the query. We do this so we can set the value of the traverser
- //back to being the map after we retrieve each alias. If there is only one
- //alias, the save/restore is not needed, so there is no need to create this alias.
- if(aliases.size() > 1) {
-
- result = factory.generateAliasExpression(result, context.getTempAliasName());
- }
-
- Iterator<LiteralExpression> it = aliases.iterator();
- while(it.hasNext()) {
- LiteralExpression curAlias = it.next();
- //A map is only generated by Gremlin when there is more than one alias. When there is only one
- //alias, the intermediate variable will directly contain the vertices.
- if(factory.isSelectGeneratesMap(aliases.size())) {
- //Since there is more than one alias, the current traverser object is an alias->vertex
- //map. We use a MapStep to map that map to the Vertex for the current alias. This sets
- //the current traverser object to that Vertex. We do this by defining the closure we
- //pass to the MapStep call [map].get(aliasName) where [map] is the expression
- //that refers to the map.
-
- GroovyExpression rowMapExpr = factory.getCurrentTraverserObject(factory.getClosureArgumentValue());
- GroovyExpression getExpr = factory.generateGetSelectedValueExpression(curAlias, rowMapExpr);
- result = factory.generateMapExpression(result, new ClosureExpression(getExpr));
- }
-
- //Create alias that points to the previous step. The traverser value at that step
- //is the Vertex associated with this alias.
- result = factory.generateAliasExpression(result, curAlias.getValue().toString());
- if(it.hasNext()) {
- //Restore the current value of the traverser back to the current alias->vertex map
- result = factory.generateBackReferenceExpression(result, false, context.getTempAliasName());
- }
- }
- return result;
- }
-
-
-
-
-}
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpressionFinder.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpressionFinder.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpressionFinder.java
deleted file mode 100644
index 2721049..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/ExpressionFinder.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import com.google.common.base.Function;
-
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-
-/**
- * Call hierarchy visitor that checks if an expression
- * matching the specified criteria is present
- * in the call hierarchy.
- */
-public class ExpressionFinder implements CallHierarchyVisitor {
-
- private final Function<GroovyExpression, Boolean> predicate;
- private boolean expressionFound = false;
-
- public ExpressionFinder(Function<GroovyExpression, Boolean> predicate) {
- this.predicate = predicate;
- }
- @Override
- public boolean preVisitFunctionCaller(AbstractFunctionExpression expr) {
- if (predicate.apply(expr)) {
- expressionFound = true;
- return false;
- }
- return true;
- }
-
- @Override
- public void visitNonFunctionCaller(GroovyExpression expr) {
- if (predicate.apply(expr)) {
- expressionFound = true;
- }
- }
-
- @Override
- public void visitNullCaller() {
- //nothing to do
- }
-
- @Override
- public boolean postVisitFunctionCaller(AbstractFunctionExpression functionCall) {
- //nothing to do
- return true;
- }
-
- public boolean isExpressionFound() {
- return expressionFound;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/FunctionGenerator.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/FunctionGenerator.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/FunctionGenerator.java
deleted file mode 100644
index 1a93d0f..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/FunctionGenerator.java
+++ /dev/null
@@ -1,326 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import java.util.List;
-
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.ClosureExpression;
-import org.apache.atlas.groovy.ClosureExpression.VariableDeclaration;
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.IdentifierExpression;
-
-/**
- * Extracts common expressions from an or-containing expression
- * into functions. These expressions would otherwise be duplicated
- * as part of expanding the "or". Doing this shortens the overall length
- * of the Gremlin script so we can maximize query performance.
- *
- */
-public class FunctionGenerator implements CallHierarchyVisitor {
-
- //Function length constants.
- //These assume we won't reach more than 9 function definitions. Even if we do, this is still
- //a reasonable approximation.
- private static final int INITIAL_FUNCTION_DEF_LENGTH = "def f1={};".length();
- private final int functionDefLength;
- private static final int FUNCTION_CALL_OVERHEAD = "f1()".length();
-
- /**
- * The expression that should be the first (deepest) expression
- * in the body of the next generated function. As we go up the
- * expression tree in the post visit, this is updated based on the
- * expressions we see. During the post visits, if it is null,
- * the body expression is set to the expression we're visiting.
- * As we go up the tree, it is nulled out if we create a function
- * or encounter an or expression. This guarantees that the
- * next function body will not contain any or expressions
- * and that it will not have expressions that are already
- * part of some other function.
- */
- private GroovyExpression nextFunctionBodyStart;
-
- /**
- * The number of times expressions will be duplicated.
- */
- private int scaleFactor = 1;
-
- private final OptimizationContext context;
-
- /**
- * The current depth in the expression tree.
- */
- private int depth = 0;
-
- /**
- * The name of the last function that was generated. If set,
- * we can safely update this function instead of creating a new one.
- */
- private String currentFunctionName;
-
- /**
- * The updated expression we will pass back to the caller.
- */
- private GroovyExpression newRootExpression;
-
- private final GremlinExpressionFactory factory;
-
- public FunctionGenerator(GremlinExpressionFactory factory, OptimizationContext context) {
- this.context = context;
- this.factory = factory;
- functionDefLength = ("def f1={" + factory.getTraversalExpressionClass() + " x->};").length();
- }
-
- @Override
- public boolean preVisitFunctionCaller(AbstractFunctionExpression expr) {
- depth++;
- if (IsOr.INSTANCE.apply(expr)) {
- FunctionCallExpression functionCall = (FunctionCallExpression) expr;
- scaleFactor *= functionCall.getArguments().size();
- }
- if (newRootExpression == null) {
- newRootExpression = expr;
- }
-
- return true;
- }
-
- @Override
- public void visitNonFunctionCaller(GroovyExpression expr) {
- if (nextFunctionBodyStart == null) {
- nextFunctionBodyStart = expr;
- }
-
- }
-
- @Override
- public void visitNullCaller() {
- //nothing to do
- }
-
- @Override
- public boolean postVisitFunctionCaller(AbstractFunctionExpression expr) {
- boolean isRootExpr = depth == 1;
- visitParentExpression(expr);
-
- //The root expression has no parent. To simplify the logic, we create
- //a dummy expression so it does have a parent, then call visitParentExpression again
- //to examine the root expression.
- if (isRootExpr) {
- FunctionCallExpression dummyParent = new FunctionCallExpression(expr, "dummy");
- visitParentExpression(dummyParent);
- newRootExpression = dummyParent.getCaller();
- }
-
- depth--;
- return true;
- }
-
- /**
- * Checks to see if the *caller* of this expression should become part
- * of a function. If so, either a new function is created, or the
- * expression becomes part of the last function we created.
- *
- * @param parentExpr
- */
- private void visitParentExpression(AbstractFunctionExpression parentExpr) {
-
- if (nextFunctionBodyStart == null) {
- nextFunctionBodyStart = parentExpr;
- }
-
- if (currentFunctionName != null) {
- updateCurrentFunction(parentExpr);
- } else {
- createFunctionIfNeeded(parentExpr);
- }
-
- if (GremlinQueryOptimizer.isOrExpression(parentExpr)) {
- //reset
- currentFunctionName = null;
- //don't include 'or' in generated functions
- nextFunctionBodyStart = null;
- }
-
- }
-
- /**
- * Creates a function whose body goes from the child of parentExpr
- * up to (and including) the functionBodyEndExpr.
- * @param parentExpr
- */
- private void createFunctionIfNeeded(AbstractFunctionExpression parentExpr) {
- GroovyExpression potentialFunctionBody = parentExpr.getCaller();
-
- if (creatingFunctionShortensGremlin(potentialFunctionBody)) {
- GroovyExpression functionCall = null;
-
- if (nextFunctionBodyStart instanceof AbstractFunctionExpression) {
- //The function body start is a function call. In this
- //case, we generate a function that takes one argument, which
- //is a graph traversal. We have an expression tree that
- //looks kind of like the following:
- //
- // parentExpr
- // /
- // / caller
- // |/_
- // potentialFunctionBody
- // /
- // / caller
- // |/_
- // ...
- // /
- // / caller
- // |/_
- // nextFunctionBodyStart
- // /
- // / caller
- // |/_
- // oldCaller
- //
- //
- // Note that potentialFunctionBody and nextFunctionBodyStart
- // could be the same expression. Let's say that the next
- // function name is f1
- //
- // We reshuffle these expressions to the following:
- //
- // parentExpr
- // /
- // / caller
- // |/_
- // f1(oldCaller)
- //
- //
- // potentialFunctionBody <- body of new function "f1(GraphTraversal x)"
- // /
- // / caller
- // |/_
- // ...
- // /
- // / caller
- // |/_
- // nextFunctionBodyStart
- // /
- // / caller
- // |/_
- // x
- //
- // As an example, suppose parentExpr is g.V().or(x,y).has(a).has(b).has(c)
- // where has(a) is nextFunctionBodyStart.
- //
- // We generate a function f1 = { GraphTraversal x -> x.has(a).has(b) }
- // parentExpr would become : f1(g.V().or(x,y)).has(c)
-
- AbstractFunctionExpression nextFunctionBodyStartFunction=
- (AbstractFunctionExpression) nextFunctionBodyStart;
- String variableName = "x";
- IdentifierExpression var = new IdentifierExpression(variableName);
- GroovyExpression oldCaller = nextFunctionBodyStartFunction.getCaller();
- nextFunctionBodyStartFunction.setCaller(var);
-
- currentFunctionName = context.addFunctionDefinition(new VariableDeclaration(factory.getTraversalExpressionClass(), "x"),
- potentialFunctionBody);
- functionCall = new FunctionCallExpression(potentialFunctionBody.getType(),
- currentFunctionName, oldCaller);
-
- } else {
- //The function body start is not a function call. In this
- //case, we generate a function that takes no arguments.
-
- // As an example, suppose parentExpr is g.V().has(a).has(b).has(c)
- // where g is nextFunctionBodyStart.
- //
- // We generate a function f1 = { g.V().has(a).has(b) }
- // parentExpr would become : f1().has(c)
-
- currentFunctionName = context.addFunctionDefinition(null, potentialFunctionBody);
- functionCall = new FunctionCallExpression(potentialFunctionBody.getType(), currentFunctionName);
- }
-
- //functionBodyEnd is now part of a function definition, don't propagate it
- nextFunctionBodyStart = null;
- parentExpr.setCaller(functionCall);
- }
- }
-
- /**
- * Adds the caller of parentExpr to the current body of the last
- * function that was created.
- *
- * @param parentExpr
- */
- private void updateCurrentFunction(AbstractFunctionExpression parentExpr) {
- GroovyExpression expr = parentExpr.getCaller();
- if (expr instanceof AbstractFunctionExpression) {
- AbstractFunctionExpression exprAsFunction = (AbstractFunctionExpression) expr;
- GroovyExpression exprCaller = exprAsFunction.getCaller();
- parentExpr.setCaller(exprCaller);
- updateCurrentFunctionDefintion(exprAsFunction);
- }
- }
-
- private void updateCurrentFunctionDefintion(AbstractFunctionExpression exprToAdd) {
- ClosureExpression functionBodyClosure = context.getUserDefinedFunctionBody(currentFunctionName);
- if (functionBodyClosure == null) {
- throw new IllegalStateException("User-defined function " + currentFunctionName + " not found!");
- }
- List<GroovyExpression> exprs = functionBodyClosure.getStatements();
- GroovyExpression currentFunctionBody = exprs.get(exprs.size() - 1);
- //Update the expression so it is called by the current return
- //value of the function.
- exprToAdd.setCaller(currentFunctionBody);
- functionBodyClosure.replaceStatement(exprs.size() - 1, exprToAdd);
- }
-
- //Determines if extracting this expression into a function will shorten
- //the overall length of the Groovy script.
- private boolean creatingFunctionShortensGremlin(GroovyExpression headExpr) {
- int tailLength = getTailLength();
- int length = headExpr.toString().length() - tailLength;
-
- int overhead = 0;
- if (nextFunctionBodyStart instanceof AbstractFunctionExpression) {
- overhead = functionDefLength;
- } else {
- overhead = INITIAL_FUNCTION_DEF_LENGTH;
- }
- overhead += FUNCTION_CALL_OVERHEAD * scaleFactor;
- //length * scaleFactor = space taken by having the expression be inlined [scaleFactor] times
- //overhead + length = space taken by the function definition and its calls
- return length * scaleFactor > overhead + length;
- }
-
- private int getTailLength() {
- if (nextFunctionBodyStart == null) {
- return 0;
- }
- if (!(nextFunctionBodyStart instanceof AbstractFunctionExpression)) {
- return 0;
- }
- AbstractFunctionExpression bodyEndAsFunction = (AbstractFunctionExpression) nextFunctionBodyStart;
- return bodyEndAsFunction.getCaller().toString().length();
- }
-
- public GroovyExpression getNewRootExpression() {
- return newRootExpression;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/GremlinOptimization.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/GremlinOptimization.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/GremlinOptimization.java
deleted file mode 100644
index bfa45af..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/GremlinOptimization.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import org.apache.atlas.groovy.GroovyExpression;
-
-/**
- * An optimization that can be applied to a gremlin query.
- */
-public interface GremlinOptimization {
-
- /**
- * Whether or not this optimization should be applied to the given expression
- * @param expr
- * @param contxt
- * @return
- */
- boolean appliesTo(GroovyExpression expr, OptimizationContext contxt);
- /**
- * Whether or not GremlinQueryOptimizer should call this optimization recursively
- * on the updated children.
- */
- boolean isApplyRecursively();
-
- /**
- * Applies the optimization.
- *
- * @param expr
- * @param context
- * @return the optimized expression
- */
- GroovyExpression apply(GroovyExpression expr, OptimizationContext context);
-}
[05/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/MultiplicityTest.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/MultiplicityTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/MultiplicityTest.scala
deleted file mode 100644
index 91e72c7..0000000
--- a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/MultiplicityTest.scala
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.builders
-
-import org.apache.atlas.AtlasException
-import org.apache.atlas.typesystem.types.{ClassType, Multiplicity, TypeSystem}
-import org.testng.annotations.{BeforeMethod,Test}
-
-class MultiplicityTest {
-
- @BeforeMethod
- def beforeAll {
- TypeSystem.getInstance().reset()
-
- val b = new TypesBuilder
- import b._
-
- val tDef = types {
-
- _trait("Dimension") {}
- _trait("PII") {}
- _trait("Metric") {}
- _trait("ETL") {}
- _trait("JdbcAccess") {}
-
- _class("DB") {
- "name" ~ (string, required, indexed, unique)
- "owner" ~ (string)
- "createTime" ~ (int)
- }
-
- _class("StorageDesc") {
- "inputFormat" ~ (string, required)
- "outputFormat" ~ (string, required)
- }
-
- _class("Column") {
- "name" ~ (string, required)
- "dataType" ~ (string, required)
- "sd" ~ ("StorageDesc", required)
- }
-
- _class("Table", List()) {
- "name" ~ (string, required, indexed)
- "db" ~ ("DB", required)
- "sd" ~ ("StorageDesc", required)
- }
-
- _class("LoadProcess") {
- "name" ~ (string, required)
- "inputTables" ~ (array("Table"), collection)
- "outputTable" ~ ("Table", required)
-
- }
-
- _class("View") {
- "name" ~ (string, required)
- "inputTables" ~ (array("Table"), collection)
- }
-
- _class("AT") {
- "name" ~ (string, required)
- "stringSet" ~ (array("string"), multiplicty(0, Int.MaxValue, true))
- }
- }
-
- TypeSystem.getInstance().defineTypes(tDef)
- }
-
- @Test
- def test1 {
-
- val b = new InstanceBuilder
- import b._
-
- val instances = b create {
- val a = instance("AT") { // use instance to create Referenceables. use closure to
- // set attributes of instance
- 'name ~ "A1" // use '~' to set attributes. Use a Symbol (names starting with ') for
- 'stringSet ~ Seq("a", "a")
- }
- }
-
- val ts = TypeSystem.getInstance()
- import scala.collection.JavaConversions._
- val typedInstances = instances.map { i =>
- val iTyp = ts.getDataType(classOf[ClassType], i.getTypeName)
- iTyp.convert(i, Multiplicity.REQUIRED)
- }
-
- typedInstances.foreach { i =>
- println(i)
- }
- }
-
- @Test(expectedExceptions = Array(classOf[AtlasException]) , expectedExceptionsMessageRegExp = "A multiplicty of more than one requires a collection type for attribute 'stringSet'")
- def WrongMultiplicity {
- val b = new TypesBuilder
- import b._
- val tDef = types {
- _class("Wrong") {
- "name" ~ (string, required)
- "stringSet" ~ (string, multiplicty(0, Int.MaxValue, true))
- }
- }
- TypeSystem.getInstance().defineTypes(tDef)
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/TypesBuilderTest.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/TypesBuilderTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/TypesBuilderTest.scala
deleted file mode 100644
index d01adb4..0000000
--- a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/TypesBuilderTest.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.builders
-
-import org.apache.atlas.typesystem.json.TypesSerialization
-import org.apache.atlas.typesystem.types.TypeSystem
-import org.testng.annotations.Test
-
-class TypesBuilderTest extends BuilderTest {
-
-
- @Test def test1 {
- TypeSystem.getInstance().defineTypes(tDef)
-
- println(TypesSerialization.toJson(TypeSystem.getInstance(), x => true))
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/scala/org/apache/atlas/typesystem/json/InstanceSerializationTest.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/json/InstanceSerializationTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/json/InstanceSerializationTest.scala
deleted file mode 100644
index 9e656a5..0000000
--- a/typesystem/src/test/scala/org/apache/atlas/typesystem/json/InstanceSerializationTest.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.json
-
-import scala.util.Random
-
-import org.apache.atlas.typesystem.Referenceable
-import org.apache.atlas.typesystem.persistence.Id
-import org.apache.atlas.typesystem.types.DataTypes
-import org.apache.atlas.typesystem.types.TypeSystem
-import org.apache.atlas.typesystem.types.utils.TypesUtil
-import org.testng.Assert.assertEquals
-import org.testng.Assert.assertNotNull
-import org.testng.Assert.assertTrue
-import org.testng.annotations.BeforeClass
-import org.testng.annotations.Test
-
-import com.google.common.collect.ImmutableSet
-
-class InstanceSerializationTest {
- private var typeName: String = null
-
- @BeforeClass def setup {
- typeName = "Random_" + Math.abs(Random.nextInt())
- val clsType = TypesUtil.createClassTypeDef(typeName, "Random-description", ImmutableSet.of[String](),
- TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE))
- TypeSystem.getInstance().defineClassType(clsType)
- }
-
- @Test def testIdentity {
- val entity: Referenceable = new Referenceable(typeName)
- val json: String = InstanceSerialization.toJson(entity, true)
- val entity2: Referenceable = InstanceSerialization.fromJsonReferenceable(json, true)
- assertNotNull(entity2)
- assertEquals(entity2.getId, entity.getId, "Simple conversion failed")
- assertEquals(entity2.getTraits, entity.getTraits, "Traits mismatch")
- }
-
- @Test def testReferenceArrayWithNoState {
- val staticJson = s"""{
- "jsonClass": "org.apache.atlas.typesystem.json.InstanceSerialization$$_Reference",
- "id": {
- "jsonClass": "org.apache.atlas.typesystem.json.InstanceSerialization$$_Id",
- "version": 0,
- "typeName": "LoadProcess"
- },
- "typeName": "LoadProcess",
- "values": {
- "inputTables": [{
- "jsonClass": "org.apache.atlas.typesystem.json.InstanceSerialization$$_Id",
- "id": "bacfa996-e88e-4d7e-9630-68c9829b10b4",
- "version": 0,
- "typeName": "Table"
- }, {
- "jsonClass": "org.apache.atlas.typesystem.json.InstanceSerialization$$_Id",
- "id": "6da06805-3f56-446f-8831-672a65ac2199",
- "version": 0,
- "typeName": "Table"
- }, {
- "jsonClass": "org.apache.atlas.typesystem.json.InstanceSerialization$$_Reference",
- "typeName": "$typeName",
- "values": {}
- "traitNames": []
- "traits": {}
- }
- ],
- "outputTable": {
- "jsonClass": "org.apache.atlas.typesystem.json.InstanceSerialization$$_Id",
- "id": "d5c3d6d0-aa10-44c1-b05d-ed9400d2a5ac",
- "version": 0,
- "typeName": "Table"
- },
- "name": "loadSalesDaily"
- },
- "traitNames": [
- "ETL"
- ],
- "traits": {
- "ETL": {
- "jsonClass": "org.apache.atlas.typesystem.json.InstanceSerialization$$_Struct",
- "typeName": "ETL",
- "values": {
- }
- }
- }
- }
- """;
-
- val entity: Referenceable = InstanceSerialization.fromJsonReferenceable(staticJson, true)
- val outputTable = entity.getValuesMap.get("outputTable")
- val inputTables : java.util.List[_] = entity.getValuesMap().get("inputTables").asInstanceOf[java.util.List[_]]
-
- assertTrue(entity.getId.isInstanceOf[Id]);
- assertTrue(outputTable.isInstanceOf[Id]);
- import scala.collection.JavaConversions._
- assertTrue(inputTables(0).isInstanceOf[Id]);
- assertTrue(inputTables(1).isInstanceOf[Id]);
- assertTrue(inputTables(2).isInstanceOf[Referenceable]);
- }
-
- @Test def testMissingStateInId: Unit = {
- val entity: Referenceable = new Referenceable(typeName)
- val staticJson: String = s"""{
- "jsonClass": "org.apache.atlas.typesystem.json.InstanceSerialization$$_Reference",
- "id": {
- "jsonClass": "org.apache.atlas.typesystem.json.InstanceSerialization$$_Id",
- "id": "${entity.getId.id}",
- "version":0,
- "typeName": "${entity.getTypeName}",
- },
- "typeName": "${entity.getTypeName}",
- "values": {}
- "traitNames": []
- "traits": {}
- }"""
- val entity2: Referenceable = InstanceSerialization.fromJsonReferenceable(staticJson, true)
- assertNotNull(entity2)
- assertNotNull(entity2.getId)
- assertNotNull(entity2.getId.id) // This creates a new id so the values will not match.
- assertEquals(entity2.getId.typeName, entity.getId.typeName)
- assertEquals(entity2.getId.version, entity.getId.version)
- assertEquals(entity2.getId.state, entity.getId.state)
- assertEquals(entity2.getTypeName, entity.getTypeName, "Type name mismatch")
- assertEquals(entity2.getValuesMap, entity.getValuesMap, "Values mismatch")
- assertEquals(entity2.getTraits, entity.getTraits, "Traits mismatch")
- }
-
- @Test def testMissingId: Unit = {
- val entity: Referenceable = new Referenceable(typeName)
- val staticJson: String = s"""{
- "jsonClass": "org.apache.atlas.typesystem.json.InstanceSerialization$$_Reference",
- "typeName": "${entity.getTypeName}",
- "values": {}
- "traitNames": []
- "traits": {}
- }"""
- val entity2: Referenceable = InstanceSerialization.fromJsonReferenceable(staticJson, true)
- assertNotNull(entity2)
- assertNotNull(entity2.getId)
- assertNotNull(entity2.getId.id) // This creates a new id so the values will not match.
- assertEquals(entity2.getId.typeName, entity.getId.typeName)
- assertEquals(entity2.getId.version, entity.getId.version)
- assertEquals(entity2.getId.state, entity.getId.state)
- assertEquals(entity2.getTypeName, entity.getTypeName, "Type name mismatch")
- assertEquals(entity2.getValuesMap, entity.getValuesMap, "Values mismatch")
- assertEquals(entity2.getTraits, entity.getTraits, "Traits mismatch")
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/scala/org/apache/atlas/typesystem/json/SerializationTest.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/json/SerializationTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/json/SerializationTest.scala
deleted file mode 100755
index 931773d..0000000
--- a/typesystem/src/test/scala/org/apache/atlas/typesystem/json/SerializationTest.scala
+++ /dev/null
@@ -1,263 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.json
-
-import com.google.common.collect.ImmutableList
-import org.apache.atlas.typesystem.persistence.Id.EntityState
-import org.apache.atlas.typesystem.persistence.{Id, ReferenceableInstance, StructInstance}
-import org.apache.atlas.typesystem.types._
-import org.apache.atlas.typesystem.types.utils.TypesUtil
-import org.apache.atlas.typesystem.{ITypedReferenceableInstance, ITypedStruct, Referenceable, Struct}
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.{write => swrite, _}
-import org.json4s.{NoTypeHints, _}
-import org.testng.Assert
-import org.testng.annotations.{BeforeMethod,Test}
-import com.google.common.collect.ImmutableSet
-import org.testng.Assert.assertEquals
-
-class SerializationTest extends BaseTest {
-
- private[atlas] var structType: StructType = null
- private[atlas] var recursiveStructType: StructType = null
-
- @BeforeMethod
- override def setup {
- super.setup
- structType = getTypeSystem.getDataType(classOf[StructType], BaseTest.STRUCT_TYPE_1).asInstanceOf[StructType]
- recursiveStructType = getTypeSystem.getDataType(classOf[StructType], BaseTest.STRUCT_TYPE_2).asInstanceOf[StructType]
- }
-
- @Test def test1 {
- val s: Struct = BaseTest.createStruct()
- val ts: ITypedStruct = structType.convert(s, Multiplicity.REQUIRED)
-
- Assert.assertEquals(ts.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t" + BaseTest.TEST_DATE + "\n\tm : \t[1, 1]\n\tn : \t[1.1, 1.1]\n\to : \t{a=1.0, b=2.0}\n\tp : \t\n\tq : \t<null>\n\tr : \t{a=}\n}")
-
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new BigDecimalSerializer + new BigIntegerSerializer
-
- //Json representation
- val ser = swrite(ts)
- val ser1 = swrite(ts.toString)
- Assert.assertEquals(ser1, "\"{\\n\\ta : \\t1\\n\\tb : \\ttrue\\n\\tc : \\t1\\n\\td : \\t2\\n\\te : \\t1\\n\\tf : \\t1\\n\\tg : \\t1\\n\\th : \\t1.0\\n\\ti : \\t1.0\\n\\tj : \\t1\\n\\tk : \\t1\\n\\tl : \\t" + BaseTest.TEST_DATE + "\\n\\tm : \\t[1, 1]\\n\\tn : \\t[1.1, 1.1]\\n\\to : \\t{a=1.0, b=2.0}\\n\\tp : \\t\\n\\tq : \\t<null>\\n\\tr : \\t{a=}\\n}\"");
- // Typed Struct read back
- val ts1 = read[StructInstance](ser)
- Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t" + BaseTest.TEST_DATE + "\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{a=1.0, b=2.0}\n\tp : \t\n\tq : \t<null>\n\tr : \t{a=}\n}")
- }
-
- @Test def test2 {
- val s: Struct = BaseTest.createStruct()
- val ts: ITypedStruct = structType.convert(s, Multiplicity.REQUIRED)
-
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new BigDecimalSerializer + new BigIntegerSerializer
-
- val ts1 = read[StructInstance](
- """
- {"$typeName$":"t1","e":1,"n":[1.1,1.1],"h":1.0,"b":true,"k":1,"j":1,"d":2,"m":[1,1],"g":1,"a":1,"i":1.0,
- "c":1,"l":"2014-12-03T19:38:55.053Z","f":1,"o":{"a":1.0,"b":2.0}}""")
- // Typed Struct read from string
- Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-03T19:38:55.053Z\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{a=1.0, b=2.0}\n\tp : \t<null>\n\tq : \t<null>\n\tr : \t<null>\n}")
- }
-
- @Test def testTrait {
- val A: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef("A", null,
- TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
- TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
- TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
- TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE))
- val B: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef(
- "B", ImmutableSet.of[String]("A"),
- TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE))
- val C: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef(
- "C", ImmutableSet.of[String]("A"),
- TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE))
- val D: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef(
- "D", ImmutableSet.of[String]("B", "C"),
- TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE))
-
- defineTraits(A, B, C, D)
-
- val DType: TraitType = getTypeSystem.getDataType(classOf[TraitType], "D").asInstanceOf[TraitType]
- val s1: Struct = new Struct("D")
- s1.set("d", 1)
- s1.set("c", 1)
- s1.set("b", true)
- s1.set("a", 1)
- s1.set("A.B.D.b", true)
- s1.set("A.B.D.c", 2)
- s1.set("A.B.D.d", 2)
- s1.set("A.C.D.a", 3)
- s1.set("A.C.D.b", false)
- s1.set("A.C.D.c", 3)
- s1.set("A.C.D.d", 3)
-
- val s: Struct = BaseTest.createStruct()
- val ts: ITypedStruct = DType.convert(s1, Multiplicity.REQUIRED)
-
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new BigDecimalSerializer + new BigIntegerSerializer
-
- // Typed Struct :
- Assert.assertEquals(ts.toString, "{\n\td : \t1\n\tb : \ttrue\n\tc : \t1\n\ta : \t1\n\tA.B.D.b : \ttrue\n\tA.B.D.c : \t2\n\tA.B.D.d : \t2\n\tA.C.D.a : \t3\n\tA.C.D.b : \tfalse\n\tA.C.D.c : \t3\n\tA.C.D.d : \t3\n}")
-
- // Json representation :
- val ser = swrite(ts)
- Assert.assertEquals(ser, "{\"$typeName$\":\"D\",\"A.C.D.d\":3,\"A.B.D.c\":2,\"b\":true,\"A.C.D.c\":3,\"d\":1,\"A.B.D.b\":true,\"a\":1,\"A.C.D.b\":false,\"A.B.D.d\":2,\"c\":1,\"A.C.D.a\":3}")
-
- val ts1 = read[StructInstance](
- """
- {"$typeName$":"D","A.C.D.d":3,"A.B.D.c":2,"b":true,"A.C.D.c":3,"d":1,
- "A.B.D.b":true,"a":1,"A.C.D.b":false,"A.B.D.d":2,"c":1,"A.C.D.a":3}""")
- // Typed Struct read from string:
- Assert.assertEquals(ts1.toString, "{\n\td : \t1\n\tb : \ttrue\n\tc : \t1\n\ta : \t1\n\tA.B.D.b : \ttrue\n\tA.B.D.c : \t2\n\tA.B.D.d : \t2\n\tA.C.D.a : \t3\n\tA.C.D.b : \tfalse\n\tA.C.D.c : \t3\n\tA.C.D.d : \t3\n}")
- }
-
- def defineHRTypes(ts: TypeSystem) : Unit = {
- val deptTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
- "Department",
- ImmutableSet.of[String],
- TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("employees", String.format("array<%s>", "Person"),
- Multiplicity.COLLECTION, true, "department"))
- val personTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
- "Person", ImmutableSet.of[String],
- TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
- new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"))
- val managerTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef(
- "Manager", ImmutableSet.of[String]("Person"),
- new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
- Multiplicity.COLLECTION, false, "manager"))
- val securityClearanceTypeDef: HierarchicalTypeDefinition[TraitType] =
- TypesUtil.createTraitTypeDef("SecurityClearance", ImmutableSet.of[String],
- TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE))
-
- ts.defineTypes(ImmutableList.of[EnumTypeDefinition], ImmutableList.of[StructTypeDefinition],
- ImmutableList.of[HierarchicalTypeDefinition[TraitType]](securityClearanceTypeDef),
- ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef)
- )
-
- }
-
- def defineHRDept() : Referenceable = {
- val hrDept: Referenceable = new Referenceable("Department")
- val john: Referenceable = new Referenceable("Person")
- val jane: Referenceable = new Referenceable("Manager", "SecurityClearance")
- hrDept.set("name", "hr")
- john.set("name", "John")
- john.set("department", hrDept.getId)
- jane.set("name", "Jane")
- jane.set("department", hrDept.getId)
- john.set("manager", jane.getId)
- hrDept.set("employees", ImmutableList.of[Referenceable](john, jane))
- jane.set("subordinates", ImmutableList.of[Id](john.getId))
- jane.getTrait("SecurityClearance").set("level", 1)
- hrDept
- }
-
- @Test def testClass {
-
- val ts: TypeSystem = getTypeSystem
- defineHRTypes(ts)
- val hrDept: Referenceable = defineHRDept()
-
- val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
- val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
-
- println(s"HR Dept Object Graph:\n${hrDept2}\n")
-
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
- val ser = swrite(hrDept2)
- println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")
-
- println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
- }
-
- @Test def testReference {
-
- val ts: TypeSystem = getTypeSystem
- defineHRTypes(ts)
- val hrDept: Referenceable = defineHRDept()
-
-
- val jsonStr = InstanceSerialization.toJson(hrDept)
- val hrDept2 = InstanceSerialization.fromJsonReferenceable(jsonStr)
-
- val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
- val hrDept3: ITypedReferenceableInstance = deptType.convert(hrDept2, Multiplicity.REQUIRED)
-
- println(s"HR Dept Object Graph:\n${hrDept3}\n")
-
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
- val ser = swrite(hrDept3)
- println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")
-
- println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
- }
-
- @Test def testReference2 {
-
- val ts: TypeSystem = getTypeSystem
- defineHRTypes(ts)
- val hrDept: Referenceable = defineHRDept()
-
- val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
- val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
-
- val jsonStr = InstanceSerialization.toJson(hrDept2)
- val hrDept3 = InstanceSerialization.fromJsonReferenceable(jsonStr)
-
- val hrDept4: ITypedReferenceableInstance = deptType.convert(hrDept2, Multiplicity.REQUIRED)
-
- println(s"HR Dept Object Graph:\n${hrDept4}\n")
-
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
- val ser = swrite(hrDept4)
- println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n")
-
- println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n")
-
- }
-
- @Test def testIdSerde: Unit = {
-
- val ts: TypeSystem = getTypeSystem
- defineHRTypes(ts)
- val hrDept: Referenceable = defineHRDept()
- //default state is actiev by default
- assertEquals(hrDept.getId.getState, EntityState.ACTIVE)
-
- val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department")
- val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED)
- hrDept2.getId.state = EntityState.DELETED
-
- //updated state should be maintained correctly after serialisation-deserialisation
- val deptJson: String = InstanceSerialization.toJson(hrDept2, true)
- val deserDept: Referenceable = InstanceSerialization.fromJsonReferenceable(deptJson, true)
- assertEquals(deserDept.getId.getState, EntityState.DELETED)
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/scala/org/apache/atlas/typesystem/json/TypesSerializationTest.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/json/TypesSerializationTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/json/TypesSerializationTest.scala
deleted file mode 100755
index cfd4bdb..0000000
--- a/typesystem/src/test/scala/org/apache/atlas/typesystem/json/TypesSerializationTest.scala
+++ /dev/null
@@ -1,342 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.json
-
-import com.google.common.collect.ImmutableList
-import org.apache.atlas.typesystem.types._
-import org.testng.Assert
-import org.testng.annotations.Test
-
-class TypesSerializationTest extends BaseTest with TypeHelpers {
-
- @Test def test1: Unit = {
-
- val ts = getTypeSystem
-
- val sDef = structDef("ts1", requiredAttr("a", DataTypes.INT_TYPE),
- optionalAttr("b", DataTypes.BOOLEAN_TYPE),
- optionalAttr("c", DataTypes.BYTE_TYPE),
- optionalAttr("d", DataTypes.SHORT_TYPE),
- optionalAttr("e", DataTypes.INT_TYPE),
- optionalAttr("f", DataTypes.INT_TYPE),
- optionalAttr("g", DataTypes.LONG_TYPE),
- optionalAttr("h", DataTypes.FLOAT_TYPE),
- optionalAttr("i", DataTypes.DOUBLE_TYPE),
- optionalAttr("j", DataTypes.BIGINTEGER_TYPE),
- optionalAttr("k", DataTypes.BIGDECIMAL_TYPE),
- optionalAttr("l", DataTypes.DATE_TYPE),
- optionalAttr("m", DataTypes.arrayTypeName(DataTypes.INT_TYPE)),
- optionalAttr("n", DataTypes.arrayTypeName(DataTypes.BIGDECIMAL_TYPE)),
- optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)))
-
-
- ts.defineTypes(ImmutableList.of[EnumTypeDefinition], ImmutableList.of[StructTypeDefinition](sDef),
- ImmutableList.of[HierarchicalTypeDefinition[TraitType]],
- ImmutableList.of[HierarchicalTypeDefinition[ClassType]]
- )
-
- val A: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("A", List(),
- requiredAttr("a", DataTypes.INT_TYPE),
- optionalAttr("b", DataTypes.BOOLEAN_TYPE),
- optionalAttr("c", DataTypes.BYTE_TYPE),
- optionalAttr("d", DataTypes.SHORT_TYPE))
- val B: HierarchicalTypeDefinition[TraitType] =
- createTraitTypeDef("B", Seq("A"), optionalAttr("b", DataTypes.BOOLEAN_TYPE))
- val C: HierarchicalTypeDefinition[TraitType] =
- createTraitTypeDef("C", Seq("A"), optionalAttr("c", DataTypes.BYTE_TYPE))
- val D: HierarchicalTypeDefinition[TraitType] =
- createTraitTypeDef("D", Seq("B", "C"), optionalAttr("d", DataTypes.SHORT_TYPE))
-
- defineTraits(ts, A, B, C, D)
-
- ts.defineEnumType("HiveObjectType",
- new EnumValue("GLOBAL", 1),
- new EnumValue("DATABASE", 2),
- new EnumValue("TABLE", 3),
- new EnumValue("PARTITION", 4),
- new EnumValue("COLUMN", 5))
-
- ts.defineEnumType("PrincipalType",
- new EnumValue("USER", 1),
- new EnumValue("ROLE", 2),
- new EnumValue("GROUP", 3))
-
- ts.defineEnumType("TxnState",
- new EnumValue("COMMITTED", 1),
- new EnumValue("ABORTED", 2),
- new EnumValue("OPEN", 3))
-
- ts.defineEnumType("LockLevel",
- new EnumValue("DB", 1),
- new EnumValue("TABLE", 2),
- new EnumValue("PARTITION", 3))
-
- ts.defineEnumType("TestType", "TestType-description",
- new EnumValue("A", 1),
- new EnumValue("B", 2),
- new EnumValue("C", 3))
-
- defineClassType(ts, createClassTypeDef("t4", List(),
- requiredAttr("a", DataTypes.INT_TYPE),
- optionalAttr("b", DataTypes.BOOLEAN_TYPE),
- optionalAttr("c", DataTypes.BYTE_TYPE),
- optionalAttr("d", DataTypes.SHORT_TYPE),
- optionalAttr("enum1", ts.getDataType(classOf[EnumType], "HiveObjectType")),
- optionalAttr("e", DataTypes.INT_TYPE),
- optionalAttr("f", DataTypes.INT_TYPE),
- optionalAttr("g", DataTypes.LONG_TYPE),
- optionalAttr("enum2", ts.getDataType(classOf[EnumType], "PrincipalType")),
- optionalAttr("h", DataTypes.FLOAT_TYPE),
- optionalAttr("i", DataTypes.DOUBLE_TYPE),
- optionalAttr("j", DataTypes.BIGINTEGER_TYPE),
- optionalAttr("k", DataTypes.BIGDECIMAL_TYPE),
- optionalAttr("enum3", ts.getDataType(classOf[EnumType], "TxnState")),
- optionalAttr("l", DataTypes.DATE_TYPE),
- optionalAttr("m", ts.defineArrayType(DataTypes.INT_TYPE)),
- optionalAttr("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
- optionalAttr("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
- optionalAttr("enum4", ts.getDataType(classOf[EnumType], "LockLevel"))))
-
- val deptTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Department", List(),
- requiredAttr("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("employees", String.format("array<%s>", "Person"),
- Multiplicity.COLLECTION, true, "department"))
- val personTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Person", List(),
- requiredAttr("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
- new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates")
- )
- val managerTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Manager", List("Person"),
- new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
- Multiplicity.COLLECTION, false, "manager")
- )
- val securityClearanceTypeDef: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("SecurityClearance", List(),
- requiredAttr("level", DataTypes.INT_TYPE)
- )
-
- val securityClearanceTypeDefWithDesc: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("SecurityClearance2", Some("SecurityClearance-Description"), List(),
- requiredAttr("level", DataTypes.INT_TYPE)
- )
- ts.defineTypes(ImmutableList.of[EnumTypeDefinition], ImmutableList.of[StructTypeDefinition],
- ImmutableList.of[HierarchicalTypeDefinition[TraitType]](securityClearanceTypeDef, securityClearanceTypeDefWithDesc),
- ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef))
-
- val ser = TypesSerialization.toJson(ts, _ => true)
-
- val typesDef1 = TypesSerialization.fromJson(ser)
-
- val ts1 = TypeSystem.getInstance()
- ts1.reset()
-
- typesDef1.enumTypes.foreach(ts1.defineEnumType(_))
-
- ts1.defineTypes(ImmutableList.of[EnumTypeDefinition], ImmutableList.copyOf(typesDef1.structTypes.toArray),
- ImmutableList.copyOf(typesDef1.traitTypes.toArray),
- ImmutableList.copyOf(typesDef1.classTypes.toArray)
- )
- val ser2 = TypesSerialization.toJson(ts1, _ => true)
- val typesDef2 = TypesSerialization.fromJson(ser2)
-
- Assert.assertEquals(typesDef1, typesDef2)
- }
-
- @Test def test2: Unit = {
-
- val sDef = structDef("ts1", requiredAttr("a", DataTypes.INT_TYPE),
- optionalAttr("b", DataTypes.BOOLEAN_TYPE),
- optionalAttr("c", DataTypes.BYTE_TYPE),
- optionalAttr("d", DataTypes.SHORT_TYPE),
- optionalAttr("e", DataTypes.INT_TYPE),
- optionalAttr("f", DataTypes.INT_TYPE),
- optionalAttr("g", DataTypes.LONG_TYPE),
- optionalAttr("h", DataTypes.FLOAT_TYPE),
- optionalAttr("i", DataTypes.DOUBLE_TYPE),
- optionalAttr("j", DataTypes.BIGINTEGER_TYPE),
- optionalAttr("k", DataTypes.BIGDECIMAL_TYPE),
- optionalAttr("l", DataTypes.DATE_TYPE),
- optionalAttr("m", DataTypes.arrayTypeName(DataTypes.INT_TYPE)),
- optionalAttr("n", DataTypes.arrayTypeName(DataTypes.BIGDECIMAL_TYPE)),
- optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)))
-
-
-
- val ser2 = TypesSerialization.toJson(sDef)
- val typesDef2 = TypesSerialization.fromJson(ser2)
-
- Assert.assertEquals(sDef, typesDef2.structTypes(0))
- }
-
- @Test def test3: Unit = {
-
- val sDef = structDef("ts1", requiredAttr("a", DataTypes.INT_TYPE),
- optionalAttr("b", DataTypes.BOOLEAN_TYPE),
- optionalAttr("c", DataTypes.BYTE_TYPE),
- optionalAttr("d", DataTypes.SHORT_TYPE),
- optionalAttr("e", DataTypes.INT_TYPE),
- optionalAttr("f", DataTypes.INT_TYPE),
- optionalAttr("g", DataTypes.LONG_TYPE),
- optionalAttr("h", DataTypes.FLOAT_TYPE),
- optionalAttr("i", DataTypes.DOUBLE_TYPE),
- optionalAttr("j", DataTypes.BIGINTEGER_TYPE),
- optionalAttr("k", DataTypes.BIGDECIMAL_TYPE),
- optionalAttr("l", DataTypes.DATE_TYPE),
- optionalAttr("m", DataTypes.arrayTypeName(DataTypes.INT_TYPE)),
- optionalAttr("n", DataTypes.arrayTypeName(DataTypes.BIGDECIMAL_TYPE)),
- optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)))
-
- val ser = TypesSerialization.toJson(sDef)
- val typesDef2 = TypesSerialization.fromJson(ser)
-
- Assert.assertEquals(sDef, typesDef2.structTypes(0))
-
- //Now with description
- val sDef2 = structDef("ts1", Some("ts1-description"), requiredAttr("a", DataTypes.INT_TYPE),
- optionalAttr("b", DataTypes.BOOLEAN_TYPE),
- optionalAttr("c", DataTypes.BYTE_TYPE),
- optionalAttr("d", DataTypes.SHORT_TYPE),
- optionalAttr("e", DataTypes.INT_TYPE),
- optionalAttr("f", DataTypes.INT_TYPE),
- optionalAttr("g", DataTypes.LONG_TYPE),
- optionalAttr("h", DataTypes.FLOAT_TYPE),
- optionalAttr("i", DataTypes.DOUBLE_TYPE),
- optionalAttr("j", DataTypes.BIGINTEGER_TYPE),
- optionalAttr("k", DataTypes.BIGDECIMAL_TYPE),
- optionalAttr("l", DataTypes.DATE_TYPE),
- optionalAttr("m", DataTypes.arrayTypeName(DataTypes.INT_TYPE)),
- optionalAttr("n", DataTypes.arrayTypeName(DataTypes.BIGDECIMAL_TYPE)),
- optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)))
-
- val ser2 = TypesSerialization.toJson(sDef)
- val typesDef3 = TypesSerialization.fromJson(ser2)
- Assert.assertEquals(sDef, typesDef3.structTypes(0))
-
- }
-
- @Test def test4 : Unit = {
-
- val A: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("A", List(),
- requiredAttr("a", DataTypes.INT_TYPE),
- optionalAttr("b", DataTypes.BOOLEAN_TYPE),
- optionalAttr("c", DataTypes.BYTE_TYPE),
- optionalAttr("d", DataTypes.SHORT_TYPE))
- val B: HierarchicalTypeDefinition[TraitType] =
- createTraitTypeDef("B", Seq("A"), optionalAttr("b", DataTypes.BOOLEAN_TYPE))
- val C: HierarchicalTypeDefinition[TraitType] =
- createTraitTypeDef("C", Seq("A"), optionalAttr("c", DataTypes.BYTE_TYPE))
- val D: HierarchicalTypeDefinition[TraitType] =
- createTraitTypeDef("D", Seq("B", "C"), optionalAttr("d", DataTypes.SHORT_TYPE))
- val E: HierarchicalTypeDefinition[TraitType] =
- createTraitTypeDef("E", Some("E-description"), Seq("B", "C"), optionalAttr("d", DataTypes.SHORT_TYPE))
- val typDefs = Seq(A,B,C,D,E)
- typDefs.foreach { tDef =>
- val ser2 = TypesSerialization.toJson(tDef, true)
- val typesDef2 = TypesSerialization.fromJson(ser2)
- Assert.assertEquals(tDef, typesDef2.traitTypes(0))
-
- }
- }
-
- @Test def test5 : Unit = {
- val e1 = new EnumTypeDefinition("HiveObjectType",
- new EnumValue("GLOBAL", 1),
- new EnumValue("DATABASE", 2),
- new EnumValue("TABLE", 3),
- new EnumValue("PARTITION", 4),
- new EnumValue("COLUMN", 5))
-
- val e2 = new EnumTypeDefinition("PrincipalType",
- new EnumValue("USER", 1),
- new EnumValue("ROLE", 2),
- new EnumValue("GROUP", 3))
-
- val e3 = new EnumTypeDefinition("TxnState",
- new EnumValue("COMMITTED", 1),
- new EnumValue("ABORTED", 2),
- new EnumValue("OPEN", 3))
-
- val e4 = new EnumTypeDefinition("LockLevel",
- new EnumValue("DB", 1),
- new EnumValue("TABLE", 2),
- new EnumValue("PARTITION", 3))
-
- val e5 = new EnumTypeDefinition("LockLevel", "LockLevel-description",
- new EnumValue("DB", 1),
- new EnumValue("TABLE", 2),
- new EnumValue("PARTITION", 3))
-
- val typDefs = Seq(e1,e2,e3,e4,e5)
- typDefs.foreach { tDef =>
- val ser2 = TypesSerialization.toJson(tDef)
- val typesDef2 = TypesSerialization.fromJson(ser2)
- Assert.assertEquals(tDef, typesDef2.enumTypes(0))
-
- }
- }
-
- @Test def test6 : Unit = {
- val typDef = createClassTypeDef("t4", List(),
- requiredAttr("a", DataTypes.INT_TYPE),
- optionalAttr("b", DataTypes.BOOLEAN_TYPE),
- optionalAttr("c", DataTypes.BYTE_TYPE),
- optionalAttr("d", DataTypes.SHORT_TYPE),
- optionalAttr("enum1", "HiveObjectType"),
- optionalAttr("e", DataTypes.INT_TYPE),
- optionalAttr("f", DataTypes.INT_TYPE),
- optionalAttr("g", DataTypes.LONG_TYPE),
- optionalAttr("enum2", "PrincipalType"),
- optionalAttr("h", DataTypes.FLOAT_TYPE),
- optionalAttr("i", DataTypes.DOUBLE_TYPE),
- optionalAttr("j", DataTypes.BIGINTEGER_TYPE),
- optionalAttr("k", DataTypes.BIGDECIMAL_TYPE),
- optionalAttr("enum3", "TxnState"),
- optionalAttr("l", DataTypes.DATE_TYPE),
- optionalAttr("m", DataTypes.INT_TYPE),
- optionalAttr("n", DataTypes.BIGDECIMAL_TYPE),
- optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
- optionalAttr("enum4", "LockLevel"))
-
- val deptTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Department", List(),
- requiredAttr("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("employees", String.format("array<%s>", "Person"),
- Multiplicity.COLLECTION, true, "department"))
- val personTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Person", List(),
- requiredAttr("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
- new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates")
- )
- val managerTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Manager", List("Person"),
- new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
- Multiplicity.COLLECTION, false, "manager")
- )
-
- val managerTypeDefWithDesc: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Manager", Some("Manager-description"), List("Person"),
- new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
- Multiplicity.COLLECTION, false, "manager")
- )
-
- val typDefs = Seq(typDef, deptTypeDef, personTypeDef, managerTypeDef, managerTypeDefWithDesc)
- typDefs.foreach { tDef =>
- val ser2 = TypesSerialization.toJson(tDef, false)
- val typesDef2 = TypesSerialization.fromJson(ser2)
- Assert.assertEquals(tDef, typesDef2.classTypes(0))
-
- }
-
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/pom.xml
----------------------------------------------------------------------
diff --git a/webapp/pom.xml b/webapp/pom.xml
index 782c280..569e06f 100755
--- a/webapp/pom.xml
+++ b/webapp/pom.xml
@@ -88,11 +88,6 @@
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
<artifactId>atlas-repository</artifactId>
</dependency>
@@ -262,11 +257,6 @@
</dependency>
<dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- </dependency>
-
- <dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-jsp</artifactId>
</dependency>
@@ -390,13 +380,6 @@
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <classifier>tests</classifier>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
<artifactId>atlas-intg</artifactId>
<classifier>tests</classifier>
<scope>test</scope>
@@ -638,7 +621,7 @@
</systemProperty>
<systemProperty>
<key>atlas.conf</key>
- <value>${project.build.directory}/../../typesystem/target/test-classes</value>
+ <value>${project.build.directory}/../../intg/target/test-classes</value>
</systemProperty>
</systemProperties>
<stopKey>atlas-stop</stopKey>
@@ -665,11 +648,6 @@
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- <version>3.2.0</version>
- </plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/classification/InterfaceAudience.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/classification/InterfaceAudience.java b/webapp/src/main/java/org/apache/atlas/classification/InterfaceAudience.java
new file mode 100755
index 0000000..ae162ac
--- /dev/null
+++ b/webapp/src/main/java/org/apache/atlas/classification/InterfaceAudience.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.classification;
+
+import java.lang.annotation.Documented;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * Annotation to mark methods for consumption.
+ */
+@InterfaceAudience.Public
+public class InterfaceAudience {
+ private InterfaceAudience() {
+ }
+
+ @Documented
+ @Retention(RetentionPolicy.RUNTIME)
+ public @interface Private {
+ }
+
+ @Documented
+ @Retention(RetentionPolicy.RUNTIME)
+ public @interface LimitedPrivate {
+ String[] value();
+ }
+
+ @Documented
+ @Retention(RetentionPolicy.RUNTIME)
+ public @interface Public {
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java b/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java
index 91ba111..0e2d32d 100755
--- a/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java
+++ b/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java
@@ -20,31 +20,23 @@ package org.apache.atlas.examples;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.typedef.*;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.commons.configuration.Configuration;
import org.codehaus.jettison.json.JSONArray;
+
+import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
/**
@@ -142,7 +134,7 @@ public class QuickStart {
void createTypes() throws Exception {
TypesDef typesDef = createTypeDefinitions();
- String typesAsJSON = TypesSerialization.toJson(typesDef);
+ String typesAsJSON = AtlasType.toV1Json(typesDef);
System.out.println("typesAsJSON = " + typesAsJSON);
metadataServiceClient.createType(typesAsJSON);
@@ -151,80 +143,80 @@ public class QuickStart {
}
TypesDef createTypeDefinitions() throws Exception {
- HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil
+ ClassTypeDefinition dbClsDef = TypesUtil
.createClassTypeDef(DATABASE_TYPE, DATABASE_TYPE, null,
- TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
- attrDef("description", DataTypes.STRING_TYPE), attrDef("locationUri", DataTypes.STRING_TYPE),
- attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE));
+ TypesUtil.createUniqueRequiredAttrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ attrDef("description", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("locationUri", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ attrDef("owner", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("createTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG));
- HierarchicalTypeDefinition<ClassType> storageDescClsDef = TypesUtil
- .createClassTypeDef(STORAGE_DESC_TYPE, STORAGE_DESC_TYPE, null, attrDef("location", DataTypes.STRING_TYPE),
- attrDef("inputFormat", DataTypes.STRING_TYPE), attrDef("outputFormat", DataTypes.STRING_TYPE),
- attrDef("compressed", DataTypes.STRING_TYPE, Multiplicity.REQUIRED, false, null));
+ ClassTypeDefinition storageDescClsDef = TypesUtil
+ .createClassTypeDef(STORAGE_DESC_TYPE, STORAGE_DESC_TYPE, null, attrDef("location", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ attrDef("inputFormat", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("outputFormat", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ attrDef("compressed", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED, false, null));
- HierarchicalTypeDefinition<ClassType> columnClsDef = TypesUtil
- .createClassTypeDef(COLUMN_TYPE, COLUMN_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
- attrDef("dataType", DataTypes.STRING_TYPE), attrDef("comment", DataTypes.STRING_TYPE));
+ ClassTypeDefinition columnClsDef = TypesUtil
+ .createClassTypeDef(COLUMN_TYPE, COLUMN_TYPE, null, attrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ attrDef("dataType", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("comment", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
- HierarchicalTypeDefinition<ClassType> tblClsDef = TypesUtil
- .createClassTypeDef(TABLE_TYPE, TABLE_TYPE, ImmutableSet.of("DataSet"),
+ ClassTypeDefinition tblClsDef = TypesUtil
+ .createClassTypeDef(TABLE_TYPE, TABLE_TYPE, Collections.singleton("DataSet"),
new AttributeDefinition(DB_ATTRIBUTE, DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
new AttributeDefinition("sd", STORAGE_DESC_TYPE, Multiplicity.REQUIRED, true, null),
- attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE),
- attrDef("lastAccessTime", DataTypes.LONG_TYPE), attrDef("retention", DataTypes.LONG_TYPE),
- attrDef("viewOriginalText", DataTypes.STRING_TYPE),
- attrDef("viewExpandedText", DataTypes.STRING_TYPE), attrDef("tableType", DataTypes.STRING_TYPE),
- attrDef("temporary", DataTypes.BOOLEAN_TYPE),
- new AttributeDefinition(COLUMNS_ATTRIBUTE, DataTypes.arrayTypeName(COLUMN_TYPE),
+ attrDef("owner", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("createTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG),
+ attrDef("lastAccessTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG), attrDef("retention", AtlasBaseTypeDef.ATLAS_TYPE_LONG),
+ attrDef("viewOriginalText", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ attrDef("viewExpandedText", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("tableType", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ attrDef("temporary", AtlasBaseTypeDef.ATLAS_TYPE_BOOLEAN),
+ new AttributeDefinition(COLUMNS_ATTRIBUTE, AtlasBaseTypeDef.getArrayTypeName(COLUMN_TYPE),
Multiplicity.COLLECTION, true, null));
- HierarchicalTypeDefinition<ClassType> loadProcessClsDef = TypesUtil
- .createClassTypeDef(LOAD_PROCESS_TYPE, LOAD_PROCESS_TYPE, ImmutableSet.of("Process"),
- attrDef("userName", DataTypes.STRING_TYPE), attrDef("startTime", DataTypes.LONG_TYPE),
- attrDef("endTime", DataTypes.LONG_TYPE),
- attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
- attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
- attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
- attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED));
-
- HierarchicalTypeDefinition<ClassType> viewClsDef = TypesUtil
- .createClassTypeDef(VIEW_TYPE, VIEW_TYPE, ImmutableSet.of("DataSet"),
+ ClassTypeDefinition loadProcessClsDef = TypesUtil
+ .createClassTypeDef(LOAD_PROCESS_TYPE, LOAD_PROCESS_TYPE, Collections.singleton("Process"),
+ attrDef("userName", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("startTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG),
+ attrDef("endTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG),
+ attrDef("queryText", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED),
+ attrDef("queryPlan", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED),
+ attrDef("queryId", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED),
+ attrDef("queryGraph", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED));
+
+ ClassTypeDefinition viewClsDef = TypesUtil
+ .createClassTypeDef(VIEW_TYPE, VIEW_TYPE, Collections.singleton("DataSet"),
new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
- new AttributeDefinition("inputTables", DataTypes.arrayTypeName(TABLE_TYPE),
+ new AttributeDefinition("inputTables", AtlasBaseTypeDef.getArrayTypeName(TABLE_TYPE),
Multiplicity.COLLECTION, false, null));
- HierarchicalTypeDefinition<TraitType> dimTraitDef = TypesUtil.createTraitTypeDef("Dimension_v1", "Dimension Trait", null);
+ TraitTypeDefinition dimTraitDef = TypesUtil.createTraitTypeDef("Dimension_v1", "Dimension Trait", null);
- HierarchicalTypeDefinition<TraitType> factTraitDef = TypesUtil.createTraitTypeDef("Fact_v1", "Fact Trait", null);
+ TraitTypeDefinition factTraitDef = TypesUtil.createTraitTypeDef("Fact_v1", "Fact Trait", null);
- HierarchicalTypeDefinition<TraitType> piiTraitDef = TypesUtil.createTraitTypeDef("PII_v1", "PII Trait", null);
+ TraitTypeDefinition piiTraitDef = TypesUtil.createTraitTypeDef("PII_v1", "PII Trait", null);
- HierarchicalTypeDefinition<TraitType> metricTraitDef = TypesUtil.createTraitTypeDef("Metric_v1", "Metric Trait", null);
+ TraitTypeDefinition metricTraitDef = TypesUtil.createTraitTypeDef("Metric_v1", "Metric Trait", null);
- HierarchicalTypeDefinition<TraitType> etlTraitDef = TypesUtil.createTraitTypeDef("ETL_v1", "ETL Trait", null);
+ TraitTypeDefinition etlTraitDef = TypesUtil.createTraitTypeDef("ETL_v1", "ETL Trait", null);
- HierarchicalTypeDefinition<TraitType> jdbcTraitDef = TypesUtil.createTraitTypeDef("JdbcAccess_v1", "JdbcAccess Trait", null);
+ TraitTypeDefinition jdbcTraitDef = TypesUtil.createTraitTypeDef("JdbcAccess_v1", "JdbcAccess Trait", null);
- HierarchicalTypeDefinition<TraitType> logTraitDef = TypesUtil.createTraitTypeDef("Log Data_v1", "LogData Trait", null);
+ TraitTypeDefinition logTraitDef = TypesUtil.createTraitTypeDef("Log Data_v1", "LogData Trait", null);
- return TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.of(dimTraitDef, factTraitDef, piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef, logTraitDef),
- ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef, tblClsDef, loadProcessClsDef, viewClsDef));
+ return new TypesDef(Collections.<EnumTypeDefinition>emptyList(), Collections.<StructTypeDefinition>emptyList(),
+ Arrays.asList(dimTraitDef, factTraitDef, piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef, logTraitDef),
+ Arrays.asList(dbClsDef, storageDescClsDef, columnClsDef, tblClsDef, loadProcessClsDef, viewClsDef));
}
- AttributeDefinition attrDef(String name, IDataType dT) {
+ AttributeDefinition attrDef(String name, String dT) {
return attrDef(name, dT, Multiplicity.OPTIONAL, false, null);
}
- AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m) {
+ AttributeDefinition attrDef(String name, String dT, Multiplicity m) {
return attrDef(name, dT, m, false, null);
}
- AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m, boolean isComposite,
+ AttributeDefinition attrDef(String name, String dT, Multiplicity m, boolean isComposite,
String reverseAttributeName) {
Preconditions.checkNotNull(name);
Preconditions.checkNotNull(dT);
- return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName);
+ return new AttributeDefinition(name, dT, m, isComposite, reverseAttributeName);
}
void createEntities() throws Exception {
@@ -235,35 +227,31 @@ public class QuickStart {
rawStorageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat",
true);
- List<Referenceable> salesFactColumns = ImmutableList
- .of(rawColumn(TIME_ID_COLUMN, "int", "time id"), rawColumn("product_id", "int", "product id"),
+ List<Referenceable> salesFactColumns = Arrays.asList(rawColumn(TIME_ID_COLUMN, "int", "time id"), rawColumn("product_id", "int", "product id"),
rawColumn("customer_id", "int", "customer id", "PII_v1"),
rawColumn("sales", "double", "product id", "Metric_v1"));
- List<Referenceable> logFactColumns = ImmutableList
- .of(rawColumn("time_id", "int", "time id"), rawColumn("app_id", "int", "app id"),
+ List<Referenceable> logFactColumns = Arrays.asList(rawColumn("time_id", "int", "time id"), rawColumn("app_id", "int", "app id"),
rawColumn("machine_id", "int", "machine id"), rawColumn("log", "string", "log data", "Log Data_v1"));
Id salesFact = table(SALES_FACT_TABLE, SALES_FACT_TABLE_DESCRIPTION, salesDB, sd, "Joe", "Managed",
salesFactColumns, FACT_TRAIT);
- List<Referenceable> productDimColumns = ImmutableList
- .of(rawColumn("product_id", "int", "product id"), rawColumn("product_name", "string", "product name"),
+ List<Referenceable> productDimColumns = Arrays.asList(rawColumn("product_id", "int", "product id"), rawColumn("product_name", "string", "product name"),
rawColumn("brand_name", "int", "brand name"));
Id productDim =
table(PRODUCT_DIM_TABLE, "product dimension table", salesDB, sd, "John Doe", "Managed",
productDimColumns, "Dimension_v1");
- List<Referenceable> timeDimColumns = ImmutableList
- .of(rawColumn("time_id", "int", "time id"), rawColumn("dayOfYear", "int", "day Of Year"),
+ List<Referenceable> timeDimColumns = Arrays.asList(rawColumn("time_id", "int", "time id"), rawColumn("dayOfYear", "int", "day Of Year"),
rawColumn("weekDay", "int", "week Day"));
Id timeDim = table(TIME_DIM_TABLE, "time dimension table", salesDB, sd, "John Doe", "External", timeDimColumns,
"Dimension_v1");
- List<Referenceable> customerDimColumns = ImmutableList.of(rawColumn("customer_id", "int", "customer id", "PII_v1"),
+ List<Referenceable> customerDimColumns = Arrays.asList(rawColumn("customer_id", "int", "customer id", "PII_v1"),
rawColumn("name", "string", "customer name", "PII_v1"),
rawColumn("address", "string", "customer address", "PII_v1"));
@@ -286,32 +274,32 @@ public class QuickStart {
logFactColumns, "Log Data_v1");
loadProcess(LOAD_SALES_DAILY_PROCESS, LOAD_SALES_DAILY_PROCESS_DESCRIPTION, "John ETL",
- ImmutableList.of(salesFact, timeDim),
- ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL_v1");
+ Arrays.asList(salesFact, timeDim),
+ Collections.singletonList(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL_v1");
- view(PRODUCT_DIM_VIEW, reportingDB, ImmutableList.of(productDim), "Dimension_v1", "JdbcAccess_v1");
+ view(PRODUCT_DIM_VIEW, reportingDB, Collections.singletonList(productDim), "Dimension_v1", "JdbcAccess_v1");
- view("customer_dim_view", reportingDB, ImmutableList.of(customerDim), "Dimension_v1", "JdbcAccess_v1");
+ view("customer_dim_view", reportingDB, Collections.singletonList(customerDim), "Dimension_v1", "JdbcAccess_v1");
Id salesFactMonthly =
table("sales_fact_monthly_mv", "sales fact monthly materialized view", reportingDB, sd, "Jane BI",
"Managed", salesFactColumns, "Metric_v1");
- loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL", ImmutableList.of(salesFactDaily),
- ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL_v1");
+ loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL", Collections.singletonList(salesFactDaily),
+ Collections.singletonList(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL_v1");
Id loggingFactMonthly =
table("logging_fact_monthly_mv", "logging fact monthly materialized view", logDB, sd, "Tim ETL",
"Managed", logFactColumns, "Log Data_v1");
- loadProcess("loadLogsMonthly", "hive query for monthly summary", "Tim ETL", ImmutableList.of(loggingFactDaily),
- ImmutableList.of(loggingFactMonthly), "create table as select ", "plan", "id", "graph", "ETL_v1");
+ loadProcess("loadLogsMonthly", "hive query for monthly summary", "Tim ETL", Collections.singletonList(loggingFactDaily),
+ Collections.singletonList(loggingFactMonthly), "create table as select ", "plan", "id", "graph", "ETL_v1");
}
private Id createInstance(Referenceable referenceable) throws Exception {
String typeName = referenceable.getTypeName();
- String entityJSON = InstanceSerialization.toJson(referenceable, true);
+ String entityJSON = AtlasType.toV1Json(referenceable);
System.out.println("Submitting new entity= " + entityJSON);
List<String> guids = metadataServiceClient.createEntity(entityJSON);
System.out.println("created instance for type " + typeName + ", guid: " + guids);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/examples/QuickStartV2.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/examples/QuickStartV2.java b/webapp/src/main/java/org/apache/atlas/examples/QuickStartV2.java
index a95fac3..a338ae2 100755
--- a/webapp/src/main/java/org/apache/atlas/examples/QuickStartV2.java
+++ b/webapp/src/main/java/org/apache/atlas/examples/QuickStartV2.java
@@ -19,8 +19,6 @@
package org.apache.atlas.examples;
import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasClient;
@@ -53,11 +51,7 @@ import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.ArrayUtils;
import javax.ws.rs.core.MultivaluedMap;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef.CONSTRAINT_PARAM_ATTRIBUTE;
import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef.CONSTRAINT_TYPE_INVERSE_REF;
@@ -218,7 +212,7 @@ public class QuickStartV2 {
colType.setOptions(new HashMap<String, String>() {{ put("schemaAttributes", "[\"name\", \"description\", \"owner\", \"type\", \"comment\", \"position\"]"); }});
- AtlasEntityDef tblType = AtlasTypeUtil.createClassTypeDef(TABLE_TYPE, TABLE_TYPE, "1.0", ImmutableSet.of("DataSet"),
+ AtlasEntityDef tblType = AtlasTypeUtil.createClassTypeDef(TABLE_TYPE, TABLE_TYPE, "1.0", Collections.singleton("DataSet"),
AtlasTypeUtil.createRequiredAttrDef("db", DATABASE_TYPE),
AtlasTypeUtil.createRequiredAttrDefWithConstraint("sd", STORAGE_DESC_TYPE, CONSTRAINT_TYPE_OWNED_REF, null),
AtlasTypeUtil.createOptionalAttrDef("owner", "string"),
@@ -234,7 +228,7 @@ public class QuickStartV2 {
tblType.setOptions(new HashMap<String, String>() {{ put("schemaElementsAttribute", "columns"); }});
- AtlasEntityDef procType = AtlasTypeUtil.createClassTypeDef(LOAD_PROCESS_TYPE, LOAD_PROCESS_TYPE, "1.0", ImmutableSet.of("Process"),
+ AtlasEntityDef procType = AtlasTypeUtil.createClassTypeDef(LOAD_PROCESS_TYPE, LOAD_PROCESS_TYPE, "1.0", Collections.singleton("Process"),
AtlasTypeUtil.createOptionalAttrDef("userName", "string"),
AtlasTypeUtil.createOptionalAttrDef("startTime", "long"),
AtlasTypeUtil.createOptionalAttrDef("endTime", "long"),
@@ -243,22 +237,22 @@ public class QuickStartV2 {
AtlasTypeUtil.createRequiredAttrDef("queryId", "string"),
AtlasTypeUtil.createRequiredAttrDef("queryGraph", "string"));
- AtlasEntityDef viewType = AtlasTypeUtil.createClassTypeDef(VIEW_TYPE, VIEW_TYPE, "1.0", ImmutableSet.of("DataSet"),
+ AtlasEntityDef viewType = AtlasTypeUtil.createClassTypeDef(VIEW_TYPE, VIEW_TYPE, "1.0", Collections.singleton("DataSet"),
AtlasTypeUtil.createRequiredAttrDef("db", DATABASE_TYPE),
AtlasTypeUtil.createOptionalListAttrDef("inputTables", AtlasBaseTypeDef.getArrayTypeName(TABLE_TYPE)));
- AtlasClassificationDef dimClassifDef = AtlasTypeUtil.createTraitTypeDef(DIMENSION_CLASSIFICATION, "Dimension Classification", "1.0", ImmutableSet.<String>of());
- AtlasClassificationDef factClassifDef = AtlasTypeUtil.createTraitTypeDef(FACT_CLASSIFICATION, "Fact Classification", "1.0", ImmutableSet.<String>of());
- AtlasClassificationDef piiClassifDef = AtlasTypeUtil.createTraitTypeDef(PII_CLASSIFICATION, "PII Classification", "1.0", ImmutableSet.<String>of());
- AtlasClassificationDef metricClassifDef = AtlasTypeUtil.createTraitTypeDef(METRIC_CLASSIFICATION, "Metric Classification", "1.0", ImmutableSet.<String>of());
- AtlasClassificationDef etlClassifDef = AtlasTypeUtil.createTraitTypeDef(ETL_CLASSIFICATION, "ETL Classification", "1.0", ImmutableSet.<String>of());
- AtlasClassificationDef jdbcClassifDef = AtlasTypeUtil.createTraitTypeDef(JDBC_CLASSIFICATION, "JdbcAccess Classification", "1.0", ImmutableSet.<String>of());
- AtlasClassificationDef logClassifDef = AtlasTypeUtil.createTraitTypeDef(LOGDATA_CLASSIFICATION, "LogData Classification", "1.0", ImmutableSet.<String>of());
-
- return AtlasTypeUtil.getTypesDef(ImmutableList.<AtlasEnumDef>of(),
- ImmutableList.<AtlasStructDef>of(),
- ImmutableList.of(dimClassifDef, factClassifDef, piiClassifDef, metricClassifDef, etlClassifDef, jdbcClassifDef, logClassifDef),
- ImmutableList.of(dbType, sdType, colType, tblType, procType, viewType));
+ AtlasClassificationDef dimClassifDef = AtlasTypeUtil.createTraitTypeDef(DIMENSION_CLASSIFICATION, "Dimension Classification", "1.0", Collections.<String>emptySet());
+ AtlasClassificationDef factClassifDef = AtlasTypeUtil.createTraitTypeDef(FACT_CLASSIFICATION, "Fact Classification", "1.0", Collections.<String>emptySet());
+ AtlasClassificationDef piiClassifDef = AtlasTypeUtil.createTraitTypeDef(PII_CLASSIFICATION, "PII Classification", "1.0", Collections.<String>emptySet());
+ AtlasClassificationDef metricClassifDef = AtlasTypeUtil.createTraitTypeDef(METRIC_CLASSIFICATION, "Metric Classification", "1.0", Collections.<String>emptySet());
+ AtlasClassificationDef etlClassifDef = AtlasTypeUtil.createTraitTypeDef(ETL_CLASSIFICATION, "ETL Classification", "1.0", Collections.<String>emptySet());
+ AtlasClassificationDef jdbcClassifDef = AtlasTypeUtil.createTraitTypeDef(JDBC_CLASSIFICATION, "JdbcAccess Classification", "1.0", Collections.<String>emptySet());
+ AtlasClassificationDef logClassifDef = AtlasTypeUtil.createTraitTypeDef(LOGDATA_CLASSIFICATION, "LogData Classification", "1.0", Collections.<String>emptySet());
+
+ return AtlasTypeUtil.getTypesDef(Collections.<AtlasEnumDef>emptyList(),
+ Collections.<AtlasStructDef>emptyList(),
+ Arrays.asList(dimClassifDef, factClassifDef, piiClassifDef, metricClassifDef, etlClassifDef, jdbcClassifDef, logClassifDef),
+ Arrays.asList(dbType, sdType, colType, tblType, procType, viewType));
}
void createEntities() throws Exception {
@@ -273,25 +267,25 @@ public class QuickStartV2 {
AtlasEntity storageDesc = createStorageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat", true);
// Column entities
- List<AtlasEntity> salesFactColumns = ImmutableList.of(createColumn(TIME_ID_COLUMN, "int", "time id"),
+ List<AtlasEntity> salesFactColumns = Arrays.asList(createColumn(TIME_ID_COLUMN, "int", "time id"),
createColumn(PRODUCT_ID_COLUMN, "int", "product id"),
createColumn(CUSTOMER_ID_COLUMN, "int", "customer id", PII_CLASSIFICATION),
createColumn(SALES_COLUMN, "double", "product id", METRIC_CLASSIFICATION));
- List<AtlasEntity> logFactColumns = ImmutableList.of(createColumn(TIME_ID_COLUMN, "int", "time id"),
+ List<AtlasEntity> logFactColumns = Arrays.asList(createColumn(TIME_ID_COLUMN, "int", "time id"),
createColumn(APP_ID_COLUMN, "int", "app id"),
createColumn(MACHINE_ID_COLUMN, "int", "machine id"),
createColumn(LOG_COLUMN, "string", "log data", LOGDATA_CLASSIFICATION));
- List<AtlasEntity> productDimColumns = ImmutableList.of(createColumn(PRODUCT_ID_COLUMN, "int", "product id"),
+ List<AtlasEntity> productDimColumns = Arrays.asList(createColumn(PRODUCT_ID_COLUMN, "int", "product id"),
createColumn(PRODUCT_NAME_COLUMN, "string", "product name"),
createColumn(BRAND_NAME_COLUMN, "int", "brand name"));
- List<AtlasEntity> timeDimColumns = ImmutableList.of(createColumn(TIME_ID_COLUMN, "int", "time id"),
+ List<AtlasEntity> timeDimColumns = Arrays.asList(createColumn(TIME_ID_COLUMN, "int", "time id"),
createColumn(DAY_OF_YEAR_COLUMN, "int", "day Of Year"),
createColumn(WEEKDAY_COLUMN, "int", "week Day"));
- List<AtlasEntity> customerDimColumns = ImmutableList.of(createColumn(CUSTOMER_ID_COLUMN, "int", "customer id", PII_CLASSIFICATION),
+ List<AtlasEntity> customerDimColumns = Arrays.asList(createColumn(CUSTOMER_ID_COLUMN, "int", "customer id", PII_CLASSIFICATION),
createColumn(NAME_COLUMN, "string", "customer name", PII_CLASSIFICATION),
createColumn(ADDRESS_COLUMN, "string", "customer address", PII_CLASSIFICATION));
@@ -314,23 +308,23 @@ public class QuickStartV2 {
storageDesc, "Jane BI", "Managed", salesFactColumns, METRIC_CLASSIFICATION);
// View entities
- createView(PRODUCT_DIM_VIEW, reportingDB, ImmutableList.of(productDim), DIMENSION_CLASSIFICATION, JDBC_CLASSIFICATION);
- createView(CUSTOMER_DIM_VIEW, reportingDB, ImmutableList.of(customerDim), DIMENSION_CLASSIFICATION, JDBC_CLASSIFICATION);
+ createView(PRODUCT_DIM_VIEW, reportingDB, Collections.singletonList(productDim), DIMENSION_CLASSIFICATION, JDBC_CLASSIFICATION);
+ createView(CUSTOMER_DIM_VIEW, reportingDB, Collections.singletonList(customerDim), DIMENSION_CLASSIFICATION, JDBC_CLASSIFICATION);
// Process entities
createProcess(LOAD_SALES_DAILY_PROCESS, "hive query for daily summary", "John ETL",
- ImmutableList.of(salesFact, timeDim),
- ImmutableList.of(salesFactDaily),
+ Arrays.asList(salesFact, timeDim),
+ Collections.singletonList(salesFactDaily),
"create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION);
createProcess(LOAD_SALES_MONTHLY_PROCESS, "hive query for monthly summary", "John ETL",
- ImmutableList.of(salesFactDaily),
- ImmutableList.of(salesFactMonthly),
+ Collections.singletonList(salesFactDaily),
+ Collections.singletonList(salesFactMonthly),
"create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION);
createProcess(LOAD_LOGS_MONTHLY_PROCESS, "hive query for monthly summary", "Tim ETL",
- ImmutableList.of(loggingFactDaily),
- ImmutableList.of(loggingFactMonthly),
+ Collections.singletonList(loggingFactDaily),
+ Collections.singletonList(loggingFactMonthly),
"create table as select ", "plan", "id", "graph", ETL_CLASSIFICATION);
}
@@ -364,7 +358,7 @@ public class QuickStartV2 {
private List<AtlasClassification> toAtlasClassifications(String[] traitNames) {
List<AtlasClassification> ret = new ArrayList<>();
- ImmutableList<String> traits = ImmutableList.copyOf(traitNames);
+ List<String> traits = Arrays.asList(traitNames);
if (CollectionUtils.isNotEmpty(traits)) {
for (String trait : traits) {
[30/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java b/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java
index ffe859b..3e60243 100755
--- a/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java
+++ b/repository/src/main/java/org/apache/atlas/repository/graph/GraphHelper.java
@@ -24,12 +24,15 @@ import com.google.common.collect.HashBiMap;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
-import org.apache.atlas.RequestContext;
+import org.apache.atlas.RequestContextV1;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.instance.AtlasEntity.Status;
import org.apache.atlas.model.instance.AtlasObjectId;
import org.apache.atlas.model.instance.AtlasRelationship;
import org.apache.atlas.model.typedef.AtlasRelationshipDef;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.type.AtlasStructType.AtlasAttribute;
import org.apache.atlas.type.AtlasStructType.AtlasAttribute.AtlasRelationshipEdgeDirection;
import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.RepositoryException;
@@ -43,31 +46,11 @@ import org.apache.atlas.repository.store.graph.v1.AtlasGraphUtilsV1;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasRelationshipType;
import org.apache.atlas.type.AtlasType;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.TypeNotFoundException;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.ValueConversionException;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.exception.EntityNotFoundException;
import org.apache.atlas.util.AttributeValueMap;
import org.apache.atlas.util.IndexedInstance;
-import org.apache.atlas.utils.ParamChecker;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
-import org.codehaus.jettison.json.JSONArray;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -82,7 +65,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
-import java.util.Stack;
import java.util.UUID;
import static org.apache.atlas.type.AtlasStructType.AtlasAttribute.AtlasRelationshipEdgeDirection.BOTH;
@@ -97,8 +79,6 @@ public final class GraphHelper {
private static final Logger LOG = LoggerFactory.getLogger(GraphHelper.class);
public static final String EDGE_LABEL_PREFIX = "__";
- private static final TypeSystem typeSystem = TypeSystem.getInstance();
-
public static final String RETRY_COUNT = "atlas.graph.storage.num.retries";
public static final String RETRY_DELAY = "atlas.graph.storage.retry.sleeptime.ms";
@@ -143,7 +123,7 @@ public final class GraphHelper {
}
- public AtlasVertex createVertexWithIdentity(ITypedReferenceableInstance typedInstance, Set<String> superTypeNames) {
+ public AtlasVertex createVertexWithIdentity(Referenceable typedInstance, Set<String> superTypeNames) {
final String guid = UUID.randomUUID().toString();
final AtlasVertex vertexWithIdentity = createVertexWithoutIdentity(typedInstance.getTypeName(),
@@ -153,7 +133,7 @@ public final class GraphHelper {
setProperty(vertexWithIdentity, Constants.GUID_PROPERTY_KEY, guid);
// add version information
- setProperty(vertexWithIdentity, Constants.VERSION_PROPERTY_KEY, Long.valueOf(typedInstance.getId().version));
+ setProperty(vertexWithIdentity, Constants.VERSION_PROPERTY_KEY, Long.valueOf(typedInstance.getId().getVersion()));
return vertexWithIdentity;
}
@@ -179,12 +159,12 @@ public final class GraphHelper {
setProperty(vertexWithoutIdentity, Constants.STATE_PROPERTY_KEY, Id.EntityState.ACTIVE.name());
// add timestamp information
- setProperty(vertexWithoutIdentity, Constants.TIMESTAMP_PROPERTY_KEY, RequestContext.get().getRequestTime());
+ setProperty(vertexWithoutIdentity, Constants.TIMESTAMP_PROPERTY_KEY, RequestContextV1.get().getRequestTime());
setProperty(vertexWithoutIdentity, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY,
- RequestContext.get().getRequestTime());
+ RequestContextV1.get().getRequestTime());
- setProperty(vertexWithoutIdentity, Constants.CREATED_BY_KEY, RequestContext.get().getUser());
- setProperty(vertexWithoutIdentity, Constants.MODIFIED_BY_KEY, RequestContext.get().getUser());
+ setProperty(vertexWithoutIdentity, Constants.CREATED_BY_KEY, RequestContextV1.get().getUser());
+ setProperty(vertexWithoutIdentity, Constants.MODIFIED_BY_KEY, RequestContextV1.get().getUser());
return vertexWithoutIdentity;
}
@@ -196,10 +176,10 @@ public final class GraphHelper {
AtlasEdge edge = graph.addEdge(fromVertex, toVertex, edgeLabel);
setProperty(edge, Constants.STATE_PROPERTY_KEY, Id.EntityState.ACTIVE.name());
- setProperty(edge, Constants.TIMESTAMP_PROPERTY_KEY, RequestContext.get().getRequestTime());
- setProperty(edge, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, RequestContext.get().getRequestTime());
- setProperty(edge, Constants.CREATED_BY_KEY, RequestContext.get().getUser());
- setProperty(edge, Constants.MODIFIED_BY_KEY, RequestContext.get().getUser());
+ setProperty(edge, Constants.TIMESTAMP_PROPERTY_KEY, RequestContextV1.get().getRequestTime());
+ setProperty(edge, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, RequestContextV1.get().getRequestTime());
+ setProperty(edge, Constants.CREATED_BY_KEY, RequestContextV1.get().getUser());
+ setProperty(edge, Constants.MODIFIED_BY_KEY, RequestContextV1.get().getUser());
if (LOG.isDebugEnabled()) {
LOG.debug("Added {}", string(edge));
@@ -673,17 +653,6 @@ public final class GraphHelper {
return prefix + "." + key;
}
- public static String getQualifiedFieldName(ITypedInstance typedInstance, AttributeInfo attributeInfo) throws AtlasException {
- IDataType dataType = typeSystem.getDataType(IDataType.class, typedInstance.getTypeName());
- return getQualifiedFieldName(dataType, attributeInfo.name);
- }
-
- public static String getQualifiedFieldName(IDataType dataType, String attributeName) throws AtlasException {
- return dataType.getTypeCategory() == DataTypes.TypeCategory.STRUCT ? dataType.getName() + "." + attributeName
- // else class or trait
- : ((HierarchicalType) dataType).getQualifiedName(attributeName);
- }
-
public static String getTraitLabel(String typeName, String attrName) {
return attrName;
}
@@ -710,13 +679,8 @@ public final class GraphHelper {
return superTypes;
}
- public static String getEdgeLabel(ITypedInstance typedInstance, AttributeInfo aInfo) throws AtlasException {
- IDataType dataType = typeSystem.getDataType(IDataType.class, typedInstance.getTypeName());
- return getEdgeLabel(dataType, aInfo);
- }
-
- public static String getEdgeLabel(IDataType dataType, AttributeInfo aInfo) throws AtlasException {
- return GraphHelper.EDGE_LABEL_PREFIX + getQualifiedFieldName(dataType, aInfo.name);
+ public static String getEdgeLabel(AtlasAttribute aInfo) throws AtlasException {
+ return GraphHelper.EDGE_LABEL_PREFIX + aInfo.getQualifiedName();
}
public static Id getIdFromVertex(String dataTypeName, AtlasVertex vertex) {
@@ -784,26 +748,24 @@ public final class GraphHelper {
* @return
* @throws AtlasException
*/
- public AtlasVertex getVertexForInstanceByUniqueAttribute(ClassType classType, IReferenceableInstance instance)
+ public AtlasVertex getVertexForInstanceByUniqueAttribute(AtlasEntityType classType, Referenceable instance)
throws AtlasException {
if (LOG.isDebugEnabled()) {
LOG.debug("Checking if there is an instance with the same unique attributes for instance {}", instance.toShortString());
}
AtlasVertex result = null;
- for (AttributeInfo attributeInfo : classType.fieldMapping().fields.values()) {
- if (attributeInfo.isUnique) {
- String propertyKey = getQualifiedFieldName(classType, attributeInfo.name);
- try {
- result = findVertex(propertyKey, instance.get(attributeInfo.name),
- Constants.ENTITY_TYPE_PROPERTY_KEY, classType.getName(),
- Constants.STATE_PROPERTY_KEY, Id.EntityState.ACTIVE.name());
- if (LOG.isDebugEnabled()) {
- LOG.debug("Found vertex by unique attribute : {}={}", propertyKey, instance.get(attributeInfo.name));
- }
- } catch (EntityNotFoundException e) {
- //Its ok if there is no entity with the same unique value
+ for (AtlasAttribute attributeInfo : classType.getUniqAttributes().values()) {
+ String propertyKey = attributeInfo.getQualifiedName();
+ try {
+ result = findVertex(propertyKey, instance.get(attributeInfo.getName()),
+ Constants.ENTITY_TYPE_PROPERTY_KEY, classType.getTypeName(),
+ Constants.STATE_PROPERTY_KEY, Id.EntityState.ACTIVE.name());
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Found vertex by unique attribute : {}={}", propertyKey, instance.get(attributeInfo.getName()));
}
+ } catch (EntityNotFoundException e) {
+ //Its ok if there is no entity with the same unique value
}
}
@@ -821,23 +783,21 @@ public final class GraphHelper {
* @return
* @throws AtlasException
*/
- public List<AtlasVertex> getVerticesForInstancesByUniqueAttribute(ClassType classType, List<? extends IReferenceableInstance> instancesForClass) throws AtlasException {
+ public List<AtlasVertex> getVerticesForInstancesByUniqueAttribute(AtlasEntityType classType, List<? extends Referenceable> instancesForClass) throws AtlasException {
//For each attribute, need to figure out what values to search for and which instance(s)
//those values correspond to.
Map<String, AttributeValueMap> map = new HashMap<String, AttributeValueMap>();
- for (AttributeInfo attributeInfo : classType.fieldMapping().fields.values()) {
- if (attributeInfo.isUnique) {
- String propertyKey = getQualifiedFieldName(classType, attributeInfo.name);
- AttributeValueMap mapForAttribute = new AttributeValueMap();
- for(int idx = 0; idx < instancesForClass.size(); idx++) {
- IReferenceableInstance instance = instancesForClass.get(idx);
- Object value = instance.get(attributeInfo.name);
- mapForAttribute.put(value, instance, idx);
- }
- map.put(propertyKey, mapForAttribute);
+ for (AtlasAttribute attributeInfo : classType.getUniqAttributes().values()) {
+ String propertyKey = attributeInfo.getQualifiedName();
+ AttributeValueMap mapForAttribute = new AttributeValueMap();
+ for(int idx = 0; idx < instancesForClass.size(); idx++) {
+ Referenceable instance = instancesForClass.get(idx);
+ Object value = instance.get(attributeInfo.getName());
+ mapForAttribute.put(value, instance, idx);
}
+ map.put(propertyKey, mapForAttribute);
}
AtlasVertex[] result = new AtlasVertex[instancesForClass.size()];
@@ -849,7 +809,7 @@ public final class GraphHelper {
//construct gremlin query
AtlasGraphQuery query = graph.query();
- query.has(Constants.ENTITY_TYPE_PROPERTY_KEY, classType.getName());
+ query.has(Constants.ENTITY_TYPE_PROPERTY_KEY, classType.getTypeName());
query.has(Constants.STATE_PROPERTY_KEY,Id.EntityState.ACTIVE.name());
List<AtlasGraphQuery> orChildren = new ArrayList<AtlasGraphQuery>();
@@ -965,6 +925,7 @@ public final class GraphHelper {
* @return set of VertexInfo for all composite entities
* @throws AtlasException
*/
+ /*
public Set<VertexInfo> getCompositeVertices(AtlasVertex entityVertex) throws AtlasException {
Set<VertexInfo> result = new HashSet<>();
Stack<AtlasVertex> vertices = new Stack<>();
@@ -1035,20 +996,22 @@ public final class GraphHelper {
}
return result;
}
+ */
- public static ITypedReferenceableInstance[] deserializeClassInstances(TypeSystem typeSystem, String entityInstanceDefinition)
+ /*
+ public static Referenceable[] deserializeClassInstances(AtlasTypeRegistry typeRegistry, String entityInstanceDefinition)
throws AtlasException {
try {
JSONArray referableInstances = new JSONArray(entityInstanceDefinition);
- ITypedReferenceableInstance[] instances = new ITypedReferenceableInstance[referableInstances.length()];
+ Referenceable[] instances = new Referenceable[referableInstances.length()];
for (int index = 0; index < referableInstances.length(); index++) {
Referenceable entityInstance =
- InstanceSerialization.fromJsonReferenceable(referableInstances.getString(index), true);
- ITypedReferenceableInstance typedInstrance = getTypedReferenceableInstance(typeSystem, entityInstance);
+ AtlasType.fromV1Json(referableInstances.getString(index), Referenceable.class);
+ Referenceable typedInstrance = getTypedReferenceableInstance(typeRegistry, entityInstance);
instances[index] = typedInstrance;
}
return instances;
- } catch(ValueConversionException | TypeNotFoundException e) {
+ } catch(TypeNotFoundException e) {
throw e;
} catch (Exception e) { // exception from deserializer
LOG.error("Unable to deserialize json={}", entityInstanceDefinition, e);
@@ -1056,28 +1019,22 @@ public final class GraphHelper {
}
}
- public static ITypedReferenceableInstance getTypedReferenceableInstance(TypeSystem typeSystem, Referenceable entityInstance)
+ public static Referenceable getTypedReferenceableInstance(AtlasTypeRegistry typeRegistry, Referenceable entityInstance)
throws AtlasException {
final String entityTypeName = ParamChecker.notEmpty(entityInstance.getTypeName(), "Entity type cannot be null");
- ClassType entityType = typeSystem.getDataType(ClassType.class, entityTypeName);
+ AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entityTypeName);
//Both assigned id and values are required for full update
//classtype.convert() will remove values if id is assigned. So, set temp id, convert and
// then replace with original id
Id origId = entityInstance.getId();
- entityInstance.replaceWithNewId(new Id(entityInstance.getTypeName()));
- ITypedReferenceableInstance typedInstrance = entityType.convert(entityInstance, Multiplicity.REQUIRED);
- ((ReferenceableInstance)typedInstrance).replaceWithNewId(origId);
+ entityInstance.setId(new Id(entityInstance.getTypeName()));
+ Referenceable typedInstrance = new Referenceable(entityInstance);
+ typedInstrance.setId(origId);
return typedInstrance;
}
-
- public static boolean isReference(IDataType type) {
-
- return type.getTypeCategory() == DataTypes.TypeCategory.STRUCT ||
- type.getTypeCategory() == DataTypes.TypeCategory.CLASS;
-
- }
+ */
public static boolean isInternalType(AtlasVertex vertex) {
return vertex != null && isInternalType(getTypeName(vertex));
@@ -1087,9 +1044,9 @@ public final class GraphHelper {
return typeName != null && typeName.startsWith(Constants.INTERNAL_PROPERTY_KEY_PREFIX);
}
- public static void setArrayElementsProperty(IDataType elementType, AtlasVertex instanceVertex, String propertyName, List<Object> values) {
+ public static void setArrayElementsProperty(AtlasType elementType, AtlasVertex instanceVertex, String propertyName, List<Object> values) {
String actualPropertyName = GraphHelper.encodePropertyKey(propertyName);
- if(GraphHelper.isReference(elementType)) {
+ if(AtlasGraphUtilsV1.isReference(elementType)) {
setListPropertyFromElementIds(instanceVertex, actualPropertyName, (List)values);
}
else {
@@ -1097,9 +1054,9 @@ public final class GraphHelper {
}
}
- public static void setMapValueProperty(IDataType elementType, AtlasVertex instanceVertex, String propertyName, Object value) {
+ public static void setMapValueProperty(AtlasType elementType, AtlasVertex instanceVertex, String propertyName, Object value) {
String actualPropertyName = GraphHelper.encodePropertyKey(propertyName);
- if(GraphHelper.isReference(elementType)) {
+ if(AtlasGraphUtilsV1.isReference(elementType)) {
instanceVertex.setPropertyFromElementId(actualPropertyName, (AtlasEdge)value);
}
else {
@@ -1107,16 +1064,6 @@ public final class GraphHelper {
}
}
- public static Object getMapValueProperty(IDataType elementType, AtlasVertex instanceVertex, String propertyName) {
- String actualPropertyName = GraphHelper.encodePropertyKey(propertyName);
- if(GraphHelper.isReference(elementType)) {
- return instanceVertex.getProperty(actualPropertyName, AtlasEdge.class);
- }
- else {
- return instanceVertex.getProperty(actualPropertyName, Object.class).toString();
- }
- }
-
public static Object getMapValueProperty(AtlasType elementType, AtlasVertex instanceVertex, String propertyName) {
String vertexPropertyName = GraphHelper.encodePropertyKey(propertyName);
@@ -1138,16 +1085,6 @@ public final class GraphHelper {
}
}
- public static List<Object> getArrayElementsProperty(IDataType elementType, AtlasVertex instanceVertex, String propertyName) {
- String actualPropertyName = GraphHelper.encodePropertyKey(propertyName);
- if(GraphHelper.isReference(elementType)) {
- return (List)instanceVertex.getListProperty(actualPropertyName, AtlasEdge.class);
- }
- else {
- return (List)instanceVertex.getListProperty(actualPropertyName);
- }
- }
-
public static void dumpToLog(final AtlasGraph<?,?> graph) {
LOG.debug("*******************Graph Dump****************************");
LOG.debug("Vertices of {}", graph);
@@ -1162,7 +1099,7 @@ public final class GraphHelper {
LOG.debug("*******************Graph Dump****************************");
}
- public static String string(ITypedReferenceableInstance instance) {
+ public static String string(Referenceable instance) {
return String.format("entity[type=%s guid=%s]", instance.getTypeName(), instance.getId()._getId());
}
@@ -1245,6 +1182,7 @@ public final class GraphHelper {
return instanceVertexId;
}
+ /*
public static AttributeInfo getAttributeInfoForSystemAttributes(String field) {
switch (field) {
case Constants.STATE_PROPERTY_KEY:
@@ -1259,6 +1197,7 @@ public final class GraphHelper {
}
return null;
}
+ */
public static boolean elementExists(AtlasElement v) {
return v != null && v.exists();
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/GraphSchemaInitializer.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/GraphSchemaInitializer.java b/repository/src/main/java/org/apache/atlas/repository/graph/GraphSchemaInitializer.java
deleted file mode 100644
index e877680..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/graph/GraphSchemaInitializer.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.setup.SetupException;
-import org.apache.atlas.setup.SetupStep;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Component;
-
-/**
- * A {@link SetupStep} that initializes the Graph backend for Atlas.
- *
- * This class will initialize the specific backend implementation specified in
- * the Atlas configuration for the key atlas.graph.storage.backend.
- */
-@Component
-public class GraphSchemaInitializer implements SetupStep {
-
- private static final Logger LOG = LoggerFactory.getLogger(GraphSchemaInitializer.class);
-
- @Override
- public void run() throws SetupException {
- LOG.info("Initializing graph schema backend.");
- try {
- // The implementation of this method internally creates the schema.
- AtlasGraphProvider.getGraphInstance();
- LOG.info("Completed initializing graph schema backend.");
- } catch (Exception e) {
- LOG.error("Could not initialize graph schema backend due to exception, {}", e.getMessage(), e);
- throw new SetupException("Could not initialize graph schema due to exception", e);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/GraphToTypedInstanceMapper.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/GraphToTypedInstanceMapper.java b/repository/src/main/java/org/apache/atlas/repository/graph/GraphToTypedInstanceMapper.java
deleted file mode 100644
index d7a8fa9..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/graph/GraphToTypedInstanceMapper.java
+++ /dev/null
@@ -1,452 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.ITypedInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.persistence.AtlasSystemAttributes;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructType;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Component;
-
-import javax.inject.Inject;
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.atlas.repository.graph.GraphHelper.string;
-
-@Component
-@Deprecated
-public final class GraphToTypedInstanceMapper {
-
- private static final Logger LOG = LoggerFactory.getLogger(GraphToTypedInstanceMapper.class);
- private static TypeSystem typeSystem = TypeSystem.getInstance();
- private static final GraphHelper graphHelper = GraphHelper.getInstance();
-
- private final AtlasGraph atlasGraph;
-
- @Inject
- public GraphToTypedInstanceMapper(AtlasGraph atlasGraph) {
- this.atlasGraph = atlasGraph;
- }
-
- public ITypedReferenceableInstance mapGraphToTypedInstance(String guid, AtlasVertex instanceVertex)
- throws AtlasException {
-
- if(LOG.isDebugEnabled()) {
- //We don't do a cache check here since we want that to be at a higher level
- //where the vertex lookup can also be avoided. However, this is a convenient
- //place to add a check to see if there are any places that were missed.
- if(RequestContext.get().getInstanceV1(guid) != null) {
- LOG.warn("Looking up previously cached guid at: ", new Exception());
- }
-
- LOG.debug("Mapping graph root vertex {} to typed instance for guid {}", instanceVertex, guid);
- }
-
- String typeName = GraphHelper.getSingleValuedProperty(instanceVertex, Constants.ENTITY_TYPE_PROPERTY_KEY, String.class);
- List<String> traits = GraphHelper.getTraitNames(instanceVertex);
- String state = GraphHelper.getStateAsString(instanceVertex);
- String createdBy = GraphHelper.getCreatedByAsString(instanceVertex);
- String modifiedBy = GraphHelper.getModifiedByAsString(instanceVertex);
- Date createdTime = new Date(GraphHelper.getCreatedTime(instanceVertex));
- Date modifiedTime = new Date(GraphHelper.getModifiedTime(instanceVertex));
- AtlasSystemAttributes systemAttributes = new AtlasSystemAttributes(createdBy, modifiedBy, createdTime, modifiedTime);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Found createdBy : {} modifiedBy : {} createdTime: {} modifedTime: {}", createdBy, modifiedBy, createdTime, modifiedTime);
- }
-
- Id id = new Id(guid, Integer.parseInt(String.valueOf(GraphHelper.getProperty(instanceVertex, Constants.VERSION_PROPERTY_KEY))), typeName, state);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Created id {} for instance type {}", id, typeName);
- }
-
- ClassType classType = typeSystem.getDataType(ClassType.class, typeName);
- ITypedReferenceableInstance typedInstance =
- classType.createInstance(id, systemAttributes, traits.toArray(new String[traits.size()]));
-
- mapVertexToInstance(instanceVertex, typedInstance, classType.fieldMapping().fields);
- mapVertexToInstanceTraits(instanceVertex, typedInstance, traits);
- RequestContext.get().cache(typedInstance);
- return typedInstance;
- }
-
- private void mapVertexToInstanceTraits(AtlasVertex instanceVertex, ITypedReferenceableInstance typedInstance,
- List<String> traits) throws AtlasException {
- for (String traitName : traits) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("mapping trait {} to instance", traitName);
- }
-
- TraitType traitType = typeSystem.getDataType(TraitType.class, traitName);
- mapVertexToTraitInstance(instanceVertex, typedInstance, traitName, traitType);
- }
- }
-
- public void mapVertexToInstance(AtlasVertex instanceVertex, ITypedInstance typedInstance,
- Map<String, AttributeInfo> fields) throws AtlasException {
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Mapping vertex {} to instance {} for fields", instanceVertex, typedInstance.getTypeName(),
- fields);
- }
-
- for (AttributeInfo attributeInfo : fields.values()) {
- mapVertexToAttribute(instanceVertex, typedInstance, attributeInfo);
- }
- }
-
- private void mapVertexToAttribute(AtlasVertex instanceVertex, ITypedInstance typedInstance,
- AttributeInfo attributeInfo) throws AtlasException {
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Mapping attributeInfo {}", attributeInfo.name);
- }
-
- final IDataType dataType = attributeInfo.dataType();
- final String vertexPropertyName = GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo);
- String relationshipLabel = GraphHelper.getEdgeLabel(typedInstance, attributeInfo);
-
- switch (dataType.getTypeCategory()) {
- case PRIMITIVE:
- mapVertexToPrimitive(instanceVertex, typedInstance, attributeInfo);
- break; // add only if vertex has this attribute
-
- case ENUM:
- Object propertyValue = GraphHelper.getProperty(instanceVertex, vertexPropertyName);
- if (propertyValue == null) {
- return;
- }
-
- typedInstance.set(attributeInfo.name, dataType.convert(propertyValue, Multiplicity.REQUIRED));
- break;
-
- case ARRAY:
- mapVertexToArrayInstance(instanceVertex, typedInstance, attributeInfo, vertexPropertyName);
- break;
-
- case MAP:
- mapVertexToMapInstance(instanceVertex, typedInstance, attributeInfo, vertexPropertyName);
- break;
-
- case STRUCT:
- ITypedStruct structInstance = mapVertexToStructInstance(instanceVertex,
- (StructType) attributeInfo.dataType(), relationshipLabel, null);
- typedInstance.set(attributeInfo.name, structInstance);
- break;
-
- case TRAIT:
- // do NOTHING - handled in class
- break;
-
- case CLASS:
- AtlasEdge nullEdge = null;
- Object idOrInstance = mapVertexToClassReference(instanceVertex, attributeInfo, relationshipLabel,
- attributeInfo.dataType(), nullEdge);
- if (idOrInstance != null) {
- typedInstance.set(attributeInfo.name, idOrInstance);
- }
- break;
-
- default:
- break;
- }
- }
-
- private Object mapVertexToClassReference(AtlasVertex instanceVertex, AttributeInfo attributeInfo,
- String relationshipLabel, IDataType dataType, AtlasEdge optionalEdge) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
- }
-
- AtlasEdge edge = null;
- if (optionalEdge == null) {
- edge = graphHelper.getEdgeForLabel(instanceVertex, relationshipLabel);
- } else {
- edge = optionalEdge;
- }
-
- if (GraphHelper.elementExists(edge)) {
- final AtlasVertex referenceVertex = edge.getInVertex();
- final String guid = GraphHelper.getSingleValuedProperty(referenceVertex, Constants.GUID_PROPERTY_KEY, String.class);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Found vertex {} for label {} with guid {}", referenceVertex, relationshipLabel, guid);
- }
-
- if (attributeInfo.isComposite) {
- //Also, when you retrieve a type's instance, you get the complete object graph of the composites
- LOG.debug("Found composite, mapping vertex to instance");
- ITypedReferenceableInstance cached = RequestContext.get().getInstanceV1(guid);
- if(cached != null) {
- return cached;
- }
- return mapGraphToTypedInstance(guid, referenceVertex);
- } else {
- String state = GraphHelper.getStateAsString(referenceVertex);
- Id referenceId = new Id(guid, GraphHelper.getVersion(referenceVertex).intValue(), dataType.getName(), state);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Found non-composite, adding id {} ", referenceId);
- }
-
- return referenceId;
- }
- }
-
- return null;
- }
-
- @SuppressWarnings("unchecked")
- private void mapVertexToArrayInstance(AtlasVertex<?,?> instanceVertex, ITypedInstance typedInstance,
- AttributeInfo attributeInfo, String propertyName) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name);
- }
-
- final DataTypes.ArrayType arrayType = (DataTypes.ArrayType) attributeInfo.dataType();
- final IDataType elementType = arrayType.getElemType();
-
- List<Object> list = GraphHelper.getArrayElementsProperty(elementType, instanceVertex, propertyName);
-
- if (list == null || list.size() == 0) {
- return;
- }
-
- String edgeLabel = GraphHelper.EDGE_LABEL_PREFIX + propertyName;
- ArrayList values = new ArrayList();
- for (Object aList : list) {
- values.add(mapVertexToCollectionEntry(instanceVertex, attributeInfo, elementType, aList,
- edgeLabel));
- }
-
- if (values.size() > 0) {
- typedInstance.set(attributeInfo.name, values);
- }
- }
-
- private Object mapVertexToCollectionEntry(AtlasVertex instanceVertex, AttributeInfo attributeInfo,
- IDataType elementType, Object value, String edgeLabel) throws AtlasException {
- switch (elementType.getTypeCategory()) {
- case PRIMITIVE:
- case ENUM:
- return value;
-
- case ARRAY:
- case MAP:
- case TRAIT:
- // do nothing
- break;
-
- case STRUCT:
- return mapVertexToStructInstance(instanceVertex, (StructType) elementType, edgeLabel, (AtlasEdge) value);
-
- case CLASS:
- return mapVertexToClassReference(instanceVertex, attributeInfo, edgeLabel, elementType, (AtlasEdge) value);
-
- default:
- break;
- }
-
- throw new IllegalArgumentException();
- }
-
- @SuppressWarnings("unchecked")
- private void mapVertexToMapInstance(AtlasVertex<?,?> instanceVertex, ITypedInstance typedInstance,
- AttributeInfo attributeInfo, final String propertyName) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("mapping vertex {} to array {}", instanceVertex, attributeInfo.name);
- }
-
- List<String> keys = GraphHelper.getListProperty(instanceVertex, propertyName);
- if (keys == null || keys.size() == 0) {
- return;
- }
- DataTypes.MapType mapType = (DataTypes.MapType) attributeInfo.dataType();
- final IDataType valueType = mapType.getValueType();
-
- HashMap<String,Object> values = new HashMap<>();
- for (String key : keys) {
- final String keyPropertyName = propertyName + "." + key;
- final String edgeLabel = GraphHelper.EDGE_LABEL_PREFIX + keyPropertyName;
- final Object keyValue = GraphHelper.getMapValueProperty(valueType, instanceVertex, keyPropertyName);
- Object mapValue = mapVertexToCollectionEntry(instanceVertex, attributeInfo, valueType, keyValue, edgeLabel);
- if (mapValue != null) {
- values.put(key, mapValue);
- }
- }
-
- if (!values.isEmpty()) {
- typedInstance.set(attributeInfo.name, values);
- }
- }
-
- private ITypedStruct mapVertexToStructInstance(AtlasVertex instanceVertex, StructType structType,
- String relationshipLabel, AtlasEdge optionalEdge) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("mapping {} to struct {}", string(instanceVertex), relationshipLabel);
- }
-
- ITypedStruct structInstance = null;
-
- AtlasEdge edge;
- if (optionalEdge == null) {
- edge = graphHelper.getEdgeForLabel(instanceVertex, relationshipLabel);
- } else {
- edge = optionalEdge;
- }
-
- if (GraphHelper.elementExists(edge)) {
- structInstance = structType.createInstance();
- AtlasVertex structInstanceVertex = edge.getInVertex();
- if (LOG.isDebugEnabled()) {
- LOG.debug("Found struct instance {}, mapping to instance {} ", string(structInstanceVertex),
- structInstance.getTypeName());
- }
-
- mapVertexToInstance(structInstanceVertex, structInstance, structType.fieldMapping().fields);
- }
- return structInstance;
- }
-
- private void mapVertexToTraitInstance(AtlasVertex instanceVertex, ITypedReferenceableInstance typedInstance,
- String traitName, TraitType traitType) throws AtlasException {
- ITypedStruct traitInstance = (ITypedStruct) typedInstance.getTrait(traitName);
-
- mapVertexToTraitInstance(instanceVertex, typedInstance.getTypeName(), traitName, traitType, traitInstance);
- }
-
- private void mapVertexToTraitInstance(AtlasVertex<?,?> instanceVertex, String typedInstanceTypeName, String traitName,
- TraitType traitType, ITypedStruct traitInstance) throws AtlasException {
- String relationshipLabel = GraphHelper.getTraitLabel(typedInstanceTypeName, traitName);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Finding edge for {} -> label {} ", instanceVertex, relationshipLabel);
- }
-
- for (AtlasEdge<?,?> edge : instanceVertex.getEdges(AtlasEdgeDirection.OUT, relationshipLabel)) {
- final AtlasVertex<?,?> traitInstanceVertex = edge.getInVertex();
- if (traitInstanceVertex != null) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Found trait instance vertex {}, mapping to instance {} ", traitInstanceVertex,
- traitInstance.getTypeName());
- }
-
- mapVertexToInstance(traitInstanceVertex, traitInstance, traitType.fieldMapping().fields);
- break;
- }
- }
- }
-
- private void mapVertexToPrimitive(AtlasVertex<?,?> instanceVertex, ITypedInstance typedInstance,
- AttributeInfo attributeInfo) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Adding primitive {} from vertex {}", attributeInfo, instanceVertex);
- }
-
- final String vertexPropertyName = GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo);
- if (GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, Object.class) == null) {
- return;
- }
-
- if (attributeInfo.dataType() == DataTypes.STRING_TYPE) {
- typedInstance.setString(attributeInfo.name, GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, String.class));
- } else if (attributeInfo.dataType() == DataTypes.SHORT_TYPE) {
- typedInstance.setShort(attributeInfo.name, GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, Short.class));
- } else if (attributeInfo.dataType() == DataTypes.INT_TYPE) {
- typedInstance.setInt(attributeInfo.name, GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, Integer.class));
- } else if (attributeInfo.dataType() == DataTypes.BIGINTEGER_TYPE) {
- typedInstance.setBigInt(attributeInfo.name, GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, BigInteger.class));
- } else if (attributeInfo.dataType() == DataTypes.BOOLEAN_TYPE) {
- typedInstance.setBoolean(attributeInfo.name, GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, Boolean.class));
- } else if (attributeInfo.dataType() == DataTypes.BYTE_TYPE) {
- typedInstance.setByte(attributeInfo.name, GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, Byte.class));
- } else if (attributeInfo.dataType() == DataTypes.LONG_TYPE) {
- typedInstance.setLong(attributeInfo.name, GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, Long.class));
- } else if (attributeInfo.dataType() == DataTypes.FLOAT_TYPE) {
- typedInstance.setFloat(attributeInfo.name, GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, Float.class));
- } else if (attributeInfo.dataType() == DataTypes.DOUBLE_TYPE) {
- typedInstance.setDouble(attributeInfo.name, GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, Double.class));
- } else if (attributeInfo.dataType() == DataTypes.BIGDECIMAL_TYPE) {
- typedInstance
- .setBigDecimal(attributeInfo.name, GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, BigDecimal.class));
- } else if (attributeInfo.dataType() == DataTypes.DATE_TYPE) {
- final Long dateVal = GraphHelper.getSingleValuedProperty(instanceVertex, vertexPropertyName, Long.class);
- typedInstance.setDate(attributeInfo.name, new Date(dateVal));
- }
- }
-
-
- public ITypedInstance getReferredEntity(String edgeId, IDataType<?> referredType) throws AtlasException {
- final AtlasEdge edge = getGraph().getEdge(edgeId);
- if (edge != null) {
- final AtlasVertex referredVertex = edge.getInVertex();
- if (referredVertex != null) {
- switch (referredType.getTypeCategory()) {
- case STRUCT:
- if (LOG.isDebugEnabled()) {
- LOG.debug("Found struct instance vertex {}, mapping to instance {} ", referredVertex,
- referredType.getName());
- }
-
- StructType structType = (StructType) referredType;
- ITypedStruct instance = structType.createInstance();
- Map<String, AttributeInfo> fields = structType.fieldMapping().fields;
- mapVertexToInstance(referredVertex, instance, fields);
- return instance;
- case CLASS:
- //TODO isComposite handling for class loads
- return GraphHelper.getIdFromVertex(referredType.getName(), referredVertex);
- default:
- throw new UnsupportedOperationException("Loading " + referredType.getTypeCategory() + " is not supported");
- }
- }
- }
- return null;
- }
-
- private AtlasGraph getGraph() throws RepositoryException {
- return atlasGraph;
- }
-}
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/HardDeleteHandler.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/HardDeleteHandler.java b/repository/src/main/java/org/apache/atlas/repository/graph/HardDeleteHandler.java
deleted file mode 100644
index e00ef96..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/graph/HardDeleteHandler.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.annotation.ConditionalOnAtlasProperty;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.springframework.stereotype.Component;
-
-import javax.inject.Inject;
-
-@Component
-@ConditionalOnAtlasProperty(property = "atlas.DeleteHandler.impl")
-public class HardDeleteHandler extends DeleteHandler {
-
- @Inject
- public HardDeleteHandler(TypeSystem typeSystem) {
- super(typeSystem, true, false);
- }
-
- @Override
- protected void _deleteVertex(AtlasVertex instanceVertex, boolean force) {
- graphHelper.removeVertex(instanceVertex);
- }
-
- @Override
- protected void deleteEdge(AtlasEdge edge, boolean force) throws AtlasException {
- graphHelper.removeEdge(edge);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/SoftDeleteHandler.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/SoftDeleteHandler.java b/repository/src/main/java/org/apache/atlas/repository/graph/SoftDeleteHandler.java
deleted file mode 100644
index b37fe75..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/graph/SoftDeleteHandler.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.annotation.ConditionalOnAtlasProperty;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.springframework.stereotype.Component;
-
-import javax.inject.Inject;
-
-import static org.apache.atlas.repository.Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY;
-import static org.apache.atlas.repository.Constants.MODIFIED_BY_KEY;
-import static org.apache.atlas.repository.Constants.STATE_PROPERTY_KEY;
-
-@Component
-@ConditionalOnAtlasProperty(property = "atlas.DeleteHandler.impl", isDefault = true)
-public class SoftDeleteHandler extends DeleteHandler {
-
- @Inject
- public SoftDeleteHandler(TypeSystem typeSystem) {
- super(typeSystem, false, true);
- }
-
- @Override
- protected void _deleteVertex(AtlasVertex instanceVertex, boolean force) {
- if (force) {
- graphHelper.removeVertex(instanceVertex);
- } else {
- Id.EntityState state = GraphHelper.getState(instanceVertex);
- if (state != Id.EntityState.DELETED) {
- GraphHelper.setProperty(instanceVertex, STATE_PROPERTY_KEY, Id.EntityState.DELETED.name());
- GraphHelper.setProperty(instanceVertex, MODIFICATION_TIMESTAMP_PROPERTY_KEY,
- RequestContext.get().getRequestTime());
- GraphHelper.setProperty(instanceVertex, MODIFIED_BY_KEY, RequestContext.get().getUser());
- }
- }
- }
-
- @Override
- protected void deleteEdge(AtlasEdge edge, boolean force) throws AtlasException {
- if (force) {
- graphHelper.removeEdge(edge);
- } else {
- Id.EntityState state = GraphHelper.getState(edge);
- if (state != Id.EntityState.DELETED) {
- GraphHelper.setProperty(edge, STATE_PROPERTY_KEY, Id.EntityState.DELETED.name());
- GraphHelper
- .setProperty(edge, MODIFICATION_TIMESTAMP_PROPERTY_KEY, RequestContext.get().getRequestTime());
- GraphHelper.setProperty(edge, MODIFIED_BY_KEY, RequestContext.get().getUser());
- }
- }
- }
-}
[40/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/model/notification/HookNotification.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/model/notification/HookNotification.java b/intg/src/main/java/org/apache/atlas/model/notification/HookNotification.java
new file mode 100644
index 0000000..ea77a20
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/model/notification/HookNotification.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.model.notification;
+
+import org.apache.commons.lang.StringUtils;
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+/**
+ * Base type of hook message.
+ */
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class HookNotification implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ public static final String UNKNOW_USER = "UNKNOWN";
+
+ /**
+ * Type of the hook message.
+ */
+ public enum HookNotificationType {
+ TYPE_CREATE, TYPE_UPDATE, ENTITY_CREATE, ENTITY_PARTIAL_UPDATE, ENTITY_FULL_UPDATE, ENTITY_DELETE
+ }
+
+ protected HookNotificationType type;
+ protected String user;
+
+ public HookNotification() {
+ }
+
+ public HookNotification(HookNotificationType type, String user) {
+ this.type = type;
+ this.user = user;
+ }
+
+ public HookNotificationType getType() {
+ return type;
+ }
+
+ public void setType(HookNotificationType type) {
+ this.type = type;
+ }
+
+ public String getUser() {
+ if (StringUtils.isEmpty(user)) {
+ return UNKNOW_USER;
+ }
+
+ return user;
+ }
+
+ public void setUser(String user) {
+ this.user = user;
+ }
+
+ public void normalize() { }
+
+ @Override
+ public String toString() {
+ return toString(new StringBuilder()).toString();
+ }
+
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("HookNotification{");
+ sb.append("type=").append(type);
+ sb.append(", user=").append(user);
+ sb.append("}");
+
+ return sb;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/model/notification/MessageVersion.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/model/notification/MessageVersion.java b/intg/src/main/java/org/apache/atlas/model/notification/MessageVersion.java
new file mode 100644
index 0000000..1dafa94
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/model/notification/MessageVersion.java
@@ -0,0 +1,170 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.model.notification;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+/**
+ * Represents the version of a notification message.
+ */
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class MessageVersion implements Comparable<MessageVersion>, Serializable {
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * Used for message with no version (old format).
+ */
+ public static final MessageVersion NO_VERSION = new MessageVersion("0");
+ public static final MessageVersion VERSION_1 = new MessageVersion("1.0.0");
+
+ public static final MessageVersion CURRENT_VERSION = VERSION_1;
+
+ private String version;
+
+
+ // ----- Constructors ----------------------------------------------------
+ public MessageVersion() {
+ this.version = CURRENT_VERSION.version;
+ }
+
+ /**
+ * Create a message version.
+ *
+ * @param version the version string
+ */
+ public MessageVersion(String version) {
+ this.version = version;
+
+ try {
+ getVersionParts();
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException(String.format("Invalid version string : %s.", version), e);
+ }
+ }
+
+ public String getVersion() {
+ return version;
+ }
+
+ public void setVersion(String version) {
+ this.version = version;
+ }
+
+
+ // ----- Comparable ------------------------------------------------------
+
+ @Override
+ public int compareTo(MessageVersion that) {
+ if (that == null) {
+ return 1;
+ }
+
+ Integer[] thisParts = getVersionParts();
+ Integer[] thatParts = that.getVersionParts();
+
+ int length = Math.max(thisParts.length, thatParts.length);
+
+ for (int i = 0; i < length; i++) {
+
+ int comp = getVersionPart(thisParts, i) - getVersionPart(thatParts, i);
+
+ if (comp != 0) {
+ return comp;
+ }
+ }
+ return 0;
+ }
+
+
+ // ----- Object overrides ------------------------------------------------
+
+ @Override
+ public boolean equals(Object that) {
+ if (this == that){
+ return true;
+ }
+
+ if (that == null || getClass() != that.getClass()) {
+ return false;
+ }
+
+ return compareTo((MessageVersion) that) == 0;
+ }
+
+ @Override
+ public int hashCode() {
+ return Arrays.hashCode(getVersionParts());
+ }
+
+
+ @Override
+ public String toString() {
+ return "MessageVersion[version=" + version + "]";
+ }
+
+ // ----- helper methods --------------------------------------------------
+
+ /**
+ * Get the version parts array by splitting the version string.
+ * Strip the trailing zeros (i.e. '1.0.0' equals '1').
+ *
+ * @return the version parts array
+ */
+ public Integer[] getVersionParts() {
+
+ String[] sParts = version.split("\\.");
+ ArrayList<Integer> iParts = new ArrayList<>();
+ int trailingZeros = 0;
+
+ for (String sPart : sParts) {
+ Integer iPart = new Integer(sPart);
+
+ if (iPart == 0) {
+ ++trailingZeros;
+ } else {
+ for (int i = 0; i < trailingZeros; ++i) {
+ iParts.add(0);
+ }
+ trailingZeros = 0;
+ iParts.add(iPart);
+ }
+ }
+ return iParts.toArray(new Integer[iParts.size()]);
+ }
+
+ public Integer getVersionPart(Integer[] versionParts, int i) {
+ return i < versionParts.length ? versionParts[i] : 0;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/type/AtlasType.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/type/AtlasType.java b/intg/src/main/java/org/apache/atlas/type/AtlasType.java
index dc0d300..ce0a475 100644
--- a/intg/src/main/java/org/apache/atlas/type/AtlasType.java
+++ b/intg/src/main/java/org/apache/atlas/type/AtlasType.java
@@ -20,25 +20,56 @@ package org.apache.atlas.type;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
+import org.apache.atlas.model.notification.EntityNotification;
+import org.apache.atlas.model.notification.EntityNotification.EntityNotificationType;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.model.notification.HookNotification.HookNotificationType;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
-import org.codehaus.jackson.map.DeserializationConfig;
-import org.codehaus.jackson.map.ObjectMapper;
+import org.apache.atlas.v1.model.notification.EntityNotificationV1;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityDeleteRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityPartialUpdateRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityUpdateRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.TypeRequest;
+import org.codehaus.jackson.*;
+import org.codehaus.jackson.map.*;
+import org.codehaus.jackson.map.module.SimpleModule;
+import org.codehaus.jackson.node.ObjectNode;
+import org.codehaus.jackson.type.TypeReference;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import java.io.IOException;
+import java.text.ParseException;
+import java.util.Date;
import java.util.List;
-
-
/**
* base class that declares interface for all Atlas types.
*/
public abstract class AtlasType {
+ private static final Logger LOG = LoggerFactory.getLogger(AtlasStructType.class);
private static final ObjectMapper mapper = new ObjectMapper()
.configure(DeserializationConfig.Feature.USE_BIG_DECIMAL_FOR_FLOATS, true);
+ private static final ObjectMapper mapperV1 = new ObjectMapper()
+ .configure(DeserializationConfig.Feature.USE_BIG_DECIMAL_FOR_FLOATS, true);
+
+ static {
+ SimpleModule atlasSerDeModule = new SimpleModule("AtlasSerDe", new Version(1, 0, 0, null));
+
+ atlasSerDeModule.addSerializer(Date.class, new DateSerializer());
+ atlasSerDeModule.addDeserializer(Date.class, new DateDeserializer());
+ atlasSerDeModule.addDeserializer(HookNotification.class, new HookNotificationDeserializer());
+ atlasSerDeModule.addDeserializer(EntityNotification.class, new EntityNotificationDeserializer());
+
+ mapperV1.registerModule(atlasSerDeModule);
+ }
+
+
private final String typeName;
private final TypeCategory typeCategory;
@@ -109,6 +140,8 @@ public abstract class AtlasType {
try {
ret = mapper.writeValueAsString(obj);
}catch (IOException e){
+ LOG.error("AtlasType.toJson()", e);
+
ret = null;
}
return ret;
@@ -119,8 +152,134 @@ public abstract class AtlasType {
try {
ret = mapper.readValue(jsonStr, type);
}catch (IOException e){
+ LOG.error("AtlasType.fromJson()", e);
+
ret = null;
}
return ret;
}
+
+ public static String toV1Json(Object obj) {
+ String ret;
+ try {
+ ret = mapperV1.writeValueAsString(obj);
+ }catch (IOException e){
+ LOG.error("AtlasType.toV1Json()", e);
+
+ ret = null;
+ }
+ return ret;
+ }
+
+ public static <T> T fromV1Json(String jsonStr, Class<T> type) {
+ T ret;
+ try {
+ ret = mapperV1.readValue(jsonStr, type);
+ }catch (IOException e){
+ LOG.error("AtlasType.fromV1Json()", e);
+
+ ret = null;
+ }
+ return ret;
+ }
+
+ public static <T> T fromV1Json(String jsonStr, TypeReference<T> type) {
+ T ret;
+ try {
+ ret = mapperV1.readValue(jsonStr, type);
+ }catch (IOException e){
+ LOG.error("AtlasType.toV1Json()", e);
+
+ ret = null;
+ }
+ return ret;
+ }
+
+ static class DateSerializer extends JsonSerializer<Date> {
+ @Override
+ public void serialize(Date value, JsonGenerator jgen, SerializerProvider provider) throws IOException {
+ if (value != null) {
+ jgen.writeString(AtlasBaseTypeDef.DATE_FORMATTER.format(value));
+ }
+ }
+ }
+
+ static class DateDeserializer extends JsonDeserializer<Date> {
+ @Override
+ public Date deserialize(JsonParser parser, DeserializationContext context) throws IOException {
+ Date ret = null;
+
+ String value = parser.readValueAs(String.class);
+
+ if (value != null) {
+ try {
+ ret = AtlasBaseTypeDef.DATE_FORMATTER.parse(value);
+ } catch (ParseException excp) {
+ }
+ }
+
+ return ret;
+ }
+ }
+
+ static class HookNotificationDeserializer extends JsonDeserializer<HookNotification> {
+ @Override
+ public HookNotification deserialize(JsonParser parser, DeserializationContext context) throws IOException {
+ HookNotification ret = null;
+ ObjectMapper mapper = (ObjectMapper) parser.getCodec();
+ ObjectNode root = (ObjectNode) mapper.readTree(parser);
+ JsonNode typeNode = root != null ? root.get("type") : null;
+ String strType = typeNode != null ? typeNode.asText() : null;
+ HookNotificationType notificationType = strType != null ? HookNotificationType.valueOf(strType) : null;
+
+ if (notificationType != null) {
+ switch (notificationType) {
+ case TYPE_CREATE:
+ case TYPE_UPDATE:
+ ret = mapper.readValue(root, TypeRequest.class);
+ break;
+
+ case ENTITY_CREATE:
+ ret = mapper.readValue(root, EntityCreateRequest.class);
+ break;
+
+ case ENTITY_PARTIAL_UPDATE:
+ ret = mapper.readValue(root, EntityPartialUpdateRequest.class);
+ break;
+
+ case ENTITY_FULL_UPDATE:
+ ret = mapper.readValue(root, EntityUpdateRequest.class);
+ break;
+
+ case ENTITY_DELETE:
+ ret = mapper.readValue(root, EntityDeleteRequest.class);
+ break;
+ }
+ }
+
+ return ret;
+ }
+ }
+
+ static class EntityNotificationDeserializer extends JsonDeserializer<EntityNotification> {
+ @Override
+ public EntityNotification deserialize(JsonParser parser, DeserializationContext context) throws IOException {
+ EntityNotification ret = null;
+ ObjectMapper mapper = (ObjectMapper) parser.getCodec();
+ ObjectNode root = (ObjectNode) mapper.readTree(parser);
+ JsonNode typeNode = root != null ? root.get("type") : null;
+ String strType = typeNode != null ? typeNode.asText() : null;
+ EntityNotificationType notificationType = strType != null ? EntityNotificationType.valueOf(strType) : EntityNotificationType.ENTITY_NOTIFICATION_V1;
+
+ if (root != null) {
+ switch (notificationType) {
+ case ENTITY_NOTIFICATION_V1:
+ ret = mapper.readValue(root, EntityNotificationV1.class);
+ break;
+ }
+ }
+
+ return ret;
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java b/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java
index bd4b0e9..5f55b43 100644
--- a/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java
+++ b/intg/src/main/java/org/apache/atlas/type/AtlasTypeRegistry.java
@@ -846,8 +846,8 @@ public class AtlasTypeRegistry {
boolean alreadyLockedByCurrentThread = typeRegistryUpdateLock.isHeldByCurrentThread();
if (!alreadyLockedByCurrentThread) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("lockTypeRegistryForUpdate(): waiting for lock to be released by thread {}", lockedByThread);
+ if (lockedByThread != null) {
+ LOG.info("lockTypeRegistryForUpdate(): waiting for lock to be released by thread {}", lockedByThread);
}
} else {
LOG.warn("lockTypeRegistryForUpdate(): already locked. currentLockCount={}",
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/type/AtlasTypeUtil.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/type/AtlasTypeUtil.java b/intg/src/main/java/org/apache/atlas/type/AtlasTypeUtil.java
index 5f3cefd..80c6a0c 100644
--- a/intg/src/main/java/org/apache/atlas/type/AtlasTypeUtil.java
+++ b/intg/src/main/java/org/apache/atlas/type/AtlasTypeUtil.java
@@ -17,17 +17,29 @@
*/
package org.apache.atlas.type;
-import com.google.common.collect.ImmutableSet;
+import org.apache.atlas.model.instance.AtlasClassification;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.instance.AtlasObjectId;
-import org.apache.atlas.model.typedef.*;
+import org.apache.atlas.model.instance.AtlasStruct;
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.atlas.model.typedef.AtlasClassificationDef;
+import org.apache.atlas.model.typedef.AtlasEntityDef;
+import org.apache.atlas.model.typedef.AtlasEnumDef;
import org.apache.atlas.model.typedef.AtlasEnumDef.AtlasEnumElementDef;
+import org.apache.atlas.model.typedef.AtlasRelationshipDef;
import org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags;
import org.apache.atlas.model.typedef.AtlasRelationshipDef.RelationshipCategory;
+import org.apache.atlas.model.typedef.AtlasRelationshipEndDef;
+import org.apache.atlas.model.typedef.AtlasStructDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef;
+import org.apache.atlas.model.typedef.AtlasTypeDefHeader;
+import org.apache.atlas.model.typedef.AtlasTypesDef;
+import org.apache.atlas.v1.model.typedef.AttributeDefinition;
+import org.apache.atlas.v1.model.typedef.ClassTypeDefinition;
+import org.apache.atlas.v1.model.typedef.Multiplicity;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
@@ -35,6 +47,7 @@ import org.apache.commons.lang.StringUtils;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import java.util.stream.Collectors;
import static org.apache.atlas.model.typedef.AtlasBaseTypeDef.*;
@@ -208,19 +221,19 @@ public class AtlasTypeUtil {
return new AtlasEnumDef(name, description, "1.0", Arrays.asList(enumValues));
}
- public static AtlasClassificationDef createTraitTypeDef(String name, ImmutableSet<String> superTypes, AtlasAttributeDef... attrDefs) {
+ public static AtlasClassificationDef createTraitTypeDef(String name, Set<String> superTypes, AtlasAttributeDef... attrDefs) {
return createTraitTypeDef(name, null, superTypes, attrDefs);
}
- public static AtlasClassificationDef createTraitTypeDef(String name, String description, ImmutableSet<String> superTypes, AtlasAttributeDef... attrDefs) {
+ public static AtlasClassificationDef createTraitTypeDef(String name, String description, Set<String> superTypes, AtlasAttributeDef... attrDefs) {
return createTraitTypeDef(name, description, "1.0", superTypes, attrDefs);
}
- public static AtlasClassificationDef createTraitTypeDef(String name, String description, String version, ImmutableSet<String> superTypes, AtlasAttributeDef... attrDefs) {
+ public static AtlasClassificationDef createTraitTypeDef(String name, String description, String version, Set<String> superTypes, AtlasAttributeDef... attrDefs) {
return new AtlasClassificationDef(name, description, version, Arrays.asList(attrDefs), superTypes);
}
- public static AtlasClassificationDef createAtlasClassificationDef(String name, String description, String version, ImmutableSet<String> superTypes, ImmutableSet<String> entityTypes, AtlasAttributeDef... attrDefs) {
+ public static AtlasClassificationDef createAtlasClassificationDef(String name, String description, String version, Set<String> superTypes, Set<String> entityTypes, AtlasAttributeDef... attrDefs) {
return new AtlasClassificationDef(name, description, version, Arrays.asList(attrDefs), superTypes, entityTypes, null);
}
@@ -232,18 +245,15 @@ public class AtlasTypeUtil {
return new AtlasStructDef(name, description, "1.0", Arrays.asList(attrDefs));
}
- public static AtlasEntityDef createClassTypeDef(String name,
- ImmutableSet<String> superTypes, AtlasAttributeDef... attrDefs) {
+ public static AtlasEntityDef createClassTypeDef(String name, Set<String> superTypes, AtlasAttributeDef... attrDefs) {
return createClassTypeDef(name, null, "1.0", superTypes, attrDefs);
}
- public static AtlasEntityDef createClassTypeDef(String name, String description,
- ImmutableSet<String> superTypes, AtlasAttributeDef... attrDefs) {
+ public static AtlasEntityDef createClassTypeDef(String name, String description, Set<String> superTypes, AtlasAttributeDef... attrDefs) {
return createClassTypeDef(name, description, "1.0", superTypes, attrDefs);
}
- public static AtlasEntityDef createClassTypeDef(String name, String description, String version,
- ImmutableSet<String> superTypes, AtlasAttributeDef... attrDefs) {
+ public static AtlasEntityDef createClassTypeDef(String name, String description, String version, Set<String> superTypes, AtlasAttributeDef... attrDefs) {
return new AtlasEntityDef(name, description, version, Arrays.asList(attrDefs), superTypes);
}
@@ -409,6 +419,129 @@ public class AtlasTypeUtil {
return sb.toString();
}
+ public static ClassTypeDefinition toClassTypeDefinition(final AtlasEntityType entityType) {
+ ClassTypeDefinition ret = null;
+
+ if (entityType != null) {
+ AtlasEntityDef entityDef = entityType.getEntityDef();
+ ret = new ClassTypeDefinition();
+ ret.setTypeName(entityDef.getName());
+ ret.setTypeDescription(entityDef.getDescription());
+ ret.setTypeVersion(entityDef.getTypeVersion());
+ ret.setSuperTypes(entityDef.getSuperTypes());
+
+ if (MapUtils.isNotEmpty(entityType.getAllAttributes())) {
+ List<AttributeDefinition> attributeDefinitions = entityType.getAllAttributes()
+ .entrySet()
+ .stream()
+ .map(e -> toV1AttributeDefinition(e.getValue()))
+ .collect(Collectors.toList());
+
+ ret.setAttributeDefinitions(attributeDefinitions);
+ }
+ }
+
+ return ret;
+ }
+
+ public static AttributeDefinition toV1AttributeDefinition(AtlasStructType.AtlasAttribute attribute) {
+ AtlasAttributeDef attributeDef = attribute.getAttributeDef();
+ AttributeDefinition ret = new AttributeDefinition();
+
+ ret.setName(attributeDef.getName());
+ ret.setDataTypeName(attributeDef.getTypeName());
+ ret.setIsUnique(attributeDef.getIsUnique());
+ ret.setIsIndexable(attributeDef.getIsIndexable());
+ ret.setIsComposite(attribute.isOwnedRef());
+ ret.setReverseAttributeName(attribute.getInverseRefAttributeName());
+ ret.setDefaultValue(attributeDef.getDefaultValue());
+ ret.setDescription(attributeDef.getDescription());
+
+ final int lower;
+ final int upper;
+
+ if (attributeDef.getCardinality() == AtlasAttributeDef.Cardinality.SINGLE) {
+ lower = attributeDef.getIsOptional() ? 0 : 1;
+ upper = 1;
+ } else {
+ if(attributeDef.getIsOptional()) {
+ lower = 0;
+ } else {
+ lower = attributeDef.getValuesMinCount() < 1 ? 1 : attributeDef.getValuesMinCount();
+ }
+
+ upper = attributeDef.getValuesMaxCount() < 2 ? Integer.MAX_VALUE : attributeDef.getValuesMaxCount();
+ }
+
+ Multiplicity multiplicity = new Multiplicity();
+ multiplicity.setLower(lower);
+ multiplicity.setUpper(upper);
+ multiplicity.setIsUnique(AtlasAttributeDef.Cardinality.SET.equals(attributeDef.getCardinality()));
+
+ ret.setMultiplicity(multiplicity);
+
+ return ret;
+ }
+
+ public static Map<String, Object> toMap(AtlasEntity entity) {
+ Map<String, Object> ret = null;
+
+ if (entity != null) {
+ ret = new LinkedHashMap<>();
+
+ // Id type
+ ret.put("$typeName$", entity.getTypeName());
+ ret.put("$id$", new LinkedHashMap<String, Object>(){{
+ put("id", entity.getGuid());
+ put("$typeName$", entity.getTypeName());
+ put("version", entity.getVersion().intValue());
+ put("state", entity.getStatus().name());
+ }});
+
+ // System attributes
+ ret.put("$systemAttributes$", new LinkedHashMap<String, String>() {{
+ put("createdBy", entity.getCreatedBy());
+ put("modifiedBy", entity.getUpdatedBy());
+ put("createdTime", entity.getCreateTime().toString());
+ put("modifiedTime", entity.getUpdateTime().toString());
+ }});
+
+ // Traits
+ if (CollectionUtils.isNotEmpty(entity.getClassifications())) {
+ Map<String, HashMap> traitDetails = entity.getClassifications()
+ .stream()
+ .collect(Collectors.toMap(AtlasStruct::getTypeName, AtlasTypeUtil::getNestedTraitDetails));
+ ret.put("$traits$", traitDetails);
+ }
+
+ // All attributes
+ if (MapUtils.isNotEmpty(entity.getAttributes())) {
+ for (Map.Entry<String, Object> entry : entity.getAttributes().entrySet()) {
+ if (entry.getValue() instanceof AtlasObjectId) {
+ ret.put(entry.getKey(), new LinkedHashMap<String, Object>(){{
+ put("id", ((AtlasObjectId) entry.getValue()).getGuid());
+ put("$typeName$", ((AtlasObjectId) entry.getValue()).getTypeName());
+// put("version", entity.getVersion().intValue());
+// put("state", entity.getStatus().name());
+ }});
+ } else {
+ ret.put(entry.getKey(), entry.getValue());
+ }
+ }
+ }
+
+ }
+
+ return ret;
+ }
+
+ private static HashMap getNestedTraitDetails(final AtlasClassification atlasClassification) {
+ return new HashMap<String, Object>() {{
+ put("$typeName$", atlasClassification.getTypeName());
+ putAll(atlasClassification.getAttributes());
+ }};
+ }
+
private static void dumpTypeNames(List<? extends AtlasBaseTypeDef> typeDefs, StringBuilder sb) {
if (CollectionUtils.isNotEmpty(typeDefs)) {
for (int i = 0; i < typeDefs.size(); i++) {
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/typesystem/types/DataTypes.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/typesystem/types/DataTypes.java b/intg/src/main/java/org/apache/atlas/typesystem/types/DataTypes.java
new file mode 100644
index 0000000..dba2d88
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/typesystem/types/DataTypes.java
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.typesystem.types;
+
+/*
+ * this enum must be in package org.apache.atlas.typesystem.types, since vertex property in GraphDB has reference to this type
+ */
/**
 * Holder for the legacy (v1) type-category enumeration.
 */
public class DataTypes {
    /**
     * Category of a type in the legacy type system.
     *
     * NOTE(review): per the file-level comment, persisted vertex properties in
     * GraphDB reference this enum by its fully-qualified name — do not rename,
     * reorder, or remove constants without a data migration.
     */
    public enum TypeCategory {
        PRIMITIVE,
        ENUM,
        ARRAY,
        MAP,
        STRUCT,
        TRAIT,
        CLASS,
        RELATIONSHIP
    }
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/instance/AtlasSystemAttributes.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/instance/AtlasSystemAttributes.java b/intg/src/main/java/org/apache/atlas/v1/model/instance/AtlasSystemAttributes.java
new file mode 100644
index 0000000..0b74365
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/instance/AtlasSystemAttributes.java
@@ -0,0 +1,156 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.instance;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.Date;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class AtlasSystemAttributes implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private String createdBy;
+ private String modifiedBy;
+ private Date createdTime;
+ private Date modifiedTime;
+
+
+ public AtlasSystemAttributes() {
+ }
+
+ public AtlasSystemAttributes(AtlasSystemAttributes that) {
+ if (that != null) {
+ this.createdBy = that.createdBy;
+ this.modifiedBy = that.modifiedBy;
+ this.createdTime = that.createdTime;
+ this.modifiedTime = that.modifiedTime;
+ }
+ }
+
+ public AtlasSystemAttributes(String createdBy, String modifiedBy, Date createdTime, Date modifiedTime){
+ this.createdBy = createdBy;
+ this.modifiedBy = modifiedBy;
+ this.createdTime = createdTime;
+ this.modifiedTime = modifiedTime;
+ }
+
+ public AtlasSystemAttributes(Map<String, Object> map) {
+ this();
+
+ if (map != null) {
+ this.createdBy = Id.asString(map.get("createdBy"));
+ this.modifiedBy = Id.asString(map.get("modifiedBy"));
+ this.createdTime = Id.asDate(map.get("createdTime"));
+ this.modifiedTime = Id.asDate(map.get("modifiedTime"));
+ }
+ }
+
+ public String getCreatedBy(){
+ return createdBy;
+ }
+
+ public void setCreatedBy(String createdBy) {
+ this.createdBy = createdBy;
+ }
+
+ public String getModifiedBy(){
+ return modifiedBy;
+ }
+
+ public void setModifiedBy(String modifiedBy) {
+ this.modifiedBy = modifiedBy;
+ }
+
+ public Date getCreatedTime(){
+ return createdTime;
+ }
+
+ public void setCreatedTime(Date createdTime) {
+ this.createdTime = createdTime;
+ }
+
+ public Date getModifiedTime(){
+ return modifiedTime;
+ }
+
+ public void setModifiedTime(Date modifiedTime) {
+ this.modifiedTime = modifiedTime;
+ }
+
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ AtlasSystemAttributes obj = (AtlasSystemAttributes) o;
+
+ return Objects.equals(createdBy, obj.createdBy) &&
+ Objects.equals(modifiedBy, obj.modifiedBy) &&
+ Objects.equals(createdTime, obj.createdTime) &&
+ Objects.equals(modifiedTime, obj.modifiedTime);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(createdBy, modifiedBy, createdTime, modifiedTime);
+ }
+
+
+ @Override
+ public String toString() {
+ return toString(new StringBuilder()).toString();
+ }
+
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("AtlasSystemAttributes{")
+ .append("createdBy=").append(createdBy)
+ .append(", modifiedBy=").append(modifiedBy)
+ .append(", createdTime=").append(createdTime)
+ .append(", modifiedTime=").append(modifiedTime)
+ .append("}");
+
+ return sb;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/instance/Id.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/instance/Id.java b/intg/src/main/java/org/apache/atlas/v1/model/instance/Id.java
new file mode 100644
index 0000000..f3087d1
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/instance/Id.java
@@ -0,0 +1,270 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.instance;
+
+
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnore;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.text.ParseException;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class Id implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ @JsonIgnore
+ private static AtomicLong s_nextId = new AtomicLong(System.nanoTime());
+
+ public static final String JSON_CLASS_ID = "org.apache.atlas.typesystem.json.InstanceSerialization$_Id";
+
+ public enum EntityState { ACTIVE, DELETED }
+
+ private String id;
+ private String typeName;
+ private int version;
+ private EntityState state;
+
+
+ public Id() {
+ }
+
+ public Id(Id that) {
+ if (that != null) {
+ this.id = that.id;
+ this.typeName = that.typeName;
+ this.version = that.version;
+ this.state = that.state;
+ }
+ }
+
+ public Id(String typeName) {
+ this("" + nextNegativeLong(), 0, typeName);
+ }
+
+ public Id(String id, int version, String typeName) {
+ this(id, version, typeName, null);
+ }
+
+ public Id(long id, int version, String typeName) {
+ this(id, version, typeName, null);
+ }
+
+ public Id(long id, int version, String typeName, String state) {
+ this("" + id, version, typeName, state);
+ }
+
+ public Id(String id, int version, String typeName, String state) {
+ this.id = id;
+ this.typeName = typeName;
+ this.version = version;
+ this.state = state == null ? EntityState.ACTIVE : EntityState.valueOf(state.toUpperCase());
+ }
+
+ public Id(Map<String, Object> map) {
+ this();
+
+ if (map != null) {
+ this.id = Id.asString(map.get("id"));
+ this.typeName = Id.asString(map.get("typeName"));
+ this.version = Id.asInt(map.get("id"));
+ this.state = Id.asEntityState(map.get("state"));
+ }
+ }
+
+ // for serialization backward compatibility
+ public String getJsonClass() {
+ return JSON_CLASS_ID;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public String getTypeName() {
+ return typeName;
+ }
+
+ public void setTypeName(String typeName) {
+ this.typeName = typeName;
+ }
+
+ public int getVersion() {
+ return version;
+ }
+
+ public void setVersion(int version) {
+ this.version = version;
+ }
+
+ public EntityState getState() {
+ return state;
+ }
+
+ public void setState(EntityState state) {
+ this.state = state;
+ }
+
+ @JsonIgnore
+ public String _getId() {
+ return id;
+ }
+
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ Id obj = (Id) o;
+
+ return version == obj.version &&
+ Objects.equals(id, obj.id) &&
+ Objects.equals(typeName, obj.typeName) &&
+ Objects.equals(state, obj.state);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, typeName, version, state);
+ }
+
+
+ @Override
+ public String toString() {
+ return asString(new StringBuilder()).toString();
+ }
+
+ public StringBuilder asString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("Id{")
+ .append("id=").append(id)
+ .append(", typeName=").append(typeName)
+ .append(", version=").append(version)
+ .append(", state=").append(state)
+ .append("}");
+
+ return sb;
+ }
+
+ private static long nextNegativeLong() {
+ long ret = s_nextId.getAndDecrement();
+
+ if (ret > 0) {
+ ret *= -1;
+ } else if (ret == 0) {
+ ret = Long.MIN_VALUE;
+ }
+
+ return ret;
+ }
+
+ static String asString(Object val) {
+ return val == null ? null : val.toString();
+ }
+
+ static int asInt(Object val) {
+ if (val != null) {
+ if (val instanceof Number) {
+ return ((Number)val).intValue();
+ }
+
+ try {
+ return Integer.parseInt(val.toString());
+ } catch (NumberFormatException excp) {
+ // ignore
+ }
+ }
+
+ return 0;
+ }
+
+ static Date asDate(Object val) {
+ if (val != null) {
+ if (val instanceof Date) {
+ return (Date) val;
+ } else if (val instanceof Number) {
+ return new Date(((Number)val).longValue());
+ }
+
+ try {
+ return AtlasBaseTypeDef.DATE_FORMATTER.parse(val.toString());
+ } catch (ParseException excp) {
+ // ignore
+ }
+ }
+
+ return null;
+ }
+
+ static EntityState asEntityState(Object val) {
+ if (val != null) {
+ if (val instanceof EntityState) {
+ return (EntityState) val;
+ }
+
+ try {
+ return EntityState.valueOf(val.toString());
+ } catch (Exception excp) {
+ // ignore
+ }
+ }
+
+ return EntityState.ACTIVE;
+ }
+
+ static Map asMap(Object val) {
+ return (val instanceof Map) ? ((Map) val) : null;
+ }
+
+ static List asList(Object val) {
+ return (val instanceof List) ? ((List) val) : null;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/instance/Referenceable.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/instance/Referenceable.java b/intg/src/main/java/org/apache/atlas/v1/model/instance/Referenceable.java
new file mode 100644
index 0000000..f1d28d1
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/instance/Referenceable.java
@@ -0,0 +1,252 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.instance;
+
+
+
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.commons.collections.MapUtils;
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnore;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonFilter;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class Referenceable extends Struct implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ public static final String JSON_CLASS_REFERENCE = "org.apache.atlas.typesystem.json.InstanceSerialization$_Reference";
+
+ private Id id;
+ private Map<String, Struct> traits = new HashMap<>();
+ private List<String> traitNames = new ArrayList<>();
+ private AtlasSystemAttributes systemAttributes;
+
+
+ public Referenceable() {
+ super();
+ }
+
+ public Referenceable(Referenceable that) {
+ super(that);
+
+ if (that != null) {
+ this.id = new Id(that.id);
+
+ if (that.traits != null) {
+ this.traits.putAll(that.traits);
+ }
+
+ if (that.traitNames != null) {
+ this.traitNames.addAll(that.traitNames);
+ }
+
+ this.systemAttributes = new AtlasSystemAttributes(that.systemAttributes);
+ }
+ }
+
+ public Referenceable(String typeName, String... traitNames) {
+ super(typeName);
+
+ this.id = new Id(typeName);
+ this.systemAttributes = null;
+
+ if (traitNames != null) {
+ for (String traitName : traitNames) {
+ this.traitNames.add(traitName);
+ this.traits.put(traitName, new Struct(traitName));
+ }
+ }
+ }
+
+ public Referenceable(String typeName, Map<String, Object> values) {
+ this(new Id(typeName), typeName, values, null, null);
+ }
+
+ public Referenceable(String guid, String typeName, Map<String, Object> values) {
+ this(new Id(guid, 0, typeName), typeName, values, null, null, null);
+ }
+
+ public Referenceable(String guid, String typeName, Map<String, Object> values, AtlasSystemAttributes systemAttributes) {
+ this(new Id(guid, 0, typeName), typeName, values, systemAttributes, null, null);
+ }
+
+ public Referenceable(String guid, String typeName, Map<String, Object> values, AtlasSystemAttributes systemAttributes, List<String> traitNames, Map<String, Struct> traits) {
+ this(new Id(guid, 0, typeName), typeName, values, systemAttributes, traitNames, traits);
+ }
+
+ public Referenceable(String guid, String typeName, Map<String, Object> values, List<String> traitNames, Map<String, Struct> traits) {
+ this(new Id(guid, 0, typeName), typeName, values, null, traitNames, traits);
+ }
+
+ public Referenceable(Id id, String typeName, Map<String, Object> values, List<String> traitNames, Map<String, Struct> traits) {
+ this(id, typeName, values, null, traitNames, traits);
+ }
+
+ public Referenceable(Id id, String typeName, Map<String, Object> values, AtlasSystemAttributes systemAttributes, List<String> traitNames, Map<String, Struct> traits) {
+ super(typeName, values);
+
+ this.id = id;
+ this.systemAttributes = systemAttributes;
+
+ if (traitNames != null) {
+ this.traitNames = traitNames;
+ }
+
+ if (traits != null) {
+ this.traits = traits;
+ }
+ }
+
+ public Referenceable(Map<String, Object> map) {
+ super(map);
+
+ if (map != null) {
+ this.id = new Id((Map)map.get("id"));
+ this.traitNames = Id.asList(map.get("traitNames"));
+ this.systemAttributes = new AtlasSystemAttributes((Map) map.get("systemAttributes"));
+
+ Map traits = Id.asMap(map.get("traits"));
+
+ if (MapUtils.isNotEmpty(traits)) {
+ this.traits = new HashMap<>(traits.size());
+
+ for (Object key : traits.keySet()) {
+ this.traits.put(Id.asString(key), new Struct(Id.asMap(traits.get(key))));
+ }
+ }
+ }
+ }
+
+
+ // for serialization backward compatibility
+ public String getJsonClass() {
+ return JSON_CLASS_REFERENCE;
+ }
+
+ public Id getId() {
+ return id;
+ }
+
+ public void setId(Id id) {
+ this.id = id;
+ }
+
+ public Map<String, Struct> getTraits() {
+ return traits;
+ }
+
+ public void setTraits(Map<String, Struct> traits) {
+ this.traits = traits;
+ }
+
+ public List<String> getTraitNames() {
+ return traitNames;
+ }
+
+ public void setTraitNames(List<String> traitNames) {
+ this.traitNames = traitNames;
+ }
+
+ public AtlasSystemAttributes getSystemAttributes() {
+ return systemAttributes;
+ }
+
+ public void setSystemAttributes(AtlasSystemAttributes systemAttributes) {
+ this.systemAttributes = systemAttributes;
+ }
+
+ @JsonIgnore
+ public Struct getTrait(String name) {
+ return traits != null ? traits.get(name) : null;
+ }
+
+ @JsonIgnore
+ public String toShortString() {
+ return String.format("entity[type=%s guid=%s]", getTypeName(), id._getId());
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+
+ Referenceable obj = (Referenceable)o;
+
+ return Objects.equals(id, obj.id) &&
+ Objects.equals(traits, obj.traits) &&
+ Objects.equals(traitNames, obj.traitNames) &&
+ Objects.equals(systemAttributes, obj.systemAttributes);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id, traits, traitNames, systemAttributes);
+ }
+
+
+ @Override
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("Referenceable{");
+ super.toString(sb);
+ sb.append(", id=");
+ if (id != null) {
+ id.asString(sb);
+ }
+ sb.append(", triats={");
+ AtlasBaseTypeDef.dumpObjects(this.traits, sb);
+ sb.append("}, traitNames=[");
+ AtlasBaseTypeDef.dumpObjects(traitNames, sb);
+ sb.append("], systemAttributes=");
+ if (systemAttributes != null) {
+ systemAttributes.toString(sb);
+ }
+ sb.append("}");
+
+ return sb;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/instance/Struct.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/instance/Struct.java b/intg/src/main/java/org/apache/atlas/v1/model/instance/Struct.java
new file mode 100644
index 0000000..5aebd4b
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/instance/Struct.java
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.instance;
+
+
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.commons.collections.MapUtils;
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnore;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.*;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class Struct implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ public static final String JSON_CLASS_STRUCT = "org.apache.atlas.typesystem.json.InstanceSerialization$_Struct";
+
+ private String typeName;
+ private Map<String, Object> values;
+
+
+ public Struct() {
+ }
+
+ public Struct(Struct that) {
+ if (that != null) {
+ this.typeName = that.typeName;
+
+ if (that.values != null) {
+ this.values = new HashMap<>(that.values);
+ }
+ }
+ }
+
+ public Struct(String typeName) {
+ this(typeName, null);
+ }
+
+ public Struct(String typeName, Map<String, Object> values) {
+ this.typeName = typeName;
+ this.values = values;
+ }
+
+ public Struct(Map<String, Object> map) {
+ this();
+
+ if (map != null) {
+ this.typeName = Id.asString(map.get("typeName"));
+ this.values = Id.asMap(map.get("values"));
+
+ this.normailze();
+ }
+ }
+
+ // for serialization backward compatibility
+ public String getJsonClass() {
+ return JSON_CLASS_STRUCT;
+ }
+
+ public String getTypeName() {
+ return typeName;
+ }
+
+ public void setTypeName(String typeName) {
+ this.typeName = typeName;
+ }
+
+ public Map<String, Object> getValues() {
+ return values;
+ }
+
+ public void setValues(Map<String, Object> values) {
+ this.values = values;
+ }
+
+ @JsonIgnore
+ public Map<String, Object> getValuesMap() {
+ return values;
+ }
+
+ @JsonIgnore
+ public void set(String attrName, Object attrValue) {
+ if (values == null) {
+ values = new HashMap<>();
+ }
+
+ values.put(attrName, attrValue);
+ }
+
+ @JsonIgnore
+ public Object get(String attrName) {
+ return values != null ? values.get(attrName) : null;
+ }
+
+ @JsonIgnore
+ public void setNull(String attrName) {
+ if (values != null) {
+ values.remove(attrName);
+ }
+ }
+
+ public void normailze() {
+ if (MapUtils.isEmpty(values)) {
+ return;
+ }
+
+ for (Map.Entry<String, Object> entry : values.entrySet()) {
+ entry.setValue(normalizeAttributeValue(entry.getValue()));
+ }
+ }
+
+ private Object normalizeAttributeValue(Object value) {
+ if (value instanceof Map) {
+ Map mapValue = (Map) value;
+ String jsonClass = (String)mapValue.get("jsonClass");
+
+ if (jsonClass != null) {
+ if (Id.JSON_CLASS_ID.equals(jsonClass)) {
+ value = new Id(mapValue);
+ } else if (Struct.JSON_CLASS_STRUCT.equals(jsonClass)) {
+ value = new Struct(mapValue);
+ } else if (Referenceable.JSON_CLASS_REFERENCE.equals(jsonClass)) {
+ value = new Referenceable(mapValue);
+ }
+ }
+ } else if (value instanceof List) {
+ List<Object> listValue = (List) value;
+ List<Object> normalizedValue = new ArrayList<>(listValue.size());
+
+ for (Object val : listValue) {
+ normalizedValue.add(normalizeAttributeValue(val));
+ }
+
+ value = normalizedValue;
+ }
+
+ return value;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+
+ if (o == null || o.getClass() != getClass()) {
+ return false;
+ }
+
+ Struct obj = (Struct)o;
+
+ return Objects.equals(typeName, obj.typeName) &&
+ Objects.equals(values, obj.values);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(typeName, values);
+ }
+
+ @Override
+ public String toString() {
+ return toString(new StringBuilder()).toString();
+ }
+
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("Struct{");
+ sb.append("typeName=").append(typeName);
+ sb.append(", values={");
+ AtlasBaseTypeDef.dumpObjects(values, sb);
+ sb.append("}");
+ sb.append("}");
+
+ return sb;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/lineage/DataSetLineageResponse.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/lineage/DataSetLineageResponse.java b/intg/src/main/java/org/apache/atlas/v1/model/lineage/DataSetLineageResponse.java
new file mode 100644
index 0000000..b073bac
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/lineage/DataSetLineageResponse.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.v1.model.lineage;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import java.util.Objects;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+@JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class DataSetLineageResponse extends LineageResponse {
+ private String tableName;
+
+ public DataSetLineageResponse() {
+ }
+
+ public DataSetLineageResponse(final DataSetLineageResponse other) {
+ super(other);
+ this.tableName = other.tableName;
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
+ public void setTableName(final String tableName) {
+ this.tableName = tableName;
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ if (!super.equals(o)) return false;
+ final DataSetLineageResponse that = (DataSetLineageResponse) o;
+ return Objects.equals(tableName, that.tableName);
+ }
+
+ @Override
+ public int hashCode() {
+
+ return Objects.hash(super.hashCode(), tableName);
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/lineage/LineageResponse.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/lineage/LineageResponse.java b/intg/src/main/java/org/apache/atlas/v1/model/lineage/LineageResponse.java
new file mode 100644
index 0000000..aadbba1
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/lineage/LineageResponse.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.v1.model.lineage;
+
+import org.apache.atlas.v1.model.instance.Struct;
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import java.util.Objects;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+@JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class LineageResponse {
+ private String requestId;
+ private Struct results;
+
+ public LineageResponse() {
+ }
+
+ public LineageResponse(final LineageResponse other) {
+ this.requestId = other.requestId;
+ this.results = other.results;
+ }
+
+ public Struct getResults() {
+ return results;
+ }
+
+ public void setResults(final Struct results) {
+ this.results = results;
+ }
+
+ public String getRequestId() {
+ return requestId;
+ }
+
+ public void setRequestId(final String requestId) {
+ this.requestId = requestId;
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ final LineageResponse that = (LineageResponse) o;
+ return Objects.equals(requestId, that.requestId) &&
+ Objects.equals(results, that.results);
+ }
+
+ @Override
+ public int hashCode() {
+
+ return Objects.hash(requestId, results);
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/lineage/SchemaResponse.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/lineage/SchemaResponse.java b/intg/src/main/java/org/apache/atlas/v1/model/lineage/SchemaResponse.java
new file mode 100644
index 0000000..e17553e
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/lineage/SchemaResponse.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.v1.model.lineage;
+
+import org.apache.atlas.v1.model.typedef.ClassTypeDefinition;
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+@JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class SchemaResponse {
+ private String requestId;
+ private String tableName;
+ private SchemaDetails results;
+
+ public SchemaResponse() {
+ }
+
+ public String getTableName() {
+ return tableName;
+ }
+
+ public void setTableName(final String tableName) {
+ this.tableName = tableName;
+ }
+
+ public String getRequestId() {
+ return requestId;
+ }
+
+ public void setRequestId(final String requestId) {
+ this.requestId = requestId;
+ }
+
+ public SchemaDetails getResults() {
+ return results;
+ }
+
+ public void setResults(final SchemaDetails results) {
+ this.results = results;
+ }
+
+ /**
+ * Represents the column schema for a given hive table
+ */
+ @JsonAutoDetect(getterVisibility = PUBLIC_ONLY, setterVisibility = PUBLIC_ONLY, fieldVisibility = NONE)
+ @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+ @JsonIgnoreProperties(ignoreUnknown = true)
+ public static class SchemaDetails {
+ private ClassTypeDefinition dataType; // Data type of the (hive) columns
+ private List<Map<String, Object>> rows; // Column instances for the given table
+
+ public SchemaDetails() {
+ }
+
+ public SchemaDetails(final SchemaDetails other) {
+ this.dataType = other.dataType;
+ this.rows = other.rows;
+ }
+
+ public ClassTypeDefinition getDataType() {
+ return dataType;
+ }
+
+ public void setDataType(final ClassTypeDefinition dataType) {
+ this.dataType = dataType;
+ }
+
+ public List<Map<String, Object>> getRows() {
+ return rows;
+ }
+
+ public void setRows(final List<Map<String, Object>> rows) {
+ this.rows = rows;
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ final SchemaDetails that = (SchemaDetails) o;
+ return Objects.equals(dataType, that.dataType) &&
+ Objects.equals(rows, that.rows);
+ }
+
+ @Override
+ public int hashCode() {
+
+ return Objects.hash(dataType, rows);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/notification/EntityNotificationV1.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/notification/EntityNotificationV1.java b/intg/src/main/java/org/apache/atlas/v1/model/notification/EntityNotificationV1.java
new file mode 100644
index 0000000..549dbe3
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/notification/EntityNotificationV1.java
@@ -0,0 +1,231 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.v1.model.notification;
+
+import org.apache.atlas.model.notification.EntityNotification;
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.type.AtlasClassificationType;
+import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnore;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+/**
+ * Entity notification
+ */
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class EntityNotificationV1 extends EntityNotification implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ public enum OperationType {
+ ENTITY_CREATE,
+ ENTITY_UPDATE,
+ ENTITY_DELETE,
+ TRAIT_ADD,
+ TRAIT_DELETE,
+ TRAIT_UPDATE
+ }
+
+ private Referenceable entity;
+ private OperationType operationType;
+ private List<Struct> traits;
+
+
+ // ----- Constructors ------------------------------------------------------
+
+ /**
+ * No-arg constructor for serialization.
+ */
+ public EntityNotificationV1() {
+ }
+
+ /**
+ * Construct an EntityNotificationV1.
+ *
+ * @param entity the entity subject of the notification
+ * @param operationType the type of operation that caused the notification
+ * @param traits the traits for the given entity
+ */
+ public EntityNotificationV1(Referenceable entity, OperationType operationType, List<Struct> traits) {
+ this.entity = entity;
+ this.operationType = operationType;
+ this.traits = traits;
+ }
+
+ /**
+ * Construct an EntityNotificationV1.
+ *
+ * @param entity the entity subject of the notification
+ * @param operationType the type of operation that caused the notification
+ * @param typeRegistry the Atlas type system
+ */
+ public EntityNotificationV1(Referenceable entity, OperationType operationType, AtlasTypeRegistry typeRegistry) {
+ this(entity, operationType, getAllTraits(entity, typeRegistry));
+ }
+
+ public Referenceable getEntity() {
+ return entity;
+ }
+
+ public void setEntity(Referenceable entity) {
+ this.entity = entity;
+ }
+
+ public OperationType getOperationType() {
+ return operationType;
+ }
+
+ public void setOperationType(OperationType operationType) {
+ this.operationType = operationType;
+ }
+
+ public List<Struct> getTraits() {
+ return traits;
+ }
+
+ public void setTraits(List<Struct> traits) {
+ this.traits = traits;
+ }
+
+ @JsonIgnore
+ public List<Struct> getAllTraits() {
+ return traits;
+ }
+
+ public void normalize() {
+ super.normalize();
+
+ if (entity != null) {
+ entity.normailze();
+ }
+
+ if (traits != null) {
+ for (Struct trait : traits) {
+ if (trait != null) {
+ trait.normailze();
+ }
+ }
+ }
+ }
+
+ // ----- Object overrides --------------------------------------------------
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ EntityNotificationV1 that = (EntityNotificationV1) o;
+ return Objects.equals(entity, that.entity) &&
+ operationType == that.operationType &&
+ Objects.equals(traits, that.traits);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(entity, operationType, traits);
+ }
+
+ @Override
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("EntityNotificationV1{");
+ super.toString(sb);
+ sb.append(", entity=");
+ if (entity != null) {
+ entity.toString(sb);
+ } else {
+ sb.append(entity);
+ }
+ sb.append(", operationType=").append(operationType);
+ sb.append(", traits=[");
+ AtlasBaseTypeDef.dumpObjects(traits, sb);
+ sb.append("]");
+ sb.append("}");
+
+ return sb;
+ }
+
+
+ // ----- helper methods ----------------------------------------------------
+
+ private static List<Struct> getAllTraits(Referenceable entityDefinition, AtlasTypeRegistry typeRegistry) {
+ List<Struct> ret = new LinkedList<>();
+
+ for (String traitName : entityDefinition.getTraitNames()) {
+ Struct trait = entityDefinition.getTrait(traitName);
+ AtlasClassificationType traitType = typeRegistry.getClassificationTypeByName(traitName);
+ Set<String> superTypeNames = traitType != null ? traitType.getAllSuperTypes() : null;
+
+ ret.add(trait);
+
+ if (CollectionUtils.isNotEmpty(superTypeNames)) {
+ for (String superTypeName : superTypeNames) {
+ Struct superTypeTrait = new Struct(superTypeName);
+
+ if (MapUtils.isNotEmpty(trait.getValues())) {
+ AtlasClassificationType superType = typeRegistry.getClassificationTypeByName(superTypeName);
+
+ if (superType != null && MapUtils.isNotEmpty(superType.getAllAttributes())) {
+ Map<String, Object> superTypeTraitAttributes = new HashMap<>();
+
+ for (Map.Entry<String, Object> attrEntry : trait.getValues().entrySet()) {
+ String attrName = attrEntry.getKey();
+
+ if (superType.getAllAttributes().containsKey(attrName)) {
+ superTypeTraitAttributes.put(attrName, attrEntry.getValue());
+ }
+ }
+
+ superTypeTrait.setValues(superTypeTraitAttributes);
+ }
+ }
+
+ ret.add(superTypeTrait);
+ }
+ }
+ }
+
+ return ret;
+ }
+}
[31/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/DeleteHandler.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/DeleteHandler.java b/repository/src/main/java/org/apache/atlas/repository/graph/DeleteHandler.java
deleted file mode 100644
index f0fef1f..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/graph/DeleteHandler.java
+++ /dev/null
@@ -1,468 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import static org.apache.atlas.repository.graph.GraphHelper.EDGE_LABEL_PREFIX;
-import static org.apache.atlas.repository.graph.GraphHelper.string;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.graph.GraphHelper.VertexInfo;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.exception.NullRequiredAttributeException;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.FieldMapping;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.StructType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public abstract class DeleteHandler {
- public static final Logger LOG = LoggerFactory.getLogger(DeleteHandler.class);
-
- protected static final GraphHelper graphHelper = GraphHelper.getInstance();
-
- protected TypeSystem typeSystem;
- private boolean shouldUpdateReverseAttribute;
- private boolean softDelete;
-
- public DeleteHandler(TypeSystem typeSystem, boolean shouldUpdateReverseAttribute, boolean softDelete) {
- this.typeSystem = typeSystem;
- this.shouldUpdateReverseAttribute = shouldUpdateReverseAttribute;
- this.softDelete = softDelete;
- }
-
- /**
- * Deletes the specified entity vertices.
- * Deletes any traits, composite entities, and structs owned by each entity.
- * Also deletes all the references from/to the entity.
- *
- * @param instanceVertices
- * @throws AtlasException
- */
- public void deleteEntities(Collection<AtlasVertex> instanceVertices) throws AtlasException {
- RequestContext requestContext = RequestContext.get();
-
- Set<AtlasVertex> deletionCandidateVertices = new HashSet<>();
-
- for (AtlasVertex instanceVertex : instanceVertices) {
- String guid = GraphHelper.getGuid(instanceVertex);
- Id.EntityState state = GraphHelper.getState(instanceVertex);
- if (requestContext.getDeletedEntityIds().contains(guid) || state == Id.EntityState.DELETED) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Skipping deletion of {} as it is already deleted", guid);
- }
-
- continue;
- }
-
- // Get GUIDs and vertices for all deletion candidates.
- Set<VertexInfo> compositeVertices = graphHelper.getCompositeVertices(instanceVertex);
-
- // Record all deletion candidate GUIDs in RequestContext
- // and gather deletion candidate vertices.
- for (VertexInfo vertexInfo : compositeVertices) {
- requestContext.recordEntityDelete(vertexInfo.getGuid(), vertexInfo.getTypeName());
- deletionCandidateVertices.add(vertexInfo.getVertex());
- }
- }
-
- // Delete traits and vertices.
- for (AtlasVertex deletionCandidateVertex : deletionCandidateVertices) {
- deleteAllTraits(deletionCandidateVertex);
- deleteTypeVertex(deletionCandidateVertex, false);
- }
- }
-
- protected abstract void deleteEdge(AtlasEdge edge, boolean force) throws AtlasException;
-
- /**
- * Deletes a type vertex - can be entity(class type) or just vertex(struct/trait type)
- * @param instanceVertex
- * @param typeCategory
- * @throws AtlasException
- */
- protected void deleteTypeVertex(AtlasVertex instanceVertex, DataTypes.TypeCategory typeCategory, boolean force) throws AtlasException {
- switch (typeCategory) {
- case STRUCT:
- case TRAIT:
- deleteTypeVertex(instanceVertex, force);
- break;
-
- case CLASS:
- deleteEntities(Collections.singletonList(instanceVertex));
- break;
-
- default:
- throw new IllegalStateException("Type category " + typeCategory + " not handled");
- }
- }
-
- /**
- * Deleting any type vertex. Goes over the complex attributes and removes the references
- * @param instanceVertex
- * @throws AtlasException
- */
- protected void deleteTypeVertex(AtlasVertex instanceVertex, boolean force) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Deleting {}", string(instanceVertex));
- }
-
- String typeName = GraphHelper.getTypeName(instanceVertex);
- IDataType type = typeSystem.getDataType(IDataType.class, typeName);
- FieldMapping fieldMapping = getFieldMapping(type);
-
- for (AttributeInfo attributeInfo : fieldMapping.fields.values()) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Deleting attribute {} for {}", attributeInfo.name, string(instanceVertex));
- }
-
- String edgeLabel = GraphHelper.getEdgeLabel(type, attributeInfo);
-
- switch (attributeInfo.dataType().getTypeCategory()) {
- case CLASS:
- //If its class attribute, delete the reference
- deleteEdgeReference(instanceVertex, edgeLabel, DataTypes.TypeCategory.CLASS, attributeInfo.isComposite);
- break;
-
- case STRUCT:
- //If its struct attribute, delete the reference
- deleteEdgeReference(instanceVertex, edgeLabel, DataTypes.TypeCategory.STRUCT, false);
- break;
-
- case ARRAY:
- //For array attribute, if the element is struct/class, delete all the references
- IDataType elementType = ((DataTypes.ArrayType) attributeInfo.dataType()).getElemType();
- DataTypes.TypeCategory elementTypeCategory = elementType.getTypeCategory();
- if (elementTypeCategory == DataTypes.TypeCategory.STRUCT ||
- elementTypeCategory == DataTypes.TypeCategory.CLASS) {
- Iterator<AtlasEdge> edges = graphHelper.getOutGoingEdgesByLabel(instanceVertex, edgeLabel);
- if (edges != null) {
- while (edges.hasNext()) {
- AtlasEdge edge = edges.next();
- deleteEdgeReference(edge, elementType.getTypeCategory(), attributeInfo.isComposite, false);
- }
- }
- }
- break;
-
- case MAP:
- //For map attribute, if the value type is struct/class, delete all the references
- DataTypes.MapType mapType = (DataTypes.MapType) attributeInfo.dataType();
- DataTypes.TypeCategory valueTypeCategory = mapType.getValueType().getTypeCategory();
- String propertyName = GraphHelper.getQualifiedFieldName(type, attributeInfo.name);
-
- if (valueTypeCategory == DataTypes.TypeCategory.STRUCT ||
- valueTypeCategory == DataTypes.TypeCategory.CLASS) {
- List<String> keys = GraphHelper.getListProperty(instanceVertex, propertyName);
- if (keys != null) {
- for (String key : keys) {
- String mapEdgeLabel = GraphHelper.getQualifiedNameForMapKey(edgeLabel, key);
- deleteEdgeReference(instanceVertex, mapEdgeLabel, valueTypeCategory, attributeInfo.isComposite);
- }
- }
- }
- }
- }
-
- deleteVertex(instanceVertex, force);
- }
-
- /**
- * Force delete is used to remove struct/trait in case of entity updates
- * @param edge
- * @param typeCategory
- * @param isComposite
- * @param forceDeleteStructTrait
- * @return returns true if the edge reference is hard deleted
- * @throws AtlasException
- */
- public boolean deleteEdgeReference(AtlasEdge edge, DataTypes.TypeCategory typeCategory, boolean isComposite,
- boolean forceDeleteStructTrait) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Deleting {}", string(edge));
- }
-
- boolean forceDelete =
- (typeCategory == DataTypes.TypeCategory.STRUCT || typeCategory == DataTypes.TypeCategory.TRAIT) && forceDeleteStructTrait;
- if (typeCategory == DataTypes.TypeCategory.STRUCT || typeCategory == DataTypes.TypeCategory.TRAIT
- || (typeCategory == DataTypes.TypeCategory.CLASS && isComposite)) {
- //If the vertex is of type struct/trait, delete the edge and then the reference vertex as the vertex is not shared by any other entities.
- //If the vertex is of type class, and its composite attribute, this reference vertex' lifecycle is controlled
- //through this delete, hence delete the edge and the reference vertex.
- AtlasVertex vertexForDelete = edge.getInVertex();
-
- //If deleting the edge and then the in vertex, reverse attribute shouldn't be updated
- deleteEdge(edge, false, forceDelete);
- deleteTypeVertex(vertexForDelete, typeCategory, forceDelete);
- } else {
- //If the vertex is of type class, and its not a composite attributes, the reference AtlasVertex' lifecycle is not controlled
- //through this delete. Hence just remove the reference edge. Leave the reference AtlasVertex as is
-
- //If deleting just the edge, reverse attribute should be updated for any references
- //For example, for the department type system, if the person's manager edge is deleted, subordinates of manager should be updated
- deleteEdge(edge, true, false);
- }
- return !softDelete || forceDelete;
- }
-
- public void deleteEdgeReference(AtlasVertex outVertex, String edgeLabel, DataTypes.TypeCategory typeCategory,
- boolean isComposite) throws AtlasException {
- AtlasEdge edge = graphHelper.getEdgeForLabel(outVertex, edgeLabel);
- if (edge != null) {
- deleteEdgeReference(edge, typeCategory, isComposite, false);
- }
- }
-
- protected void deleteEdge(AtlasEdge edge, boolean updateReverseAttribute, boolean force) throws AtlasException {
- //update reverse attribute
- if (updateReverseAttribute) {
- AttributeInfo attributeInfo = getAttributeForEdge(edge.getLabel());
- if (attributeInfo.reverseAttributeName != null) {
- deleteEdgeBetweenVertices(edge.getInVertex(), edge.getOutVertex(),
- attributeInfo.reverseAttributeName);
- }
- }
-
- deleteEdge(edge, force);
- }
-
- protected void deleteVertex(AtlasVertex instanceVertex, boolean force) throws AtlasException {
- //Update external references(incoming edges) to this vertex
- if (LOG.isDebugEnabled()) {
- LOG.debug("Setting the external references to {} to null(removing edges)", string(instanceVertex));
- }
-
- for (AtlasEdge edge : (Iterable<AtlasEdge>) instanceVertex.getEdges(AtlasEdgeDirection.IN)) {
- Id.EntityState edgeState = GraphHelper.getState(edge);
- if (edgeState == Id.EntityState.ACTIVE) {
- //Delete only the active edge references
- AttributeInfo attribute = getAttributeForEdge(edge.getLabel());
- //TODO use delete edge instead??
- deleteEdgeBetweenVertices(edge.getOutVertex(), edge.getInVertex(), attribute.name);
- }
- }
- _deleteVertex(instanceVertex, force);
- }
-
- protected abstract void _deleteVertex(AtlasVertex instanceVertex, boolean force);
-
- /**
- * Deletes the edge between outvertex and inVertex. The edge is for attribute attributeName of outVertex
- * @param outVertex
- * @param inVertex
- * @param attributeName
- * @throws AtlasException
- */
- protected void deleteEdgeBetweenVertices(AtlasVertex outVertex, AtlasVertex inVertex, String attributeName) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Removing edge from {} to {} with attribute name {}", string(outVertex), string(inVertex),
- attributeName);
- }
-
- String typeName = GraphHelper.getTypeName(outVertex);
- String outId = GraphHelper.getGuid(outVertex);
- Id.EntityState state = GraphHelper.getState(outVertex);
- if ((outId != null && RequestContext.get().isDeletedEntity(outId)) || state == Id.EntityState.DELETED) {
- //If the reference vertex is marked for deletion, skip updating the reference
- return;
- }
-
- IDataType type = typeSystem.getDataType(IDataType.class, typeName);
- AttributeInfo attributeInfo = getFieldMapping(type).fields.get(attributeName);
- String propertyName = GraphHelper.getQualifiedFieldName(type, attributeName);
- String edgeLabel = EDGE_LABEL_PREFIX + propertyName;
- AtlasEdge edge = null;
-
- switch (attributeInfo.dataType().getTypeCategory()) {
- case CLASS:
- //If its class attribute, its the only edge between two vertices
- if (attributeInfo.multiplicity.nullAllowed()) {
- edge = graphHelper.getEdgeForLabel(outVertex, edgeLabel);
- if (shouldUpdateReverseAttribute) {
- GraphHelper.setProperty(outVertex, propertyName, null);
- }
- } else {
- // Cannot unset a required attribute.
- throw new NullRequiredAttributeException("Cannot unset required attribute " + GraphHelper.getQualifiedFieldName(type, attributeName) +
- " on " + GraphHelper.getVertexDetails(outVertex) + " edge = " + edgeLabel);
- }
- break;
-
- case ARRAY:
- //If its array attribute, find the right edge between the two vertices and update array property
- List<String> elements = GraphHelper.getListProperty(outVertex, propertyName);
- if (elements != null) {
- elements = new ArrayList<>(elements); //Make a copy, else list.remove reflects on titan.getProperty()
- for (String elementEdgeId : elements) {
- AtlasEdge elementEdge = graphHelper.getEdgeByEdgeId(outVertex, edgeLabel, elementEdgeId);
- if (elementEdge == null) {
- continue;
- }
-
- AtlasVertex elementVertex = elementEdge.getInVertex();
- if (elementVertex.equals(inVertex)) {
- edge = elementEdge;
-
- //TODO element.size includes deleted items as well. should exclude
- if (!attributeInfo.multiplicity.nullAllowed()
- && elements.size() <= attributeInfo.multiplicity.lower) {
- // Deleting this edge would violate the attribute's lower bound.
- throw new NullRequiredAttributeException(
- "Cannot remove array element from required attribute " +
- GraphHelper.getQualifiedFieldName(type, attributeName) + " on "
- + GraphHelper.getVertexDetails(outVertex) + " " + GraphHelper.getEdgeDetails(elementEdge));
- }
-
- if (shouldUpdateReverseAttribute) {
- //if composite attribute, remove the reference as well. else, just remove the edge
- //for example, when table is deleted, process still references the table
- //but when column is deleted, table will not reference the deleted column
- if (LOG.isDebugEnabled()) {
- LOG.debug("Removing edge {} from the array attribute {}", string(elementEdge),
- attributeName);
- }
-
- // Remove all occurrences of the edge ID from the list.
- // This prevents dangling edge IDs (i.e. edge IDs for deleted edges)
- // from the remaining in the list if there are duplicates.
- elements.removeAll(Collections.singletonList(elementEdge.getId().toString()));
- GraphHelper.setProperty(outVertex, propertyName, elements);
- break;
-
- }
- }
- }
- }
- break;
-
- case MAP:
- //If its map attribute, find the right edge between two vertices and update map property
- List<String> keys = GraphHelper.getListProperty(outVertex, propertyName);
- if (keys != null) {
- keys = new ArrayList<>(keys); //Make a copy, else list.remove reflects on titan.getProperty()
- for (String key : keys) {
- String keyPropertyName = GraphHelper.getQualifiedNameForMapKey(propertyName, key);
- String mapEdgeId = GraphHelper.getSingleValuedProperty(outVertex, keyPropertyName, String.class);
- AtlasEdge mapEdge = graphHelper.getEdgeByEdgeId(outVertex, keyPropertyName, mapEdgeId);
- if(mapEdge != null) {
- AtlasVertex mapVertex = mapEdge.getInVertex();
- if (mapVertex.getId().toString().equals(inVertex.getId().toString())) {
- //TODO keys.size includes deleted items as well. should exclude
- if (attributeInfo.multiplicity.nullAllowed() || keys.size() > attributeInfo.multiplicity.lower) {
- edge = mapEdge;
- } else {
- // Deleting this entry would violate the attribute's lower bound.
- throw new NullRequiredAttributeException(
- "Cannot remove map entry " + keyPropertyName + " from required attribute " +
- GraphHelper.getQualifiedFieldName(type, attributeName) + " on " + GraphHelper.getVertexDetails(outVertex) + " " + GraphHelper.getEdgeDetails(mapEdge));
- }
-
- if (shouldUpdateReverseAttribute) {
- //remove this key
- if (LOG.isDebugEnabled()) {
- LOG.debug("Removing edge {}, key {} from the map attribute {}", string(mapEdge), key,
- attributeName);
- }
-
- keys.remove(key);
- GraphHelper.setProperty(outVertex, propertyName, keys);
- GraphHelper.setProperty(outVertex, keyPropertyName, null);
- }
- break;
- }
- }
- }
- }
- break;
-
- case STRUCT:
- case TRAIT:
- break;
-
- default:
- throw new IllegalStateException("There can't be an edge from " + GraphHelper.getVertexDetails(outVertex) + " to "
- + GraphHelper.getVertexDetails(inVertex) + " with attribute name " + attributeName + " which is not class/array/map attribute");
- }
-
- if (edge != null) {
- deleteEdge(edge, false);
- RequestContext requestContext = RequestContext.get();
- GraphHelper.setProperty(outVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY,
- requestContext.getRequestTime());
- GraphHelper.setProperty(outVertex, Constants.MODIFIED_BY_KEY, requestContext.getUser());
- requestContext.recordEntityUpdate(outId);
- }
- }
-
- protected AttributeInfo getAttributeForEdge(String edgLabel) throws AtlasException {
- AtlasEdgeLabel atlasEdgeLabel = new AtlasEdgeLabel(edgLabel);
- IDataType referenceType = typeSystem.getDataType(IDataType.class, atlasEdgeLabel.getTypeName());
- return getFieldMapping(referenceType).fields.get(atlasEdgeLabel.getAttributeName());
- }
-
- protected FieldMapping getFieldMapping(IDataType type) {
- switch (type.getTypeCategory()) {
- case CLASS:
- case TRAIT:
- return ((HierarchicalType)type).fieldMapping();
-
- case STRUCT:
- return ((StructType)type).fieldMapping();
-
- default:
- throw new IllegalStateException("Type " + type + " doesn't have any fields!");
- }
- }
-
- /**
- * Delete all traits from the specified vertex.
- * @param instanceVertex
- * @throws AtlasException
- */
- private void deleteAllTraits(AtlasVertex instanceVertex) throws AtlasException {
- List<String> traitNames = GraphHelper.getTraitNames(instanceVertex);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Deleting traits {} for {}", traitNames, string(instanceVertex));
- }
-
- String typeName = GraphHelper.getTypeName(instanceVertex);
-
- for (String traitNameToBeDeleted : traitNames) {
- String relationshipLabel = GraphHelper.getTraitLabel(typeName, traitNameToBeDeleted);
- deleteEdgeReference(instanceVertex, relationshipLabel, DataTypes.TypeCategory.TRAIT, false);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/EntityProcessor.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/EntityProcessor.java b/repository/src/main/java/org/apache/atlas/repository/graph/EntityProcessor.java
deleted file mode 100644
index 892b36d..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/graph/EntityProcessor.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.ObjectGraphWalker;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-@Deprecated
-public final class EntityProcessor implements ObjectGraphWalker.NodeProcessor {
-
- private final Map<Id, IReferenceableInstance> idToInstanceMap;
-
- public EntityProcessor() {
- idToInstanceMap = new LinkedHashMap<>();
- }
-
- public Collection<IReferenceableInstance> getInstances() {
- ArrayList<IReferenceableInstance> instances = new ArrayList<>(idToInstanceMap.values());
- Collections.reverse(instances);
- return instances;
- }
-
- @Override
- public void processNode(ObjectGraphWalker.Node nd) throws AtlasException {
- IReferenceableInstance ref = null;
- Id id = null;
-
- if (nd.attributeName == null) {
- ref = (IReferenceableInstance) nd.instance;
- id = ref.getId();
- } else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- if (nd.value != null && (nd.value instanceof Id)) {
- id = (Id) nd.value;
- }
- }
-
- if (id != null) {
- if (id.isUnassigned()) {
- if (ref != null) {
- if (idToInstanceMap.containsKey(id)) { // Oops
- throw new RepositoryException(
- String.format("Unexpected internal error: Id %s processed again", id));
- }
-
- idToInstanceMap.put(id, ref);
- }
- }
- }
- }
-
- public void addInstanceIfNotExists(ITypedReferenceableInstance ref) {
- if(!idToInstanceMap.containsKey(ref.getId())) {
- idToInstanceMap.put(ref.getId(), ref);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/FullTextMapper.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/FullTextMapper.java b/repository/src/main/java/org/apache/atlas/repository/graph/FullTextMapper.java
deleted file mode 100644
index 2e8ae0c..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/graph/FullTextMapper.java
+++ /dev/null
@@ -1,174 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.ITypedInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.EnumValue;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.List;
-import java.util.Map;
-
-@Deprecated
-public class FullTextMapper {
-
- private static final Logger LOG = LoggerFactory.getLogger(FullTextMapper.class);
-
- private final GraphToTypedInstanceMapper graphToTypedInstanceMapper;
- private final TypedInstanceToGraphMapper typedInstanceToGraphMapper;
-
- private static final GraphHelper graphHelper = GraphHelper.getInstance();
-
- private static final String FULL_TEXT_DELIMITER = " ";
-
- public FullTextMapper(TypedInstanceToGraphMapper typedInstanceToGraphMapper,
- GraphToTypedInstanceMapper graphToTypedInstanceMapper) {
- this.graphToTypedInstanceMapper = graphToTypedInstanceMapper;
- this.typedInstanceToGraphMapper = typedInstanceToGraphMapper;
- }
-
- public String mapRecursive(AtlasVertex instanceVertex, boolean followReferences) throws AtlasException {
- String guid = GraphHelper.getGuid(instanceVertex);
- ITypedReferenceableInstance typedReference;
- RequestContext context = RequestContext.get();
- typedReference = context.getInstanceV1(guid);
- if (typedReference != null) {
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Cache hit: guid = {}, entityId = {}", guid, typedReference.getId()._getId());
- }
- } else {
- typedReference =
- graphToTypedInstanceMapper.mapGraphToTypedInstance(guid, instanceVertex);
- context.cache(typedReference);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Cache miss: guid = {}, entityId = {}", guid, typedReference.getId().getId());
- }
- }
- String fullText = forInstance(typedReference, followReferences);
- StringBuilder fullTextBuilder =
- new StringBuilder(typedReference.getTypeName()).append(FULL_TEXT_DELIMITER).append(fullText);
-
- List<String> traits = typedReference.getTraits();
- for (String traitName : traits) {
- String traitText = forInstance((ITypedInstance) typedReference.getTrait(traitName), false);
- fullTextBuilder.append(FULL_TEXT_DELIMITER).append(traitName).append(FULL_TEXT_DELIMITER)
- .append(traitText);
- }
- return fullTextBuilder.toString();
- }
-
- private String forAttribute(IDataType type, Object value, boolean followReferences)
- throws AtlasException {
- if (value == null) {
- return null;
- }
- switch (type.getTypeCategory()) {
- case PRIMITIVE:
- return String.valueOf(value);
- case ENUM:
-
- return ((EnumValue) value).value;
-
- case ARRAY:
- StringBuilder fullText = new StringBuilder();
- IDataType elemType = ((DataTypes.ArrayType) type).getElemType();
- List list = (List) value;
-
- for (Object element : list) {
- String elemFullText = forAttribute(elemType, element, false);
- if (StringUtils.isNotEmpty(elemFullText)) {
- fullText = fullText.append(FULL_TEXT_DELIMITER).append(elemFullText);
- }
- }
- return fullText.toString();
-
- case MAP:
- fullText = new StringBuilder();
- IDataType keyType = ((DataTypes.MapType) type).getKeyType();
- IDataType valueType = ((DataTypes.MapType) type).getValueType();
- Map map = (Map) value;
-
- for (Object entryObj : map.entrySet()) {
- Map.Entry entry = (Map.Entry) entryObj;
- String keyFullText = forAttribute(keyType, entry.getKey(), false);
- if (StringUtils.isNotEmpty(keyFullText)) {
- fullText = fullText.append(FULL_TEXT_DELIMITER).append(keyFullText);
- }
- String valueFullText = forAttribute(valueType, entry.getValue(), false);
- if (StringUtils.isNotEmpty(valueFullText)) {
- fullText = fullText.append(FULL_TEXT_DELIMITER).append(valueFullText);
- }
- }
- return fullText.toString();
-
- case CLASS:
- if (followReferences) {
- Id refId = ((ITypedReferenceableInstance) value).getId();
- String refGuid = refId._getId();
- AtlasVertex refVertex = typedInstanceToGraphMapper.lookupVertex(refId);
- if(refVertex == null) {
- refVertex = graphHelper.getVertexForGUID(refGuid);
- }
- return mapRecursive(refVertex, false);
- }
- break;
-
- case STRUCT:
- if (followReferences) {
- return forInstance((ITypedInstance) value, true);
- }
- break;
-
- default:
- throw new IllegalStateException("Unhandled type category " + type.getTypeCategory());
-
- }
- return null;
- }
-
- private String forInstance(ITypedInstance typedInstance, boolean followReferences)
- throws AtlasException {
- StringBuilder fullText = new StringBuilder();
- for (AttributeInfo attributeInfo : typedInstance.fieldMapping().fields.values()) {
- Object attrValue = typedInstance.get(attributeInfo.name);
- if (attrValue == null) {
- continue;
- }
-
- String attrFullText = forAttribute(attributeInfo.dataType(), attrValue, followReferences);
- if (StringUtils.isNotEmpty(attrFullText)) {
- fullText =
- fullText.append(FULL_TEXT_DELIMITER).append(attributeInfo.name).append(FULL_TEXT_DELIMITER)
- .append(attrFullText);
- }
- }
- return fullText.toString();
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/FullTextMapperV2.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/FullTextMapperV2.java b/repository/src/main/java/org/apache/atlas/repository/graph/FullTextMapperV2.java
index 76acf8c..c42aa15 100644
--- a/repository/src/main/java/org/apache/atlas/repository/graph/FullTextMapperV2.java
+++ b/repository/src/main/java/org/apache/atlas/repository/graph/FullTextMapperV2.java
@@ -17,7 +17,7 @@
*/
package org.apache.atlas.repository.graph;
-import org.apache.atlas.RequestContext;
+import org.apache.atlas.RequestContextV1;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.instance.AtlasClassification;
import org.apache.atlas.model.instance.AtlasEntity;
@@ -203,7 +203,7 @@ public class FullTextMapperV2 {
}
private AtlasEntityWithExtInfo getAndCacheEntity(String guid) throws AtlasBaseException {
- RequestContext context = RequestContext.get();
+ RequestContextV1 context = RequestContextV1.get();
AtlasEntityWithExtInfo entityWithExtInfo = context.getInstanceV2(guid);
if (entityWithExtInfo == null) {
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java b/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java
deleted file mode 100755
index 74886b5..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepository.java
+++ /dev/null
@@ -1,505 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import com.google.common.base.Preconditions;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.CreateUpdateEntitiesResult;
-import org.apache.atlas.GraphTransactionInterceptor;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.annotation.GraphTransaction;
-import org.apache.atlas.model.instance.GuidMapping;
-import org.apache.atlas.model.legacy.EntityResult;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasGraphQuery;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.exception.EntityExistsException;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.TraitNotFoundException;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Component;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import java.util.*;
-
-/**
- * An implementation backed by a Graph database provided
- * as a Graph Service.
- */
-@Singleton
-@Component
-@Deprecated
-public class GraphBackedMetadataRepository implements MetadataRepository {
-
- private static final Logger LOG = LoggerFactory.getLogger(GraphBackedMetadataRepository.class);
-
- private static TypeSystem typeSystem = TypeSystem.getInstance();
-
- private static final GraphHelper graphHelper = GraphHelper.getInstance();
-
- private DeleteHandler deleteHandler;
-
- private final AtlasGraph atlasGraph;
- private final GraphToTypedInstanceMapper graphToInstanceMapper;
-
- @Inject
- public GraphBackedMetadataRepository(DeleteHandler deleteHandler, AtlasGraph atlasGraph) {
- this.atlasGraph = atlasGraph;
- this.graphToInstanceMapper = new GraphToTypedInstanceMapper(atlasGraph);
- this.deleteHandler = deleteHandler;
- }
-
- public GraphToTypedInstanceMapper getGraphToInstanceMapper() {
- return graphToInstanceMapper;
- }
-
- @Override
- public String getTypeAttributeName() {
- return Constants.ENTITY_TYPE_PROPERTY_KEY;
- }
-
- @Override
- public String getStateAttributeName() {
- return Constants.STATE_PROPERTY_KEY;
- }
-
- /**
- * Returns the property key used to store super type names.
- *
- * @return property key used to store super type names.
- */
- @Override
- public String getSuperTypeAttributeName() {
- return Constants.SUPER_TYPES_PROPERTY_KEY;
- }
-
- public String getIdAttributeName() {
- return Constants.GUID_PROPERTY_KEY;
- }
-
- @Override
- public String getVersionAttributeName() {
- return Constants.VERSION_PROPERTY_KEY;
- }
-
- @Override
- public String getTraitLabel(IDataType<?> dataType, String traitName) {
- return GraphHelper.getTraitLabel(dataType.getName(), traitName);
- }
-
- @Override
- public String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) throws AtlasException {
- if (aInfo.name.startsWith(Constants.INTERNAL_PROPERTY_KEY_PREFIX)) {
- return aInfo.name;
- }
- return GraphHelper.encodePropertyKey(GraphHelper.getQualifiedFieldName(dataType, aInfo.name));
- }
-
- public String getFieldNameInVertex(IDataType<?> dataType, String attrName) throws AtlasException {
- return GraphHelper.getQualifiedFieldName(dataType, attrName);
- }
-
- @Override
- public String getEdgeLabel(IDataType<?> dataType, AttributeInfo aInfo) throws AtlasException {
- return GraphHelper.getEdgeLabel(dataType, aInfo);
- }
-
- @Override
- @GraphTransaction
- public CreateUpdateEntitiesResult createEntities(ITypedReferenceableInstance... entities) throws RepositoryException,
- EntityExistsException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("adding entities={}", entities);
- }
-
- try {
- TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper, deleteHandler);
- instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.CREATE, entities);
- List<String> createdGuids = RequestContext.get().getCreatedEntityIds();
- CreateUpdateEntitiesResult result = new CreateUpdateEntitiesResult();
- EntityResult entityResult = new EntityResult(createdGuids, null, null);
- GuidMapping mapping = instanceToGraphMapper.createGuidMapping();
- result.setEntityResult(entityResult);
- result.setGuidMapping(mapping);
- return result;
- } catch (EntityExistsException e) {
- throw e;
- } catch (AtlasException e) {
- throw new RepositoryException(e);
- }
- }
-
- @Override
- @GraphTransaction
- public ITypedReferenceableInstance getEntityDefinition(String guid) throws RepositoryException, EntityNotFoundException {
- return getEntityDefinitions(guid).get(0);
- }
-
- @Override
- @GraphTransaction
- public List<ITypedReferenceableInstance> getEntityDefinitions(String... guids) throws RepositoryException, EntityNotFoundException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Retrieving entities with guids={}", Arrays.toString(guids));
- }
-
- RequestContext context = RequestContext.get();
- ITypedReferenceableInstance[] result = new ITypedReferenceableInstance[guids.length];
-
- // Map of the guids of instances not in the cache to their index(es) in the result.
- // This is used to put the loaded instances into the location(s) corresponding
- // to their guid in the result. Note that a set is needed since guids can
- // appear more than once in the list.
- Map<String, Set<Integer>> uncachedGuids = new HashMap<>();
-
- for (int i = 0; i < guids.length; i++) {
- String guid = guids[i];
-
- // First, check the cache.
- ITypedReferenceableInstance cached = context.getInstanceV1(guid);
- if (cached != null) {
- result[i] = cached;
- } else {
- Set<Integer> indices = uncachedGuids.get(guid);
- if (indices == null) {
- indices = new HashSet<>(1);
- uncachedGuids.put(guid, indices);
- }
- indices.add(i);
- }
- }
-
- List<String> guidsToFetch = new ArrayList<>(uncachedGuids.keySet());
- Map<String, AtlasVertex> instanceVertices = graphHelper.getVerticesForGUIDs(guidsToFetch);
-
- // search for missing entities
- if (instanceVertices.size() != guidsToFetch.size()) {
- Set<String> missingGuids = new HashSet<String>(guidsToFetch);
- missingGuids.removeAll(instanceVertices.keySet());
- if (!missingGuids.isEmpty()) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Failed to find guids={}", missingGuids);
- }
- throw new EntityNotFoundException(
- "Could not find entities in the repository with guids: " + missingGuids.toString());
- }
- }
-
- for (String guid : guidsToFetch) {
- try {
- ITypedReferenceableInstance entity = graphToInstanceMapper.mapGraphToTypedInstance(guid, instanceVertices.get(guid));
- for(int index : uncachedGuids.get(guid)) {
- result[index] = entity;
- }
- } catch (AtlasException e) {
- throw new RepositoryException(e);
- }
- }
- return Arrays.asList(result);
- }
-
- @Override
- @GraphTransaction
- public ITypedReferenceableInstance getEntityDefinition(String entityType, String attribute, Object value)
- throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Retrieving entity with type={} and {}={}", entityType, attribute, value);
- }
-
- IDataType type = typeSystem.getDataType(IDataType.class, entityType);
- String propertyKey = getFieldNameInVertex(type, attribute);
- AtlasVertex instanceVertex = graphHelper.findVertex(propertyKey, value,
- Constants.ENTITY_TYPE_PROPERTY_KEY, entityType,
- Constants.STATE_PROPERTY_KEY, Id.EntityState.ACTIVE.name());
-
- String guid = GraphHelper.getGuid(instanceVertex);
- ITypedReferenceableInstance cached = RequestContext.get().getInstanceV1(guid);
- if(cached != null) {
- return cached;
- }
- return graphToInstanceMapper.mapGraphToTypedInstance(guid, instanceVertex);
- }
-
- @Override
- @GraphTransaction
- public List<String> getEntityList(String entityType) throws RepositoryException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Retrieving entity list for type={}", entityType);
- }
-
- AtlasGraphQuery query = getGraph().query().has(Constants.ENTITY_TYPE_PROPERTY_KEY, entityType);
- Iterator<AtlasVertex> results = query.vertices().iterator();
- if (!results.hasNext()) {
- return Collections.emptyList();
- }
-
- ArrayList<String> entityList = new ArrayList<>();
- while (results.hasNext()) {
- AtlasVertex vertex = results.next();
- entityList.add(GraphHelper.getGuid(vertex));
- }
-
- return entityList;
- }
-
- /**
- * Gets the list of trait names for a given entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @return a list of trait names for the given entity guid
- * @throws RepositoryException
- */
- @Override
- @GraphTransaction
- public List<String> getTraitNames(String guid) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Retrieving trait names for entity={}", guid);
- }
-
- AtlasVertex instanceVertex = graphHelper.getVertexForGUID(guid);
- return GraphHelper.getTraitNames(instanceVertex);
- }
-
- /**
- * Adds a new trait to the list of entities represented by their respective guids
- * @param entityGuids list of globally unique identifier for the entities
- * @param traitInstance trait instance that needs to be added to entities
- * @throws RepositoryException
- */
- @Override
- @GraphTransaction
- public void addTrait(List<String> entityGuids, ITypedStruct traitInstance) throws RepositoryException {
- Preconditions.checkNotNull(entityGuids, "entityGuids list cannot be null");
- Preconditions.checkNotNull(traitInstance, "Trait instance cannot be null");
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Adding a new trait={} for entities={}", traitInstance.getTypeName(), entityGuids);
- }
-
- GraphTransactionInterceptor.lockObjectAndReleasePostCommit(entityGuids);
- for (String entityGuid : entityGuids) {
- addTraitImpl(entityGuid, traitInstance);
- }
- }
-
- /**
- * Adds a new trait to an existing entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @param traitInstance trait instance that needs to be added to entity
- * @throws RepositoryException
- */
- @Override
- @GraphTransaction
- public void addTrait(String guid, ITypedStruct traitInstance) throws RepositoryException {
- Preconditions.checkNotNull(guid, "guid cannot be null");
- Preconditions.checkNotNull(traitInstance, "Trait instance cannot be null");
-
- GraphTransactionInterceptor.lockObjectAndReleasePostCommit(guid);
- addTraitImpl(guid, traitInstance);
- }
-
- private void addTraitImpl(String guid, ITypedStruct traitInstance) throws RepositoryException {
- final String traitName = traitInstance.getTypeName();
- if (LOG.isDebugEnabled()) {
- LOG.debug("Adding a new trait={} for entity={}", traitName, guid);
- }
-
- try {
- AtlasVertex instanceVertex = graphHelper.getVertexForGUID(guid);
-
- // add the trait instance as a new vertex
- final String typeName = GraphHelper.getTypeName(instanceVertex);
-
- TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper, deleteHandler);
- instanceToGraphMapper.mapTraitInstanceToVertex(traitInstance,
- typeSystem.getDataType(ClassType.class, typeName), instanceVertex);
-
-
- // update the traits in entity once adding trait instance is successful
- GraphHelper.addProperty(instanceVertex, Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
- GraphHelper.setProperty(instanceVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY,
- RequestContext.get().getRequestTime());
- GraphHelper.setProperty(instanceVertex, Constants.MODIFIED_BY_KEY, RequestContext.get().getUser());
-
- } catch (RepositoryException e) {
- throw e;
- } catch (Exception e) {
- throw new RepositoryException(e);
- }
- }
-
- /**
- * Deletes a given trait from an existing entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @param traitNameToBeDeleted name of the trait
- * @throws RepositoryException
- */
- @Override
- @GraphTransaction
- public void deleteTrait(String guid, String traitNameToBeDeleted) throws TraitNotFoundException, EntityNotFoundException, RepositoryException {
- LOG.debug("Deleting trait={} from entity={}", traitNameToBeDeleted, guid);
- GraphTransactionInterceptor.lockObjectAndReleasePostCommit(guid);
-
- AtlasVertex instanceVertex = graphHelper.getVertexForGUID(guid);
-
- List<String> traitNames = GraphHelper.getTraitNames(instanceVertex);
- if (!traitNames.contains(traitNameToBeDeleted)) {
- throw new TraitNotFoundException(
- "Could not find trait=" + traitNameToBeDeleted + " in the repository for entity: " + guid);
- }
-
- try {
- final String entityTypeName = GraphHelper.getTypeName(instanceVertex);
- String relationshipLabel = GraphHelper.getTraitLabel(entityTypeName, traitNameToBeDeleted);
- AtlasEdge edge = graphHelper.getEdgeForLabel(instanceVertex, relationshipLabel);
- if(edge != null) {
- deleteHandler.deleteEdgeReference(edge, DataTypes.TypeCategory.TRAIT, false, true);
- }
-
- // update the traits in entity once trait removal is successful
- traitNames.remove(traitNameToBeDeleted);
- updateTraits(instanceVertex, traitNames);
- } catch (Exception e) {
- throw new RepositoryException(e);
- }
- }
-
-
- private void updateTraits(AtlasVertex instanceVertex, List<String> traitNames) {
- // remove the key
- instanceVertex.removeProperty(Constants.TRAIT_NAMES_PROPERTY_KEY);
-
- // add it back again
- for (String traitName : traitNames) {
- GraphHelper.addProperty(instanceVertex, Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
- }
- GraphHelper.setProperty(instanceVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY,
- RequestContext.get().getRequestTime());
- GraphHelper.setProperty(instanceVertex, Constants.MODIFIED_BY_KEY, RequestContext.get().getUser());
- }
-
- @Override
- @GraphTransaction
- public CreateUpdateEntitiesResult updateEntities(ITypedReferenceableInstance... entitiesUpdated) throws RepositoryException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("updating entity {}", entitiesUpdated);
- }
-
- try {
- TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper, deleteHandler);
- instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.UPDATE_FULL,
- entitiesUpdated);
- CreateUpdateEntitiesResult result = new CreateUpdateEntitiesResult();
- RequestContext requestContext = RequestContext.get();
- result.setEntityResult(createEntityResultFromContext(requestContext));
- GuidMapping mapping = instanceToGraphMapper.createGuidMapping();
- result.setGuidMapping(mapping);
- return result;
- } catch (AtlasException e) {
- throw new RepositoryException(e);
- }
- }
-
- @Override
- @GraphTransaction
- public CreateUpdateEntitiesResult updatePartial(ITypedReferenceableInstance entity) throws RepositoryException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("updating entity {}", entity);
- }
-
- try {
- TypedInstanceToGraphMapper instanceToGraphMapper = new TypedInstanceToGraphMapper(graphToInstanceMapper, deleteHandler);
- instanceToGraphMapper.mapTypedInstanceToGraph(TypedInstanceToGraphMapper.Operation.UPDATE_PARTIAL, entity);
- RequestContext requestContext = RequestContext.get();
- CreateUpdateEntitiesResult result = new CreateUpdateEntitiesResult();
- GuidMapping mapping = instanceToGraphMapper.createGuidMapping();
- result.setEntityResult(createEntityResultFromContext(requestContext));
- result.setGuidMapping(mapping);
- return result;
- } catch (AtlasException e) {
- throw new RepositoryException(e);
- }
- }
-
-
-
- @Override
- @GraphTransaction
- public EntityResult deleteEntities(List<String> guids) throws RepositoryException {
-
- if (guids == null || guids.size() == 0) {
- throw new IllegalArgumentException("guids must be non-null and non-empty");
- }
-
- // Retrieve vertices for requested guids.
- Map<String, AtlasVertex> vertices = graphHelper.getVerticesForGUIDs(guids);
- Collection<AtlasVertex> deletionCandidates = vertices.values();
-
- if(LOG.isDebugEnabled()) {
- for(String guid : guids) {
- if(! vertices.containsKey(guid)) {
- // Entity does not exist - treat as non-error, since the caller
- // wanted to delete the entity and it's already gone.
- LOG.debug("Deletion request ignored for non-existent entity with guid " + guid);
- }
- }
- }
-
- if (deletionCandidates.isEmpty()) {
- LOG.info("No deletion candidate entities were found for guids %s", guids);
- return new EntityResult(Collections.<String>emptyList(), Collections.<String>emptyList(), Collections.<String>emptyList());
- }
-
- try {
- deleteHandler.deleteEntities(deletionCandidates);
- }
- catch (AtlasException e) {
- throw new RepositoryException(e);
- }
-
- RequestContext requestContext = RequestContext.get();
- return createEntityResultFromContext(requestContext);
- }
-
- private EntityResult createEntityResultFromContext(RequestContext requestContext) {
- return new EntityResult(
- requestContext.getCreatedEntityIds(),
- requestContext.getUpdatedEntityIds(),
- requestContext.getDeletedEntityIds());
- }
-
- public AtlasGraph getGraph() throws RepositoryException {
- return atlasGraph;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexer.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexer.java b/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexer.java
index 6eee24b..9f1206c 100755
--- a/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexer.java
+++ b/repository/src/main/java/org/apache/atlas/repository/graph/GraphBackedSearchIndexer.java
@@ -34,7 +34,6 @@ import org.apache.atlas.model.typedef.AtlasEnumDef;
import org.apache.atlas.model.typedef.AtlasStructDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.IndexCreationException;
import org.apache.atlas.repository.IndexException;
import org.apache.atlas.repository.RepositoryException;
import org.apache.atlas.repository.graphdb.AtlasCardinality;
@@ -49,13 +48,6 @@ import org.apache.atlas.type.AtlasStructType;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.atlas.type.AtlasTypeUtil;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructType;
-import org.apache.atlas.typesystem.types.TraitType;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.configuration.Configuration;
import org.slf4j.Logger;
@@ -66,11 +58,9 @@ import javax.inject.Inject;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
-import java.util.Map;
import java.util.Set;
import static org.apache.atlas.model.typedef.AtlasBaseTypeDef.*;
@@ -80,8 +70,7 @@ import static org.apache.atlas.model.typedef.AtlasBaseTypeDef.*;
* Adds index for properties of a given type when its added before any instances are added.
*/
@Component
-public class GraphBackedSearchIndexer implements SearchIndexer, ActiveStateChangeHandler,
- TypeDefChangeListener {
+public class GraphBackedSearchIndexer implements SearchIndexer, ActiveStateChangeHandler, TypeDefChangeListener {
private static final Logger LOG = LoggerFactory.getLogger(GraphBackedSearchIndexer.class);
@@ -118,6 +107,110 @@ public class GraphBackedSearchIndexer implements SearchIndexer, ActiveStateChang
}
/**
+ * Initialize global indices for Titan graph on server activation.
+ *
+ * Since the indices are shared state, we need to do this only from an active instance.
+ */
+ @Override
+ public void instanceIsActive() throws AtlasException {
+ LOG.info("Reacting to active: initializing index");
+ try {
+ initialize();
+ } catch (RepositoryException | IndexException e) {
+ throw new AtlasException("Error in reacting to active on initialization", e);
+ }
+ }
+
+ @Override
+ public void instanceIsPassive() {
+ LOG.info("Reacting to passive state: No action right now.");
+ }
+
+ @Override
+ public int getHandlerOrder() {
+ return HandlerOrder.GRAPH_BACKED_SEARCH_INDEXER.getOrder();
+ }
+
+ @Override
+ public void onChange(ChangedTypeDefs changedTypeDefs) throws AtlasBaseException {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Processing changed typedefs {}", changedTypeDefs);
+ }
+ AtlasGraphManagement management = null;
+ try {
+ management = provider.get().getManagementSystem();
+
+ // Update index for newly created types
+ if (CollectionUtils.isNotEmpty(changedTypeDefs.getCreateTypeDefs())) {
+ for (AtlasBaseTypeDef typeDef : changedTypeDefs.getCreateTypeDefs()) {
+ updateIndexForTypeDef(management, typeDef);
+ }
+ }
+
+ // Update index for updated types
+ if (CollectionUtils.isNotEmpty(changedTypeDefs.getUpdatedTypeDefs())) {
+ for (AtlasBaseTypeDef typeDef : changedTypeDefs.getUpdatedTypeDefs()) {
+ updateIndexForTypeDef(management, typeDef);
+ }
+ }
+
+ // Invalidate the property key for deleted types
+ if (CollectionUtils.isNotEmpty(changedTypeDefs.getDeletedTypeDefs())) {
+ for (AtlasBaseTypeDef typeDef : changedTypeDefs.getDeletedTypeDefs()) {
+ cleanupIndices(management, typeDef);
+ }
+ }
+
+ //Commit indexes
+ commit(management);
+ } catch (RepositoryException | IndexException e) {
+ LOG.error("Failed to update indexes for changed typedefs", e);
+ attemptRollback(changedTypeDefs, management);
+ }
+
+ }
+
+ public Set<String> getVertexIndexKeys() {
+ if (recomputeIndexedKeys) {
+ AtlasGraphManagement management = null;
+
+ try {
+ management = provider.get().getManagementSystem();
+
+ if (management != null) {
+ AtlasGraphIndex vertexIndex = management.getGraphIndex(Constants.VERTEX_INDEX);
+
+ if (vertexIndex != null) {
+ recomputeIndexedKeys = false;
+
+ Set<String> indexKeys = new HashSet<>();
+
+ for (AtlasPropertyKey fieldKey : vertexIndex.getFieldKeys()) {
+ indexKeys.add(fieldKey.getName());
+ }
+
+ vertexIndexKeys = indexKeys;
+ }
+
+ management.commit();
+ }
+ } catch (Exception excp) {
+ LOG.error("getVertexIndexKeys(): failed to get indexedKeys from graph", excp);
+
+ if (management != null) {
+ try {
+ management.rollback();
+ } catch (Exception e) {
+ LOG.error("getVertexIndexKeys(): rollback failed", e);
+ }
+ }
+ }
+ }
+
+ return vertexIndexKeys;
+ }
+
+ /**
* Initializes the indices for the graph - create indices for Global AtlasVertex Keys
*/
private void initialize() throws RepositoryException, IndexException {
@@ -220,81 +313,6 @@ public class GraphBackedSearchIndexer implements SearchIndexer, ActiveStateChang
true, true);
}
- /**
- * This is upon adding a new type to Store.
- *
- * @param dataTypes data type
- * @throws AtlasException
- */
- @Override
- public void onAdd(Collection<? extends IDataType> dataTypes) throws AtlasException {
- AtlasGraphManagement management = provider.get().getManagementSystem();
-
- for (IDataType dataType : dataTypes) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Creating indexes for type name={}, definition={}", dataType.getName(), dataType.getClass());
- }
-
- try {
- addIndexForType(management, dataType);
- LOG.info("Index creation for type {} complete", dataType.getName());
- } catch (Throwable throwable) {
- LOG.error("Error creating index for type {}", dataType, throwable);
- //Rollback indexes if any failure
- rollback(management);
- throw new IndexCreationException("Error while creating index for type " + dataType, throwable);
- }
- }
-
- //Commit indexes
- commit(management);
- }
-
- @Override
- public void onChange(Collection<? extends IDataType> dataTypes) throws AtlasException {
- onAdd(dataTypes);
- }
-
- public Set<String> getVertexIndexKeys() {
- if (recomputeIndexedKeys) {
- AtlasGraphManagement management = null;
-
- try {
- management = provider.get().getManagementSystem();
-
- if (management != null) {
- AtlasGraphIndex vertexIndex = management.getGraphIndex(Constants.VERTEX_INDEX);
-
- if (vertexIndex != null) {
- recomputeIndexedKeys = false;
-
- Set<String> indexKeys = new HashSet<>();
-
- for (AtlasPropertyKey fieldKey : vertexIndex.getFieldKeys()) {
- indexKeys.add(fieldKey.getName());
- }
-
- vertexIndexKeys = indexKeys;
- }
-
- management.commit();
- }
- } catch (Exception excp) {
- LOG.error("getVertexIndexKeys(): failed to get indexedKeys from graph", excp);
-
- if (management != null) {
- try {
- management.rollback();
- } catch (Exception e) {
- LOG.error("getVertexIndexKeys(): rollback failed", e);
- }
- }
- }
- }
-
- return vertexIndexKeys;
- }
-
private void addIndexForType(AtlasGraphManagement management, AtlasBaseTypeDef typeDef) {
if (typeDef instanceof AtlasEnumDef) {
// Only handle complex types like Struct, Classification and Entity
@@ -414,82 +432,6 @@ public class GraphBackedSearchIndexer implements SearchIndexer, ActiveStateChang
throw new IllegalArgumentException(String.format("Bad cardinality %s", cardinality));
}
- private void addIndexForType(AtlasGraphManagement management, IDataType dataType) {
- switch (dataType.getTypeCategory()) {
- case PRIMITIVE:
- case ENUM:
- case ARRAY:
- case MAP:
- // do nothing since these are only attributes
- // and not types like structs, traits or classes
- break;
-
- case STRUCT:
- StructType structType = (StructType) dataType;
- createIndexForFields(management, structType, structType.fieldMapping().fields);
- break;
-
- case TRAIT:
- TraitType traitType = (TraitType) dataType;
- createIndexForFields(management, traitType, traitType.fieldMapping().fields);
- break;
-
- case CLASS:
- ClassType classType = (ClassType) dataType;
- createIndexForFields(management, classType, classType.fieldMapping().fields);
- break;
-
- default:
- throw new IllegalArgumentException("bad data type" + dataType);
- }
- }
-
- private void createIndexForFields(AtlasGraphManagement management, IDataType dataType, Map<String, AttributeInfo> fields) {
- for (AttributeInfo field : fields.values()) {
- createIndexForAttribute(management, dataType.getName(), field);
- }
- }
-
- private void createIndexForAttribute(AtlasGraphManagement management, String typeName, AttributeInfo field) {
- final String propertyName = GraphHelper.encodePropertyKey(typeName + "." + field.name);
- switch (field.dataType().getTypeCategory()) {
- case PRIMITIVE:
- AtlasCardinality cardinality = getCardinality(field.multiplicity);
- createIndexes(management, propertyName, getPrimitiveClass(field.dataType()), field.isUnique,
- cardinality, false, field.isIndexable);
- break;
-
- case ENUM:
- cardinality = getCardinality(field.multiplicity);
- createIndexes(management, propertyName, String.class, field.isUnique, cardinality, false, field.isIndexable);
- break;
-
- case ARRAY:
- createLabelIfNeeded(management, propertyName, field.dataType().getName());
- break;
- case MAP:
- // todo - how do we overcome this limitation?
- // IGNORE: Can only index single-valued property keys on vertices in Mixed Index
- break;
-
- case STRUCT:
- StructType structType = (StructType) field.dataType();
- createIndexForFields(management, structType, structType.fieldMapping().fields);
- break;
-
- case TRAIT:
- // do nothing since this is NOT contained in other types
- break;
-
- case CLASS:
- createEdgeLabel(management, propertyName);
- break;
-
- default:
- throw new IllegalArgumentException("bad data type" + field.dataType().getName());
- }
- }
-
private void createEdgeLabel(final AtlasGraphManagement management, final String propertyName) {
// Create the edge label upfront to avoid running into concurrent call issue (ATLAS-2092)
// ATLAS-2092 addresses this problem by creating the edge label upfront while type creation
@@ -506,50 +448,6 @@ public class GraphBackedSearchIndexer implements SearchIndexer, ActiveStateChang
}
}
- private Class getPrimitiveClass(IDataType dataType) {
- if (dataType == DataTypes.STRING_TYPE) {
- return String.class;
- } else if (dataType == DataTypes.SHORT_TYPE) {
- return Short.class;
- } else if (dataType == DataTypes.INT_TYPE) {
- return Integer.class;
- } else if (dataType == DataTypes.BIGINTEGER_TYPE) {
- return BigInteger.class;
- } else if (dataType == DataTypes.BOOLEAN_TYPE) {
- return Boolean.class;
- } else if (dataType == DataTypes.BYTE_TYPE) {
- return Byte.class;
- } else if (dataType == DataTypes.LONG_TYPE) {
- return Long.class;
- } else if (dataType == DataTypes.FLOAT_TYPE) {
- return Float.class;
- } else if (dataType == DataTypes.DOUBLE_TYPE) {
- return Double.class;
- } else if (dataType == DataTypes.BIGDECIMAL_TYPE) {
- return BigDecimal.class;
- } else if (dataType == DataTypes.DATE_TYPE) {
- //Indexing with date converted to long as of now since Titan is yet to add support for Date type with mixed indexes
- return Long.class;
- }
-
-
- throw new IllegalArgumentException("unknown data type " + dataType);
- }
-
-
- private AtlasCardinality getCardinality(Multiplicity multiplicity) {
- if (multiplicity == Multiplicity.OPTIONAL || multiplicity == Multiplicity.REQUIRED) {
- return AtlasCardinality.SINGLE;
- } else if (multiplicity == Multiplicity.COLLECTION) {
- return AtlasCardinality.LIST;
- } else if (multiplicity == Multiplicity.SET) {
- return AtlasCardinality.SET;
- }
-
- // todo - default to LIST as this is the most forgiving
- return AtlasCardinality.LIST;
- }
-
private AtlasPropertyKey createIndexes(AtlasGraphManagement management, String propertyName, Class propertyClass,
boolean isUnique, AtlasCardinality cardinality, boolean createCompositeForAttribute,
boolean createCompositeWithTypeandSuperTypes) {
@@ -677,70 +575,6 @@ public class GraphBackedSearchIndexer implements SearchIndexer, ActiveStateChang
}
}
- /**
- * Initialize global indices for Titan graph on server activation.
- *
- * Since the indices are shared state, we need to do this only from an active instance.
- */
- @Override
- public void instanceIsActive() throws AtlasException {
- LOG.info("Reacting to active: initializing index");
- try {
- initialize();
- } catch (RepositoryException | IndexException e) {
- throw new AtlasException("Error in reacting to active on initialization", e);
- }
- }
-
- @Override
- public void instanceIsPassive() {
- LOG.info("Reacting to passive state: No action right now.");
- }
-
- @Override
- public int getHandlerOrder() {
- return HandlerOrder.GRAPH_BACKED_SEARCH_INDEXER.getOrder();
- }
-
- @Override
- public void onChange(ChangedTypeDefs changedTypeDefs) throws AtlasBaseException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Processing changed typedefs {}", changedTypeDefs);
- }
- AtlasGraphManagement management = null;
- try {
- management = provider.get().getManagementSystem();
-
- // Update index for newly created types
- if (CollectionUtils.isNotEmpty(changedTypeDefs.getCreateTypeDefs())) {
- for (AtlasBaseTypeDef typeDef : changedTypeDefs.getCreateTypeDefs()) {
- updateIndexForTypeDef(management, typeDef);
- }
- }
-
- // Update index for updated types
- if (CollectionUtils.isNotEmpty(changedTypeDefs.getUpdatedTypeDefs())) {
- for (AtlasBaseTypeDef typeDef : changedTypeDefs.getUpdatedTypeDefs()) {
- updateIndexForTypeDef(management, typeDef);
- }
- }
-
- // Invalidate the property key for deleted types
- if (CollectionUtils.isNotEmpty(changedTypeDefs.getDeletedTypeDefs())) {
- for (AtlasBaseTypeDef typeDef : changedTypeDefs.getDeletedTypeDefs()) {
- cleanupIndices(management, typeDef);
- }
- }
-
- //Commit indexes
- commit(management);
- } catch (RepositoryException | IndexException e) {
- LOG.error("Failed to update indexes for changed typedefs", e);
- attemptRollback(changedTypeDefs, management);
- }
-
- }
-
private void cleanupIndices(AtlasGraphManagement management, AtlasBaseTypeDef typeDef) {
Preconditions.checkNotNull(typeDef, "Cannot process null typedef");
if (LOG.isDebugEnabled()) {
@@ -816,14 +650,4 @@ public class GraphBackedSearchIndexer implements SearchIndexer, ActiveStateChang
addIndexForType(management, typeDef);
LOG.info("Index creation for type {} complete", typeDef.getName());
}
-
- /* Commenting this out since we do not need an index for edge label here
- private void createEdgeMixedIndex(String propertyName) {
- EdgeLabel edgeLabel = management.getEdgeLabel(propertyName);
- if (edgeLabel == null) {
- edgeLabel = management.makeEdgeLabel(propertyName).make();
- management.buildEdgeIndex(edgeLabel, propertyName, Direction.BOTH, Order.DEFAULT);
- LOG.info("Created index for edge label {}", propertyName);
- }
- }*/
}
[25/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/scala/org/apache/atlas/query/GremlinQuery.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/GremlinQuery.scala b/repository/src/main/scala/org/apache/atlas/query/GremlinQuery.scala
deleted file mode 100644
index 37015d8..0000000
--- a/repository/src/main/scala/org/apache/atlas/query/GremlinQuery.scala
+++ /dev/null
@@ -1,806 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import java.lang.Boolean
-import java.lang.Byte
-import java.lang.Double
-import java.lang.Float
-import java.lang.Integer
-import java.lang.Long
-import java.lang.Short
-import java.util.ArrayList
-
-import scala.collection.JavaConversions.asScalaBuffer
-import scala.collection.JavaConversions.bufferAsJavaList
-import scala.collection.mutable
-import scala.collection.mutable.ArrayBuffer
-
-
-import org.apache.atlas.gremlin.GremlinExpressionFactory
-import org.apache.atlas.gremlin.optimizer.GremlinQueryOptimizer
-import org.apache.atlas.groovy.CastExpression
-import org.apache.atlas.groovy.ClosureExpression
-import org.apache.atlas.groovy.LabeledExpression
-import org.apache.atlas.groovy.FunctionCallExpression
-import org.apache.atlas.groovy.GroovyExpression
-import org.apache.atlas.groovy.GroovyGenerationContext
-import org.apache.atlas.groovy.IdentifierExpression
-import org.apache.atlas.groovy.ListExpression
-import org.apache.atlas.groovy.LiteralExpression
-import org.apache.atlas.groovy.TraversalStepType
-import org.apache.atlas.query.Expressions.AliasExpression
-import org.apache.atlas.query.Expressions.ArithmeticExpression
-import org.apache.atlas.query.Expressions.BackReference
-import org.apache.atlas.query.Expressions.ClassExpression
-import org.apache.atlas.query.Expressions.ComparisonExpression
-import org.apache.atlas.query.Expressions.Expression
-import org.apache.atlas.query.Expressions.ExpressionException
-import org.apache.atlas.query.Expressions.FieldExpression
-import org.apache.atlas.query.Expressions.FilterExpression
-import org.apache.atlas.query.Expressions.InstanceExpression
-import org.apache.atlas.query.Expressions.LimitExpression
-import org.apache.atlas.query.Expressions.ListLiteral
-import org.apache.atlas.query.Expressions.Literal
-import org.apache.atlas.query.Expressions.LogicalExpression
-import org.apache.atlas.query.Expressions.LoopExpression
-import org.apache.atlas.query.Expressions.OrderExpression
-import org.apache.atlas.query.Expressions.PathExpression
-import org.apache.atlas.query.Expressions.SelectExpression
-import org.apache.atlas.query.Expressions.TraitExpression
-import org.apache.atlas.query.Expressions.TraitInstanceExpression
-import org.apache.atlas.query.Expressions.hasFieldLeafExpression
-import org.apache.atlas.query.Expressions.hasFieldUnaryExpression
-import org.apache.atlas.query.Expressions.id
-import org.apache.atlas.query.Expressions.isTraitLeafExpression
-import org.apache.atlas.query.Expressions.isTraitUnaryExpression
-import org.apache.atlas.repository.RepositoryException
-import org.apache.atlas.repository.graphdb.AtlasEdgeDirection
-import org.apache.atlas.typesystem.types.DataTypes
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory
-import org.apache.atlas.typesystem.types.IDataType
-import org.apache.atlas.typesystem.types.TypeSystem
-import org.joda.time.format.ISODateTimeFormat
-import org.apache.atlas.query.Expressions.GroupByExpression
-import org.apache.atlas.query.Expressions.MaxExpression
-import org.apache.atlas.query.Expressions.MinExpression
-import org.apache.atlas.query.Expressions.SumExpression
-import org.apache.atlas.query.Expressions.CountExpression
-
-import org.apache.atlas.util.AtlasRepositoryConfiguration
-import java.util.HashSet
-
-trait IntSequence {
- def next: Int
-}
-
-case class GremlinQuery(expr: Expression, queryStr: String, resultMaping: Map[String, (String, Int)]) {
-
- def hasSelectList = resultMaping != null
-
- def isPathExpression = expr.isInstanceOf[PathExpression]
-
- def isGroupBy = expr.isInstanceOf[GroupByExpression]
-}
-
-
-trait SelectExpressionHandling {
-
- class AliasFinder extends PartialFunction[Expression,Unit] {
- val aliases = new HashSet[String]()
-
- def isDefinedAt(e: Expression) = true
-
- def apply(e: Expression) = e match {
- case e@AliasExpression(_, alias) => {
- aliases.add(alias)
- }
- case x => Unit
- }
- }
-
- class ReplaceAliasWithBackReference(aliases: HashSet[String]) extends PartialFunction[Expression, Expression] {
-
- def isDefinedAt(e: Expression) = true
-
- def apply(e: Expression) = e match {
- case e@AliasExpression(child,alias) if aliases.contains(alias) => {
- new BackReference(alias, child, None)
- }
- case x => x
- }
- }
-
- // Removes back references in comparison expressions that are
- // right after an alias expression.
- //
- //For example:
- // .as('x').and(select('x').has(y),...) is changed to
- // .as('x').and(has(y),...)
- //
- //This allows the "has" to be extracted out of the and/or by
- //the GremlinQueryOptimizer so the index can be used to evaluate
- //the predicate.
-
- val RemoveUnneededBackReferences : PartialFunction[Expression, Expression] = {
-
- case filterExpr@FilterExpression(aliasExpr@AliasExpression(_,aliasName), filterChild) => {
- val updatedChild = removeUnneededBackReferences(filterChild, aliasName)
- val changed = !(updatedChild eq filterChild)
- if(changed) {
- FilterExpression(aliasExpr, updatedChild)
- }
- else {
- filterExpr
- }
-
- }
- case x => x
- }
- def removeUnneededBackReferences(expr: Expression, outerAlias: String) : Expression = expr match {
- case logicalExpr@LogicalExpression(logicalOp,children) => {
- var changed : Boolean = false;
- val updatedChildren : List[Expression] = children.map { child =>
- val updatedChild = removeUnneededBackReferences(child, outerAlias);
- changed |= ! (updatedChild eq child);
- updatedChild
- }
- if(changed) {
- LogicalExpression(logicalOp,updatedChildren)
- }
- else {
- logicalExpr
- }
- }
- case comparisonExpr@ComparisonExpression(_,_,_) => {
- var changed = false
- val updatedLeft = removeUnneededBackReferences(comparisonExpr.left, outerAlias);
- changed |= !( updatedLeft eq comparisonExpr.left);
-
- val updatedRight = removeUnneededBackReferences(comparisonExpr.right, outerAlias);
- changed |= !(updatedRight eq comparisonExpr.right);
-
- if (changed) {
- ComparisonExpression(comparisonExpr.symbol, updatedLeft, updatedRight)
- } else {
- comparisonExpr
- }
- }
- case FieldExpression(fieldName, fieldInfo, Some(br @ BackReference(brAlias, _, _))) if outerAlias.equals(brAlias) => {
- //Remove the back reference, since the thing it references is right in front
- //of the comparison expression we're in
- FieldExpression(fieldName, fieldInfo, None)
- }
- case x => x
- }
-
- //in groupby, convert alias expressions defined in the group by child to BackReferences
- //in the groupby list and selectList.
- val AddBackReferencesToGroupBy : PartialFunction[Expression, Expression] = {
- case GroupByExpression(child, groupBy, selExpr) => {
-
- val aliases = ArrayBuffer[AliasExpression]()
- val finder = new AliasFinder();
- child.traverseUp(finder);
-
- val replacer = new ReplaceAliasWithBackReference(finder.aliases)
-
- val newGroupBy = new SelectExpression(
- groupBy.child.transformUp(replacer),
- groupBy.selectList.map {
- expr => expr.transformUp(replacer)
-
- },
- groupBy.forGroupBy);
-
- val newSelExpr = new SelectExpression(
- selExpr.child.transformUp(replacer),
- selExpr.selectList.map {
- expr => expr.transformUp(replacer)
- },
- selExpr.forGroupBy);
-
- new GroupByExpression(child, newGroupBy, newSelExpr)
- }
- case x => x
- }
- /**
- * To aide in gremlinQuery generation add an alias to the input of SelectExpressions
- */
- class AddAliasToSelectInput extends PartialFunction[Expression, Expression] {
-
- private var idx = 0
-
- def isDefinedAt(e: Expression) = true
-
- class DecorateFieldWithAlias(aliasE: AliasExpression)
- extends PartialFunction[Expression, Expression] {
- def isDefinedAt(e: Expression) = true
-
- def apply(e: Expression) = e match {
- case fe@FieldExpression(fieldName, fInfo, None) =>
- FieldExpression(fieldName, fInfo, Some(BackReference(aliasE.alias, aliasE.child, None)))
- case _ => e
- }
- }
-
- def apply(e: Expression) = e match {
- case SelectExpression(aliasE@AliasExpression(_, _), selList, forGroupBy) => {
- idx = idx + 1
- SelectExpression(aliasE, selList.map(_.transformUp(new DecorateFieldWithAlias(aliasE))), forGroupBy)
- }
- case SelectExpression(child, selList, forGroupBy) => {
- idx = idx + 1
- val aliasE = AliasExpression(child, s"_src$idx")
- SelectExpression(aliasE, selList.map(_.transformUp(new DecorateFieldWithAlias(aliasE))), forGroupBy)
- }
- case OrderExpression(aliasE@AliasExpression(_, _), order, asc) => {
- OrderExpression(aliasE, order.transformUp(new DecorateFieldWithAlias(aliasE)),asc)
- }
- case OrderExpression(child, order, asc) => {
- idx = idx + 1
- val aliasE = AliasExpression(child, s"_src$idx")
- OrderExpression(aliasE, order.transformUp(new DecorateFieldWithAlias(aliasE)),asc)
- }
- case _ => e
- }
- }
-
- def getSelectExpressionSrc(e: Expression): List[String] = {
- val l = ArrayBuffer[String]()
- e.traverseUp {
- case BackReference(alias, _, _) => l += alias
- case ClassExpression(clsName) => l += clsName
- }
- l.toSet.toList
- }
-
- def validateSelectExprHaveOneSrc: PartialFunction[Expression, Unit] = {
- case SelectExpression(_, selList, forGroupBy) => {
- selList.foreach { se =>
- val srcs = getSelectExpressionSrc(se)
- if (srcs.size > 1) {
- throw new GremlinTranslationException(se, "Only one src allowed in a Select Expression")
- }
- }
- }
- }
-
- def groupSelectExpressionsBySrc(sel: SelectExpression): mutable.LinkedHashMap[String, List[Expression]] = {
- val m = mutable.LinkedHashMap[String, List[Expression]]()
- sel.selectListWithAlias.foreach { se =>
- val l = getSelectExpressionSrc(se.child)
- if (!m.contains(l(0))) {
- m(l(0)) = List()
- }
- m(l(0)) = m(l(0)) :+ se.child
- }
- m
- }
-
- /**
- * For each Output Column in the SelectExpression compute the ArrayList(Src) this maps to and the position within
- * this list.
- *
- * @param sel
- * @return
- */
- def buildResultMapping(sel: SelectExpression): Map[String, (String, Int)] = {
- val srcToExprs = groupSelectExpressionsBySrc(sel)
- val m = new mutable.HashMap[String, (String, Int)]
- sel.selectListWithAlias.foreach { se =>
- val src = getSelectExpressionSrc(se.child)(0)
- val srcExprs = srcToExprs(src)
- var idx = srcExprs.indexOf(se.child)
- m(se.alias) = (src, idx)
- }
- m.toMap
- }
-}
-
-class GremlinTranslationException(expr: Expression, reason: String) extends
-ExpressionException(expr, s"Unsupported Gremlin translation: $reason")
-
-class GremlinTranslator(expr: Expression,
- gPersistenceBehavior: GraphPersistenceStrategies)
- extends SelectExpressionHandling {
-
- val preStatements = ArrayBuffer[GroovyExpression]()
- val postStatements = ArrayBuffer[GroovyExpression]()
-
- val wrapAndRule: PartialFunction[Expression, Expression] = {
- case f: FilterExpression if ((!f.condExpr.isInstanceOf[LogicalExpression]) &&
- (f.condExpr.isInstanceOf[isTraitLeafExpression] || !f.namedExpressions.isEmpty)) =>
- FilterExpression(f.child, new LogicalExpression("and", List(f.condExpr)))
- }
-
- val validateComparisonForm: PartialFunction[Expression, Unit] = {
- case c@ComparisonExpression(op, left, right) =>
- if (!left.isInstanceOf[FieldExpression]) {
- throw new GremlinTranslationException(c, s"lhs of comparison is not a field")
- }
- if (!right.isInstanceOf[Literal[_]] && !right.isInstanceOf[ListLiteral[_]]) {
- throw new GremlinTranslationException(c,
- s"rhs of comparison is not a literal")
- }
-
- if(right.isInstanceOf[ListLiteral[_]] && (!op.equals("=") && !op.equals("!="))) {
- throw new GremlinTranslationException(c,
- s"operation not supported with list literal")
- }
- ()
- }
-
- val counter = new IntSequence {
- var i: Int = -1;
-
- def next: Int = {
- i += 1; i
- }
- }
-
- def addAliasToLoopInput(c: IntSequence = counter): PartialFunction[Expression, Expression] = {
- case l@LoopExpression(aliasE@AliasExpression(_, _), _, _) => l
- case l@LoopExpression(inputExpr, loopExpr, t) => {
- val aliasE = AliasExpression(inputExpr, s"_loop${c.next}")
- LoopExpression(aliasE, loopExpr, t)
- }
- }
-
- def instanceClauseToTop(topE : Expression) : PartialFunction[Expression, Expression] = {
- case le : LogicalExpression if (le fastEquals topE) => {
- le.instance()
- }
- case ce : ComparisonExpression if (ce fastEquals topE) => {
- ce.instance()
- }
- case he : hasFieldUnaryExpression if (he fastEquals topE) => {
- he.instance()
- }
- }
-
- def traitClauseWithInstanceForTop(topE : Expression) : PartialFunction[Expression, Expression] = {
-// This topE check prevented the comparison of trait expression when it is a child. Like trait as t limit 2
- case te : TraitExpression => {
- val theTrait = te.as("theTrait")
- val theInstance = theTrait.traitInstance().as("theInstance")
- val outE =
- theInstance.select(id("theTrait").as("traitDetails"),
- id("theInstance").as("instanceInfo"))
- QueryProcessor.validate(outE)
- }
- }
-
- def typeTestExpression(parent: GroovyExpression, typeName : String) : GroovyExpression = {
- val stats = GremlinExpressionFactory.INSTANCE.generateTypeTestExpression(gPersistenceBehavior, parent, typeName, counter)
-
- preStatements ++= stats.init
- stats.last
-
- }
-
- val QUOTE = "\"";
-
- private def cleanStringLiteral(l : Literal[_]) : String = {
- return l.toString.stripPrefix(QUOTE).stripSuffix(QUOTE);
- }
-
-
- private def genQuery(parent: GroovyExpression, expr: Expression, inClosure: Boolean): GroovyExpression = expr match {
- case ClassExpression(clsName) => typeTestExpression(parent, clsName)
- case TraitExpression(clsName) => typeTestExpression(parent, clsName)
- case fe@FieldExpression(fieldName, fInfo, child)
- if fe.dataType.getTypeCategory == TypeCategory.PRIMITIVE || fe.dataType.getTypeCategory == TypeCategory.ARRAY => {
- val fN = gPersistenceBehavior.fieldNameInVertex(fInfo.dataType, fInfo.attrInfo)
- val childExpr = translateOptChild(parent, child, inClosure);
- return GremlinExpressionFactory.INSTANCE.generateFieldExpression(childExpr, fInfo, fN, inClosure);
- }
- case fe@FieldExpression(fieldName, fInfo, child)
- if fe.dataType.getTypeCategory == TypeCategory.CLASS || fe.dataType.getTypeCategory == TypeCategory.STRUCT => {
- val childExpr = translateOptChild(parent, child, inClosure);
- val direction = if (fInfo.isReverse) AtlasEdgeDirection.IN else AtlasEdgeDirection.OUT
- val edgeLbl = gPersistenceBehavior.edgeLabel(fInfo)
- return GremlinExpressionFactory.INSTANCE.generateAdjacentVerticesExpression(childExpr, direction, edgeLbl)
-
- }
- case fe@FieldExpression(fieldName, fInfo, child) if fInfo.traitName != null => {
- val childExpr = translateOptChild(parent, child, inClosure);
- val direction = gPersistenceBehavior.instanceToTraitEdgeDirection
- val edgeLbl = gPersistenceBehavior.edgeLabel(fInfo)
- return GremlinExpressionFactory.INSTANCE.generateAdjacentVerticesExpression(childExpr, direction, edgeLbl)
-
- }
- case c@ComparisonExpression(symb, f@FieldExpression(fieldName, fInfo, ch), l) => {
- val qualifiedPropertyName = s"${gPersistenceBehavior.fieldNameInVertex(fInfo.dataType, fInfo.attrInfo)}"
-
- val childExpr = translateOptChild(parent, ch, inClosure)
- val persistentExprValue : GroovyExpression = if(l.isInstanceOf[Literal[_]]) {
- translateLiteralValue(fInfo.attrInfo.dataType, l.asInstanceOf[Literal[_]]);
- }
- else {
- genQuery(null, l, inClosure);
- }
-
- if (symb == "like") {
- return GremlinExpressionFactory.INSTANCE.generateLikeExpressionUsingFilter(childExpr, qualifiedPropertyName, persistentExprValue);
- }
-
- return GremlinExpressionFactory.INSTANCE.generateHasExpression(gPersistenceBehavior, childExpr, qualifiedPropertyName, c.symbol, persistentExprValue, fInfo);
- }
- case fil@FilterExpression(child, condExpr) => {
- var newParent = genQuery(parent, child, inClosure);
- val alias = "a" + counter.next;
- newParent = GremlinExpressionFactory.INSTANCE.generateAliasExpression(newParent, alias);
- val translated = genQuery(newParent, condExpr, inClosure);
- //we want the query to return instances of the class whose instances we are filtering out
- //The act of filtering may traverse edges and have other side effects.
- GremlinExpressionFactory.INSTANCE.generateBackReferenceExpression(translated, false, alias);
- }
- case l@LogicalExpression(symb, children) => {
- val translatedChildren : java.util.List[GroovyExpression] = translateList(children, false);
- return GremlinExpressionFactory.INSTANCE.generateLogicalExpression(parent, symb, translatedChildren);
- }
- case sel@SelectExpression(child, selList, forGroupBy) => {
- val m = groupSelectExpressionsBySrc(sel)
- var srcNamesList: java.util.List[LiteralExpression] = new ArrayList()
- var srcExprsList: List[java.util.List[GroovyExpression]] = List()
- val it = m.iterator
-
- while (it.hasNext) {
- val (src, selExprs) = it.next
- srcNamesList.add(new LiteralExpression(src));
- val translatedSelExprs : java.util.List[GroovyExpression] = translateList(selExprs, true);
- srcExprsList = srcExprsList :+ translatedSelExprs
- }
- val srcExprsStringList : java.util.List[GroovyExpression] = new ArrayList();
- srcExprsList.foreach { it =>
- srcExprsStringList.add(new ListExpression(it));
- }
-
- val childExpr = genQuery(parent, child, inClosure)
- return GremlinExpressionFactory.INSTANCE.generateSelectExpression(childExpr, srcNamesList, srcExprsStringList);
-
- }
- case loop@LoopExpression(input, loopExpr, t) => {
-
- val times : Integer = if(t.isDefined) {
- t.get.rawValue.asInstanceOf[Integer]
- }
- else {
- null.asInstanceOf[Integer]
- }
- val alias = input.asInstanceOf[AliasExpression].alias;
- val inputQry = genQuery(parent, input, inClosure)
- val translatedLoopExpr = genQuery(GremlinExpressionFactory.INSTANCE.getLoopExpressionParent(inputQry), loopExpr, inClosure);
- return GremlinExpressionFactory.INSTANCE.generateLoopExpression(inputQry, gPersistenceBehavior, input.dataType, translatedLoopExpr, alias, times);
- }
- case BackReference(alias, _, _) => {
-
- return GremlinExpressionFactory.INSTANCE.generateBackReferenceExpression(parent, inClosure, alias);
- }
- case AliasExpression(child, alias) => {
- var childExpr = genQuery(parent, child, inClosure);
- return GremlinExpressionFactory.INSTANCE.generateAliasExpression(childExpr, alias);
- }
- case isTraitLeafExpression(traitName, Some(clsExp)) => {
- val label = gPersistenceBehavior.traitLabel(clsExp.dataType, traitName);
- return GremlinExpressionFactory.INSTANCE.generateAdjacentVerticesExpression(parent, AtlasEdgeDirection.OUT, label);
- }
- case isTraitUnaryExpression(traitName, child) => {
- val label = gPersistenceBehavior.traitLabel(child.dataType, traitName);
- return GremlinExpressionFactory.INSTANCE.generateAdjacentVerticesExpression(parent, AtlasEdgeDirection.OUT, label);
- }
- case hasFieldLeafExpression(fieldName, clsExp) => clsExp match {
- case None => GremlinExpressionFactory.INSTANCE.generateUnaryHasExpression(parent, fieldName)
- case Some(x) => {
- val fi = TypeUtils.resolveReference(clsExp.get.dataType, fieldName);
- if(! fi.isDefined) {
- return GremlinExpressionFactory.INSTANCE.generateUnaryHasExpression(parent, fieldName);
- }
- else {
- val fName = gPersistenceBehavior.fieldNameInVertex(fi.get.dataType, fi.get.attrInfo)
- return GremlinExpressionFactory.INSTANCE.generateUnaryHasExpression(parent, fName);
- }
- }
- }
- case hasFieldUnaryExpression(fieldName, child) =>
- val childExpr = genQuery(parent, child, inClosure);
- return GremlinExpressionFactory.INSTANCE.generateUnaryHasExpression(childExpr, fieldName);
- case ArithmeticExpression(symb, left, right) => {
- val leftExpr = genQuery(parent, left, inClosure);
- val rightExpr = genQuery(parent, right, inClosure);
- return GremlinExpressionFactory.INSTANCE.generateArithmeticExpression(leftExpr, symb, rightExpr);
- }
- case l: Literal[_] => {
-
- if(parent != null) {
- return new org.apache.atlas.groovy.FieldExpression(parent, cleanStringLiteral(l));
- }
- return translateLiteralValue(l.dataType, l);
- }
- case list: ListLiteral[_] => {
- //Here, we are creating a Groovy list literal expression ([value1, value2, value3]). Because
- //of this, any gremlin query expressions within the list must start with an anonymous traversal.
- //We set 'inClosure' to true in this case to make that happen.
- val values : java.util.List[GroovyExpression] = translateList(list.rawValue, true);
- return new ListExpression(values);
- }
- case in@TraitInstanceExpression(child) => {
- val childExpr = genQuery(parent, child, inClosure);
- val direction = gPersistenceBehavior.traitToInstanceEdgeDirection;
- return GremlinExpressionFactory.INSTANCE.generateAdjacentVerticesExpression(childExpr, direction);
- }
- case in@InstanceExpression(child) => {
- return genQuery(parent, child, inClosure);
- }
- case pe@PathExpression(child) => {
- val childExpr = genQuery(parent, child, inClosure)
- return GremlinExpressionFactory.INSTANCE.generatePathExpression(childExpr);
- }
- case order@OrderExpression(child, odr, asc) => {
- var orderExpression = odr
- if(odr.isInstanceOf[BackReference]) {
- orderExpression = odr.asInstanceOf[BackReference].reference
- }
- else if (odr.isInstanceOf[AliasExpression]) {
- orderExpression = odr.asInstanceOf[AliasExpression].child
- }
-
- val childExpr = genQuery(parent, child, inClosure);
- var orderByParents : java.util.List[GroovyExpression] = GremlinExpressionFactory.INSTANCE.getOrderFieldParents();
-
- val translatedParents : java.util.List[GroovyExpression] = new ArrayList[GroovyExpression]();
- var translatedOrderParents = orderByParents.foreach { it =>
- translatedParents.add(genQuery(it, orderExpression, true));
- }
- return GremlinExpressionFactory.INSTANCE.generateOrderByExpression(childExpr, translatedParents,asc);
-
- }
- case limitOffset@LimitExpression(child, limit, offset) => {
- val childExpr = genQuery(parent, child, inClosure);
- val totalResultRows = limit.value + offset.value;
- return GremlinExpressionFactory.INSTANCE.generateRangeExpression(childExpr, offset.value, totalResultRows);
- }
- case count@CountExpression() => {
- val listExpr = GremlinExpressionFactory.INSTANCE.getClosureArgumentValue();
- GremlinExpressionFactory.INSTANCE.generateCountExpression(listExpr);
- }
- case max@MaxExpression(child) => {
- //use "it" as the parent since the child will become
- //part of a closure. Its value will be whatever vertex
- //we are looking at in the collection.
- val childExprParent = null;
- val childExpr = genQuery(childExprParent, child, true);
- val listExpr = GremlinExpressionFactory.INSTANCE.getClosureArgumentValue();
- GremlinExpressionFactory.INSTANCE.generateMaxExpression(listExpr, childExpr);
- }
- case min@MinExpression(child) => {
- //use "it" as the parent since the child will become
- //part of a closure. Its value will be whatever vertex
- //we are looking at in the collection.
- val childExprParent = null;
- val childExpr = genQuery(childExprParent, child, true);
- val listExpr = GremlinExpressionFactory.INSTANCE.getClosureArgumentValue();
- GremlinExpressionFactory.INSTANCE.generateMinExpression(listExpr, childExpr);
- }
- case sum@SumExpression(child) => {
- //use "it" as the parent since the child will become
- //part of a closure. Its value will be whatever vertex
- //we are looking at in the collection.
- val childExprParent = null;
- val childExpr = genQuery(childExprParent, child, true);
- val listExpr = GremlinExpressionFactory.INSTANCE.getClosureArgumentValue();
- GremlinExpressionFactory.INSTANCE.generateSumExpression(listExpr, childExpr);
- }
- case groupByExpr@GroupByExpression(child, groupBy, selExpr) => {
- //remove aliases
- val groupByExprListToTranslate = (groupBy.asInstanceOf[SelectExpression]).selectListWithAlias.map {
- x => x.child;
- }
- val grpByExprsList = translateList(groupByExprListToTranslate, true);
- val groupByValue = new ListExpression(grpByExprsList);
-
- //reduction only aggregate methods are supported here as of now.(Max, Min, Count)
- //remove aliases
- val srcExprListToTranslate = selExpr.selectListWithAlias.map {
- x => x.child;
- }
- val srcExprsList = translateList(srcExprListToTranslate, true, true);
- val srcExprsStringList = new ListExpression(srcExprsList)
-
- val childExpr = genQuery(parent, child, inClosure);
- return GremlinExpressionFactory.INSTANCE.generateGroupByExpression(childExpr, groupByValue, srcExprsStringList);
- }
- case x => throw new GremlinTranslationException(x, "expression not yet supported")
- }
-
- def translateList(exprs : List[Expressions.Expression], inClosure : Boolean, inGroupBy: Boolean = false) : java.util.List[GroovyExpression] = {
- var parent = if (inGroupBy) {
- GremlinExpressionFactory.INSTANCE.getGroupBySelectFieldParent();
- }
-
- else if(inClosure) {
- null;
- }
- else {
- GremlinExpressionFactory.INSTANCE.getAnonymousTraversalExpression()
- }
- var result : java.util.List[GroovyExpression] = new java.util.ArrayList(exprs.size);
- exprs.foreach { it =>
- result.add(genQuery(parent, it, inClosure));
- }
- return result;
- }
-
- def translateOptChild(parent : GroovyExpression, child : Option[Expressions.Expression] , inClosure: Boolean) : GroovyExpression = child match {
-
- case Some(x) => genQuery(parent, x, inClosure)
- case None => parent
- }
-
- def translateLiteralValue(dataType: IDataType[_], l: Literal[_]): GroovyExpression = {
-
-
- if (dataType == DataTypes.DATE_TYPE) {
- try {
- //Accepts both date, datetime formats
- val dateStr = cleanStringLiteral(l)
- val dateVal = ISODateTimeFormat.dateOptionalTimeParser().parseDateTime(dateStr).getMillis
- return new LiteralExpression(dateVal)
- } catch {
- case pe: java.text.ParseException =>
- throw new GremlinTranslationException(l,
- "Date format " + l + " not supported. Should be of the format " +
- TypeSystem.getInstance().getDateFormat.toPattern);
- }
- }
- else if(dataType == DataTypes.BYTE_TYPE) {
- //cast needed, otherwise get class cast exception when trying to compare, since the
- //persist value is assumed to be an Integer
- return new CastExpression(new LiteralExpression(Byte.valueOf(s"""${l}"""), true),"byte");
- }
- else if(dataType == DataTypes.INT_TYPE) {
- return new LiteralExpression(Integer.valueOf(s"""${l}"""));
- }
- else if(dataType == DataTypes.BOOLEAN_TYPE) {
- return new LiteralExpression(Boolean.valueOf(s"""${l}"""));
- }
- else if(dataType == DataTypes.SHORT_TYPE) {
- return new CastExpression(new LiteralExpression(Short.valueOf(s"""${l}"""), true),"short");
- }
- else if(dataType == DataTypes.LONG_TYPE) {
- return new LiteralExpression(Long.valueOf(s"""${l}"""), true);
- }
- else if(dataType == DataTypes.FLOAT_TYPE) {
- return new LiteralExpression(Float.valueOf(s"""${l}"""), true);
- }
- else if(dataType == DataTypes.DOUBLE_TYPE) {
- return new LiteralExpression(Double.valueOf(s"""${l}"""), true);
- }
- else if(dataType == DataTypes.STRING_TYPE) {
- return new LiteralExpression(cleanStringLiteral(l));
- }
- else {
- return new LiteralExpression(l.rawValue);
- }
- }
-
- def genFullQuery(expr: Expression, hasSelect: Boolean): String = {
-
- var q : GroovyExpression = new FunctionCallExpression(TraversalStepType.START, new IdentifierExpression(TraversalStepType.SOURCE, "g"),"V");
-
- val debug:Boolean = false
- if(gPersistenceBehavior.addGraphVertexPrefix(preStatements)) {
- q = gPersistenceBehavior.addInitialQueryCondition(q);
- }
-
- q = genQuery(q, expr, false)
-
- q = GremlinExpressionFactory.INSTANCE.generateToListExpression(q);
- q = gPersistenceBehavior.getGraph().addOutputTransformationPredicate(q, hasSelect, expr.isInstanceOf[PathExpression]);
-
-
- if(AtlasRepositoryConfiguration.isGremlinOptimizerEnabled()) {
- q = GremlinQueryOptimizer.getInstance().optimize(q);
- }
-
- val closureExpression = new ClosureExpression();
-
- closureExpression.addStatements(preStatements);
- closureExpression.addStatement(q)
- closureExpression.addStatements(postStatements);
-
- val overallExpression = new LabeledExpression("L", closureExpression);
-
- val qryStr = generateGremlin(overallExpression);
-
- if(debug) {
- println(" query " + qryStr)
- }
-
- qryStr;
-
- }
-
- def generateGremlin(expr: GroovyExpression) : String = {
- val ctx : GroovyGenerationContext = new GroovyGenerationContext();
- ctx.setParametersAllowed(false);
- expr.generateGroovy(ctx);
- return ctx.getQuery;
- }
-
- def translate(): GremlinQuery = {
- var e1 = expr.transformUp(wrapAndRule)
-
- e1.traverseUp(validateComparisonForm)
- e1 = e1.transformUp(AddBackReferencesToGroupBy)
- e1 = e1.transformUp(new AddAliasToSelectInput)
- e1.traverseUp(validateSelectExprHaveOneSrc)
- e1 = e1.transformUp(addAliasToLoopInput())
- e1 = e1.transformUp(instanceClauseToTop(e1))
- e1 = e1.transformUp(traitClauseWithInstanceForTop(e1))
- e1 = e1.transformUp(RemoveUnneededBackReferences)
-
- //Following code extracts the select expressions from expression tree.
-
- val se = SelectExpressionHelper.extractSelectExpression(e1)
- if (se.isDefined) {
- val rMap = buildResultMapping(se.get)
- GremlinQuery(e1, genFullQuery(e1, true), rMap)
- } else {
- GremlinQuery(e1, genFullQuery(e1, false), null)
- }
-
- }
-}
- object SelectExpressionHelper {
- /**
- * This method extracts the child select expression from parent expression
- */
- def extractSelectExpression(child: Expression): Option[SelectExpression] = {
- child match {
- case se@SelectExpression(child, selectList, false) =>{
- Some(se)
- }
- case limit@LimitExpression(child, lmt, offset) => {
- extractSelectExpression(child)
- }
- case order@OrderExpression(child, odr, odrBy) => {
- extractSelectExpression(child)
- }
- case path@PathExpression(child) => {
- extractSelectExpression(child)
- }
- case _ => {
- None
- }
-
- }
- }
- }
- /*
- * TODO
- * Translation Issues:
- * 1. back references in filters. For e.g. testBackreference: 'DB as db Table where (db.name = "Reporting")'
- * this is translated to:
- * g.V.has("typeName","DB").as("db").in("Table.db").and(_().back("db").has("name", T.eq, "Reporting")).map().toList()
- * But the '_().back("db") within the and is ignored, the has condition is applied on the current element.
- * The solution is to do predicate pushdown and apply the filter immediately on top of the referred Expression.
- */
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/scala/org/apache/atlas/query/QueryParser.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/QueryParser.scala b/repository/src/main/scala/org/apache/atlas/query/QueryParser.scala
deleted file mode 100755
index 4bc6e74..0000000
--- a/repository/src/main/scala/org/apache/atlas/query/QueryParser.scala
+++ /dev/null
@@ -1,557 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import org.apache.atlas.query.Expressions._
-
-import scala.util.parsing.combinator.lexical.StdLexical
-import scala.util.parsing.combinator.syntactical.StandardTokenParsers
-import scala.util.parsing.combinator.{ImplicitConversions, PackratParsers}
-import scala.util.parsing.input.CharArrayReader._
-import org.apache.atlas.AtlasException
-import org.apache.atlas.typesystem.types.DataTypes
-
-trait QueryKeywords {
- this: StandardTokenParsers =>
-
- import scala.language.implicitConversions
-
- protected case class Keyword(str: String)
-
- protected implicit def asParser(k: Keyword): Parser[String] = k.str
-
- protected val LIST_LPAREN = Keyword("[")
- protected val LIST_RPAREN = Keyword("]")
- protected val LPAREN = Keyword("(")
- protected val RPAREN = Keyword(")")
- protected val EQ = Keyword("=")
- protected val LT = Keyword("<")
- protected val GT = Keyword(">")
- protected val NEQ = Keyword("!=")
- protected val LTE = Keyword("<=")
- protected val GTE = Keyword(">=")
- protected val COMMA = Keyword(",")
- protected val AND = Keyword("and")
- protected val OR = Keyword("or")
- protected val PLUS = Keyword("+")
- protected val MINUS = Keyword("-")
- protected val STAR = Keyword("*")
- protected val DIV = Keyword("/")
- protected val DOT = Keyword(".")
-
- protected val SELECT = Keyword("select")
- protected val FROM = Keyword("from")
- protected val WHERE = Keyword("where")
- protected val GROUPBY = Keyword("groupby")
- protected val LOOP = Keyword("loop")
- protected val ISA = Keyword("isa")
- protected val IS = Keyword("is")
- protected val HAS = Keyword("has")
- protected val AS = Keyword("as")
- protected val TIMES = Keyword("times")
- protected val WITHPATH = Keyword("withPath")
- protected val LIMIT = Keyword("limit")
- protected val OFFSET = Keyword("offset")
- protected val ORDERBY = Keyword("orderby")
- protected val COUNT = Keyword("count")
- protected val MAX = Keyword("max")
- protected val MIN = Keyword("min")
- protected val SUM = Keyword("sum")
- protected val BY = Keyword("by")
- protected val ORDER = Keyword("order")
- protected val LIKE = Keyword("like")
-}
-
-trait ExpressionUtils {
-
- protected val DESC = "desc"
- def loop(input: Expression, l: (Expression, Option[Literal[Integer]], Option[String])) = l match {
- case (c, None, None) => input.loop(c)
- case (c, t, None) => input.loop(c, t.get)
- case (c, None, Some(a)) => input.loop(c).as(a)
- case (c, t, Some(a)) => input.loop(c, t.get).as(a)
- }
-
- def select(input: Expression, s: List[(Expression, Option[String])], forGroupBy: Boolean = false) = {
- val selList = s.map { t =>
- t._2 match {
- case None => t._1.as(s"${t._1}")
- case _ => t._1.as(t._2.get)
- }
- }
- new SelectExpression(input, selList, forGroupBy)
- }
-
- def limit(input: Expression, lmt: Literal[Integer], offset: Literal[Integer]) = {
- input.limit(lmt, offset)
- }
-
- def order(input: Expression, odr: Expression, asc: Boolean) = {
- input.order(odr, asc)
- }
-
- def leftmostId(e: Expression) = {
- var le: IdExpression = null
- e.traverseUp { case i: IdExpression if le == null => le = i}
- le
- }
-
- def notIdExpression = new PartialFunction[Expression, Expression] {
- def isDefinedAt(x: Expression): Boolean = !x.isInstanceOf[IdExpression]
-
- def apply(e: Expression) = e
- }
-
- def replaceIdWithField(id: IdExpression, fe: UnresolvedFieldExpression): PartialFunction[Expression, Expression] = {
- case e: IdExpression if e == id => fe
- }
-
- def merge(snglQuery1: Expression, sngQuery2: Expression): Expression = {
- val leftSrcId = leftmostId(sngQuery2)
- sngQuery2.transformUp(replaceIdWithField(leftSrcId, snglQuery1.field(leftSrcId.name)))
- }
-
- def groupBy(input: Expression, groupByExpr: SelectExpression, selectExpr: SelectExpression) = {
- input.groupBy(groupByExpr, selectExpr)
- }
-}
-
-case class QueryParams(limit: Int, offset: Int)
-
-/**
- * Query parser is used to parse the DSL query. It uses scala PackratParsers and pattern matching to extract the expressions.
- * It builds up a expression tree.
- */
-object QueryParser extends StandardTokenParsers with QueryKeywords with ExpressionUtils with PackratParsers {
-
- import scala.language.higherKinds
-
- private val reservedWordsDelims: Seq[String] = this.
- getClass.getMethods.filter(_.getReturnType == classOf[Keyword]).map(_.invoke(this).asInstanceOf[Keyword].str)
-
- private val (queryreservedWords: Seq[String], querydelims: Seq[String]) =
- reservedWordsDelims.partition(s => s.charAt(0).isLetter)
-
- override val lexical = new QueryLexer(queryreservedWords, querydelims)
-
- /**
- * @param input query string
- * @param queryParams query parameters that contains limit and offset
- * @return
- */
- def apply(input: String)(implicit queryParams: QueryParams = null): Either[NoSuccess, Expression] = synchronized {
- phrase(queryWithPath)(new lexical.Scanner(input)) match {
- case Success(r, x) => Right(r)
- case f@Failure(m, x) => Left(f)
- case e@Error(m, x) => Left(e)
- }
- }
-
- import scala.math._
-
- def queryWithPath(implicit queryParams: QueryParams) = query ~ opt(WITHPATH) ^^ {
- case q ~ None => q
- case q ~ p => q.path()
- }
-
- /**
- * A singleQuery can have the following forms:
- * 1. SrcQuery [select] [orderby desc] [Limit x offset y] -> source query followed by optional select statement followed by optional order by followed by optional limit
- * eg: Select "hive_db where hive_db has name orderby 'hive_db.owner' limit 2 offset 1"
- *
- * @return
- */
- def query(implicit queryParams: QueryParams) = querySrc ~ opt(loopExpression) ~ opt(groupByExpr) ~ opt(selectClause) ~ opt(orderby) ~ opt(limitOffset) ^^ {
- case s ~ l ~ grp ~ sel ~ odr ~ lmtoff => {
- var expressiontree = s
- if (l.isDefined) //Note: The order of if statements is important.
- {
- expressiontree = loop(expressiontree, l.get);
- }
- if (odr.isDefined)
- {
- expressiontree = order(expressiontree, odr.get._1, odr.get._2)
- }
- if (queryParams != null && lmtoff.isDefined)
- {
- val mylimit = int(min(queryParams.limit, max(lmtoff.get._1 - queryParams.offset, 0)))
- val myoffset = int(queryParams.offset + lmtoff.get._2)
- expressiontree = limit(expressiontree, mylimit, myoffset)
- } else if(lmtoff.isDefined) {
- expressiontree = limit(expressiontree, int(lmtoff.get._1), int(lmtoff.get._2))
- } else if(queryParams != null) {
- expressiontree = limit(expressiontree, int(queryParams.limit), int(queryParams.offset))
- }
- if (grp.isDefined && sel.isDefined)
- {
-
- var child = expressiontree
- var selectExpr: SelectExpression = select(child, sel.get, true)
- var grpBySelectExpr: SelectExpression = select(child, grp.get, true)
- expressiontree = groupBy(child, grpBySelectExpr, selectExpr)
- }
- else if (grp.isDefined)
- {
- throw new AtlasException("groupby without select is not allowed");
- }
- else if (sel.isDefined)
- {
- var selectChild = expressiontree
- val selExpr : SelectExpression = select(selectChild, sel.get);
- if(selExpr.hasAggregation) {
- //In order to do the aggregation, we need to add an implicit group by. Having the
- //group by expression be a constant values forces all of the vertices into one group.
- val groupByConstant : Expression = Expressions.literal(DataTypes.STRING_TYPE, "dummy");
- val groupBySelExpr : SelectExpression = select(selectChild, sel.get, true);
- val groupByListExpr : SelectExpression = select(selectChild, List((groupByConstant,None)), true)
- expressiontree = groupBy(selectChild, groupByListExpr, groupBySelExpr)
- }
- else {
- expressiontree = selExpr
- }
- }
- expressiontree
- }
- }
-
- def querySrc: Parser[Expression] = rep1sep(singleQrySrc, opt(COMMA)) ^^ { l => l match {
- case h :: Nil => h
- case h :: t => t.foldLeft(h)(merge(_, _))
- case Nil => null
- }
- }
-
- /**
- * A SingleQuerySrc can have the following forms:
- * 1. FROM id [WHERE] [expr] -> from optionally followed by a filter
- * 2. WHERE expr -> where clause, FROM is assumed to be the leftmost Id in the where clause
- * 3. expr (that is not an IdExpression) -> where clause, FROM is assumed to be the leftmost Id in the expr
- * 4. Id [WHERE] [expr] -> from optionally followed by a filter
- *
- * @return
- */
- def singleQrySrc: Parser[Expression] = FROM ~ fromSrc ~ opt(WHERE) ~ opt(expr ^? notIdExpression) ^^ {
- case f ~ i ~ w ~ None => i
- case f ~ i ~ w ~ c => i.where(c.get)
- } |
- WHERE ~ (expr ^? notIdExpression) ^^ { case w ~ e => {
- val lId = leftmostId(e)
- if (lId == null) {
- failure("cannot infer Input from the where clause")
- }
- lId.where(e)
- }
- } |
- expr ^? notIdExpression ^^ { case e => {
- val lId = leftmostId(e)
- if (lId == null) {
- failure("cannot infer Input from the where clause")
- }
- lId.where(e)
- }
- } |
- fromSrc ~ opt(WHERE) ~ opt(expr ^? notIdExpression) ^^ {
- case i ~ w ~ None => i
- case i ~ w ~ c => i.where(c.get)
- }
-
- def fromSrc = identifier ~ AS ~ alias ^^ { case s ~ a ~ al => s.as(al)} |
- identifier
-
- def orderby = (ORDERBY|(ORDER ~ BY )) ~ expr ~ opt (asce) ^^ {
- case o ~ odr ~ None => (odr, true)
- case o ~ odr ~ asc => (odr, asc.get)
- }
-
- def limitOffset: Parser[(Int, Int)] = LIMIT ~ lmt ~ opt (offset) ^^ {
- case l ~ lt ~ None => (lt.toInt, 0)
- case l ~ lt ~ of => (lt.toInt, of.get.toInt)
- }
-
- def offset = OFFSET ~ ofset ^^ {
- case offset ~ of => of
- }
-
- def asce = asc ^^ {
- case DESC => false
- case _ => true
- }
-
- def loopExpression(implicit queryParams: QueryParams): Parser[(Expression, Option[Literal[Integer]], Option[String])] =
- LOOP ~ (LPAREN ~> query <~ RPAREN) ~ opt(intConstant <~ TIMES) ~ opt(AS ~> alias) ^^ {
- case l ~ e ~ None ~ a => (e, None, a)
- case l ~ e ~ Some(i) ~ a => (e, Some(int(i)), a)
- }
-
- def selectClause: Parser[List[(Expression, Option[String])]] = SELECT ~ rep1sep(selectExpression, COMMA) ^^ {
- case s ~ cs => cs
- }
- def selectExpression: Parser[(Expression, Option[String])] = expr ~ opt(AS ~> alias) ^^ {
- case e ~ a => (e, a)
- }
-
- def expr: Parser[Expression] = compE ~ opt(rep(exprRight)) ^^ {
- case l ~ None => l
- case l ~ Some(r) => r.foldLeft(l) { (l, r) => l.logicalOp(r._1)(r._2)}
- }
-
- def exprRight = (AND | OR) ~ compE ^^ { case op ~ c => (op, c)}
-
- def compE =
- arithE ~ (LT | LTE | EQ | NEQ | GT | GTE | LIKE) ~ arithE ^^ { case l ~ op ~ r => l.compareOp(op)(r)} |
- arithE ~ (ISA | IS) ~ ident ^^ { case l ~ i ~ t => l.isTrait(t)} |
- arithE ~ HAS ~ ident ^^ { case l ~ i ~ f => l.hasField(f)} |
- arithE | countClause | maxClause | minClause | sumClause
-
- def arithE = multiE ~ opt(rep(arithERight)) ^^ {
- case l ~ None => l
- case l ~ Some(r) => r.foldLeft(l) { (l, r) => l.arith(r._1)(r._2)}
- }
-
- def arithERight = (PLUS | MINUS) ~ multiE ^^ { case op ~ r => (op, r)}
-
- def multiE = atomE ~ opt(rep(multiERight)) ^^ {
- case l ~ None => l
- case l ~ Some(r) => r.foldLeft(l) { (l, r) => l.arith(r._1)(r._2)}
- }
-
- def multiERight = (STAR | DIV) ~ atomE ^^ { case op ~ r => (op, r)}
-
- def atomE = literal | identifier | LPAREN ~> expr <~ RPAREN | listLiteral
-
- def listLiteral = LIST_LPAREN ~ rep1sep(literal, COMMA) ~ LIST_RPAREN ^^ {
- case lp ~ le ~ rp => list(le)
- }
-
- def identifier = rep1sep(ident, DOT) ^^ { l => l match {
-
- /*
- * We don't have enough context here to know what the id can be.
- * Examples:
- * Column isa PII - "Column" could be a field, type, or alias
- * name = 'John' - "name" must be a field.
- * Use generic id(), and let the type be refined based on the context later.
- */
- case h :: Nil => id(h)
-
- /*
- * The left-most part of the identifier ("h") can be either a type or an alias. However,
- * Atlas does support struct attributes, whose fields must be accessed through
- * this syntax. Let the downstream processing figure out which case we're in.
- *
- * Examples:
- * hive_table.name - here, hive_table must be a type
- * sortCol.order - here, sortCol is a struct attribute, must resolve to a field.
- */
- case h :: t => { //the left-most part of the identifier (h) can be
- t.foldLeft(id(h).asInstanceOf[Expression])(_.field(_))
- }
-
- case Nil => null
- }
- }
-
- def alias = ident | stringLit
-
- def lmt = intConstant
-
- def ofset = intConstant
-
- def asc = ident | stringLit
-
- def literal = booleanConstant ^^ {
- boolean(_)
- } |
- intConstant ^^ {
- int(_)
- } |
- longConstant ^^ {
- long(_)
- } |
- floatConstant ^^ {
- float(_)
- } |
- doubleConstant ^^ {
- double(_)
- } |
- stringLit ^^ {
- string(_)
- }
-
- def booleanConstant: Parser[String] =
- elem("int", _.isInstanceOf[lexical.BooleanLiteral]) ^^ (_.chars)
-
- def intConstant: Parser[String] =
- elem("int", _.isInstanceOf[lexical.IntLiteral]) ^^ (_.chars)
-
- def longConstant: Parser[String] =
- elem("int", _.isInstanceOf[lexical.LongLiteral]) ^^ (_.chars)
-
- def floatConstant: Parser[String] =
- elem("int", _.isInstanceOf[lexical.FloatLiteral]) ^^ (_.chars)
-
- def doubleConstant: Parser[String] =
- elem("int", _.isInstanceOf[lexical.DoubleLiteral]) ^^ (_.chars)
-
- def countClause = COUNT ~ LPAREN ~ RPAREN ^^ {
- case c => count()
- }
- def maxClause = MAX ~ (LPAREN ~> expr <~ RPAREN) ^^ {
- case m ~ e => maxExpr(e)
- }
- def minClause = MIN ~ (LPAREN ~> expr <~ RPAREN) ^^ {
- case m ~ e => minExpr(e)
- }
- def sumClause = SUM ~ (LPAREN ~> expr <~ RPAREN) ^^ {
- case m ~ e => sumExpr(e)
- }
- def groupByExpr = GROUPBY ~ (LPAREN ~> rep1sep(selectExpression, COMMA) <~ RPAREN) ^^ {
- case g ~ ce => ce
- }
-
- def isKeyword(s: String) = queryreservedWords.contains(s)
-}
-
-class QueryLexer(val keywords: Seq[String], val delims: Seq[String]) extends StdLexical with ImplicitConversions {
-
- case class BooleanLiteral(chars: String) extends Token {
- override def toString = chars
- }
-
- case class IntLiteral(chars: String) extends Token {
- override def toString = chars
- }
-
- case class LongLiteral(chars: String) extends Token {
- override def toString = chars
- }
-
- case class FloatLiteral(chars: String) extends Token {
- override def toString = chars
- }
-
- case class DoubleLiteral(chars: String) extends Token {
- override def toString = chars
- }
-
- reserved ++= keywords.flatMap(w => allCaseVersions(w))
-
- delimiters ++= delims
-
- override lazy val token: Parser[Token] =
- (
- (trueP | falseP)
- | longConstant ^^ LongLiteral
- | intConstant ^^ IntLiteral
- | floatConstant ^^ FloatLiteral
- | dubConstant ^^ DoubleLiteral
- | identifier ^^ processIdent
- | quotedIdentifier ^^ Identifier
- | string ^^ StringLit
- | EofCh ^^^ EOF
- | '\'' ~> failure("unclosed string literal")
- | '"' ~> failure("unclosed string literal")
- | delim
- | '.' ^^^ new Keyword(".")
- | failure("illegal character")
- )
-
- override def identChar = letter | elem('_')
-
- def identifier = identChar ~ (identChar | digit).* ^^ { case first ~ rest => (first :: rest).mkString}
-
- def quotedIdentifier = '`' ~> chrExcept('`', '\n', EofCh).* <~ '`' ^^ {
- _ mkString ""
- }
-
- override def whitespace: Parser[Any] =
- (whitespaceChar
- | '/' ~ '*' ~ comment
- | '/' ~ '/' ~ chrExcept(EofCh, '\n').*
- | '#' ~ chrExcept(EofCh, '\n').*
- | '/' ~ '*' ~ failure("unclosed comment")
- ).*
-
- protected override def comment: Parser[Any] = (
- commentChar.* ~ '*' ~ '/'
- )
-
- protected def commentChar = chrExcept(EofCh, '*') | '*' ~ not('/')
-
- def string = '\"' ~> chrExcept('\"', '\n', EofCh).* <~ '\"' ^^ {
- _ mkString ""
- } |
- '\'' ~> chrExcept('\'', '\n', EofCh).* <~ '\'' ^^ {
- _ mkString ""
- }
-
- def zero: Parser[String] = '0' ^^^ "0"
-
- def nonzero = elem("nonzero digit", d => d.isDigit && d != '0')
-
- def sign = elem("sign character", d => d == '-' || d == '+')
-
- def exponent = elem("exponent character", d => d == 'e' || d == 'E')
-
-
- def intConstant = opt(sign) ~> zero | intList
-
- def intList = opt(sign) ~ nonzero ~ rep(digit) ^^ { case s ~ x ~ y => (optString("", s) :: x :: y) mkString ""}
-
- def fracPart: Parser[String] = '.' ~> rep(digit) ^^ { r =>
- "." + (r mkString "")
- }
-
- def expPart = exponent ~ opt(sign) ~ rep1(digit) ^^ { case e ~ s ~ d =>
- e.toString + optString("", s) + d.mkString("")
- }
-
- def dubConstant = opt(sign) ~ digit.+ ~ fracPart ~ opt(expPart) ^^ {
- case s ~ i ~ f ~ e => {
- optString("", s) + (i mkString "") + f + optString("", e)
- }
- }
-
- def floatConstant = opt(sign) ~ digit.* ~ fracPart ~ 'f' ^^ { case s ~ i ~ fr ~ f =>
- optString("", s) + i + fr
- } | opt(sign) ~ digit.+ ~ opt(fracPart) ~ 'f' ^^ { case s ~ i ~ fr ~ f =>
- optString("", s) + i + optString("", fr)
- }
-
- def longConstant = intConstant ~ 'l' ^^ { case i ~ l => i}
-
- def trueP = 't' ~ 'r' ~ 'u' ~ 'e' ^^^ BooleanLiteral("true")
-
- def falseP = 'f' ~ 'a' ~ 'l' ~ 's' ~ 'e' ^^^ BooleanLiteral("false")
-
- private def optString[A](pre: String, a: Option[A]) = a match {
- case Some(x) => pre + x.toString
- case None => ""
- }
-
- /** Generate all variations of upper and lower case of a given string */
- def allCaseVersions(s: String, prefix: String = ""): Stream[String] = {
- if (s.isEmpty) {
- Stream(prefix)
- } else {
- allCaseVersions(s.tail, prefix + s.head.toLower) #:::
- allCaseVersions(s.tail, prefix + s.head.toUpper)
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/scala/org/apache/atlas/query/QueryProcessor.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/QueryProcessor.scala b/repository/src/main/scala/org/apache/atlas/query/QueryProcessor.scala
deleted file mode 100755
index e1e8408..0000000
--- a/repository/src/main/scala/org/apache/atlas/query/QueryProcessor.scala
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import org.apache.atlas.repository.graphdb.AtlasGraph
-import org.apache.atlas.query.Expressions._
-import org.slf4j.{Logger, LoggerFactory}
-import org.apache.atlas.util.AtlasRepositoryConfiguration
-import org.apache.atlas.utils.LruCache
-import org.apache.atlas.util.CompiledQueryCacheKey
-import java.util.Collections
-
-object QueryProcessor {
- val LOG : Logger = LoggerFactory.getLogger("org.apache.atlas.query.QueryProcessor")
-
- val compiledQueryCache = Collections.synchronizedMap(new LruCache[CompiledQueryCacheKey, GremlinQuery](
- AtlasRepositoryConfiguration.getCompiledQueryCacheCapacity(),
- AtlasRepositoryConfiguration.getCompiledQueryCacheEvictionWarningThrottle()));
-
- def evaluate(e: Expression, g: AtlasGraph[_,_], gP : GraphPersistenceStrategies = null):
- GremlinQueryResult = {
-
- var strategy = gP;
- if(strategy == null) {
- strategy = GraphPersistenceStrategy1(g);
- }
-
- //convert the query expression to DSL so we can check whether or not it is in the compiled
- //query cache and avoid validating/translating it again if it is.
- val dsl = e.toString();
- val cacheKey = new CompiledQueryCacheKey(dsl);
- var q = compiledQueryCache.get(cacheKey);
- if(q == null) {
-
- //query was not found in the compiled query cache. Validate
- //and translate it, then cache the result.
-
- val e1 = validate(e)
- q = new GremlinTranslator(e1, strategy).translate()
- compiledQueryCache.put(cacheKey, q);
- if(LOG.isDebugEnabled()) {
- LOG.debug("Validated Query: " + e1)
- LOG.debug("Expression Tree:\n" + e1.treeString);
- }
- }
- if(LOG.isDebugEnabled()) {
- LOG.debug("DSL Query: " + dsl);
- LOG.debug("Gremlin Query: " + q.queryStr)
- }
- new GremlinEvaluator(q, strategy, g).evaluate()
- }
-
- def validate(e: Expression): Expression = {
-
- val e1 = e.transformUp(refineIdExpressionType);
- val e2 = e1.transformUp(new Resolver(None,e1.namedExpressions))
-
- e2.traverseUp {
- case x: Expression if !x.resolved =>
- throw new ExpressionException(x, s"Failed to resolved expression $x")
- }
-
- /*
- * trigger computation of dataType of expression tree
- */
- e2.dataType
-
- /*
- * ensure fieldReferences match the input expression's dataType
- */
- val e3 = e2.transformUp(FieldValidator)
- val e4 = e3.transformUp(new Resolver(None,e3.namedExpressions))
-
- e4.dataType
-
- e4
- }
-
- val convertToFieldIdExpression : PartialFunction[Expression,Expression] = {
- case IdExpression(name, IdExpressionType.Unresolved) => IdExpression(name, IdExpressionType.NonType);
- }
-
-
- //this function is called in a depth first manner on the expression tree to set the exprType in IdExpressions
- //when we know them. Since Expression classes are immutable, in order to do this we need to create new instances
- //of the case. The logic here enumerates the cases that have been identified where the given IdExpression
- //cannot resolve to a class or trait. This is the case in any places where a field value must be used.
- //For example, you cannot add two classes together or compare traits. Any IdExpressions in those contexts
- //refer to unqualified attribute names. On a similar note, select clauses need to product an actual value.
- //For example, in 'from DB select name' or 'from DB select name as n', name must be an attribute.
- val refineIdExpressionType : PartialFunction[Expression,Expression] = {
-
- //spit out the individual cases to minimize the object churn. Specifically, for ComparsionExpressions where neither
- //child is an IdExpression, there is no need to create a new ComparsionExpression object since neither child will
- //change. This applies to ArithmeticExpression as well.
- case c@ComparisonExpression(symbol, l@IdExpression(_,IdExpressionType.Unresolved) , r@IdExpression(_,IdExpressionType.Unresolved)) => {
- ComparisonExpression(symbol, convertToFieldIdExpression(l), convertToFieldIdExpression(r))
- }
- case c@ComparisonExpression(symbol, l@IdExpression(_,IdExpressionType.Unresolved) , r) => ComparisonExpression(symbol, convertToFieldIdExpression(l), r)
- case c@ComparisonExpression(symbol, l, r@IdExpression(_,IdExpressionType.Unresolved)) => ComparisonExpression(symbol, l, convertToFieldIdExpression(r))
-
- case e@ArithmeticExpression(symbol, l@IdExpression(_,IdExpressionType.Unresolved) , r@IdExpression(_,IdExpressionType.Unresolved)) => {
- ArithmeticExpression(symbol, convertToFieldIdExpression(l), convertToFieldIdExpression(r))
- }
- case e@ArithmeticExpression(symbol, l@IdExpression(_,IdExpressionType.Unresolved) , r) => ArithmeticExpression(symbol, convertToFieldIdExpression(l), r)
- case e@ArithmeticExpression(symbol, l, r@IdExpression(_,IdExpressionType.Unresolved)) => ArithmeticExpression(symbol, l, convertToFieldIdExpression(r))
-
- case s@SelectExpression(child, selectList, forGroupBy) => {
- var changed = false
- val newSelectList = selectList.map {
-
- expr => expr match {
- case e@IdExpression(_,IdExpressionType.Unresolved) => { changed=true; convertToFieldIdExpression(e) }
- case AliasExpression(child@IdExpression(_,IdExpressionType.Unresolved), alias) => {changed=true; AliasExpression(convertToFieldIdExpression(child), alias)}
- case x => x
- }
- }
- if(changed) {
- SelectExpression(child, newSelectList, forGroupBy)
- }
- else {
- s
- }
- }
-
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/scala/org/apache/atlas/query/Resolver.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/Resolver.scala b/repository/src/main/scala/org/apache/atlas/query/Resolver.scala
deleted file mode 100755
index 1b42f3e..0000000
--- a/repository/src/main/scala/org/apache/atlas/query/Resolver.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import org.apache.atlas.query.Expressions._
-import org.apache.atlas.typesystem.types.IDataType
-import org.apache.atlas.typesystem.types.TraitType
-import org.apache.atlas.typesystem.types.ClassType
-class Resolver(srcExpr: Option[Expression] = None, aliases: Map[String, Expression] = Map(),
- connectClassExprToSrc: Boolean = false)
- extends PartialFunction[Expression, Expression] {
-
- import org.apache.atlas.query.TypeUtils._
-
- def isDefinedAt(x: Expression) = true
-
- def apply(e: Expression): Expression = e match {
- case idE@IdExpression(name, exprType) => {
-
- val backExpr = aliases.get(name)
- if (backExpr.isDefined) {
- if(backExpr.get.resolved) {
- return new BackReference(name, backExpr.get, None)
- }
- else {
- //replace once resolved
- return idE;
- }
- }
-
- if (srcExpr.isDefined) {
- val fInfo = resolveReference(srcExpr.get.dataType, name)
- if (fInfo.isDefined) {
- return new FieldExpression(name, fInfo.get, None)
- }
- }
-
- if(exprType.isTypeAllowed) {
- val dt = resolveAsDataType(name);
- if(dt.isDefined) {
- if(dt.get.isInstanceOf[ClassType]) {
- return new ClassExpression(name)
- }
- if(dt.get.isInstanceOf[TraitType]) {
- return new TraitExpression(name)
- }
- }
- }
- idE
- }
- case ce@ClassExpression(clsName) if connectClassExprToSrc && srcExpr.isDefined => {
- val fInfo = resolveReference(srcExpr.get.dataType, clsName)
- if (fInfo.isDefined) {
- return new FieldExpression(clsName, fInfo.get, None)
- }
- ce
- }
- case f@UnresolvedFieldExpression(child, fieldName) if child.resolved => {
- var fInfo: Option[FieldInfo] = None
-
- fInfo = resolveReference(child.dataType, fieldName)
- if (fInfo.isDefined) {
- return new FieldExpression(fieldName, fInfo.get, Some(child))
- }
- val tType = resolveAsTraitType(fieldName)
- if (tType.isDefined) {
- return new FieldExpression(fieldName, FieldInfo(child.dataType, null, null, fieldName), Some(child))
- }
- f
- }
- case isTraitLeafExpression(traitName, classExpression)
- if srcExpr.isDefined && !classExpression.isDefined =>
- isTraitLeafExpression(traitName, srcExpr)
- case hasFieldLeafExpression(traitName, classExpression)
- if srcExpr.isDefined && !classExpression.isDefined =>
- hasFieldLeafExpression(traitName, srcExpr)
- case f@FilterExpression(inputExpr, condExpr) if inputExpr.resolved => {
- val r = new Resolver(Some(inputExpr), inputExpr.namedExpressions)
- return new FilterExpression(inputExpr, condExpr.transformUp(r))
- }
- case SelectExpression(child, selectList, forGroupBy) if child.resolved => {
- val r = new Resolver(Some(child), child.namedExpressions)
- return new SelectExpression(child, selectList.map {
- _.transformUp(r)
- }, forGroupBy)
- }
- case l@LoopExpression(inputExpr, loopExpr, t) if inputExpr.resolved => {
- val r = new Resolver(Some(inputExpr), inputExpr.namedExpressions, true)
- return new LoopExpression(inputExpr, loopExpr.transformUp(r), t)
- }
- case lmt@LimitExpression(child, limit, offset) => {
- val r = new Resolver(Some(child), child.namedExpressions)
- return new LimitExpression(child.transformUp(r), limit, offset)
- }
- case order@OrderExpression(child, odr, asc) => {
- val r = new Resolver(Some(child), child.namedExpressions)
- return new OrderExpression(child, odr.transformUp(r), asc)
- }
- case x => x
- }
-}
-
-/**
- * - any FieldReferences that explicitly reference the input, can be converted to implicit references
- * - any FieldReferences that explicitly reference a
- */
-object FieldValidator extends PartialFunction[Expression, Expression] {
-
- def isDefinedAt(x: Expression) = true
-
- def isSrc(e: Expression) = e.isInstanceOf[ClassExpression] || e.isInstanceOf[TraitExpression]
-
- def validateQualifiedField(srcDataType: IDataType[_]): PartialFunction[Expression, Expression] = {
- case FieldExpression(fNm, fInfo, Some(child))
- if (child.children == Nil && !child.isInstanceOf[BackReference] && child.dataType == srcDataType) =>
- FieldExpression(fNm, fInfo, None)
- case fe@FieldExpression(fNm, fInfo, Some(child)) if isSrc(child) =>
- throw new ExpressionException(fe, s"srcType of field doesn't match input type")
- case hasFieldUnaryExpression(fNm, child) if child.dataType == srcDataType =>
- hasFieldLeafExpression(fNm, Some(child))
- case hF@hasFieldUnaryExpression(fNm, child) if isSrc(child) =>
- throw new ExpressionException(hF, s"srcType of field doesn't match input type")
- case isTraitUnaryExpression(fNm, child) if child.dataType == srcDataType =>
- isTraitLeafExpression(fNm)
- case iT@isTraitUnaryExpression(fNm, child) if isSrc(child) =>
- throw new ExpressionException(iT, s"srcType of field doesn't match input type")
- }
-
- def validateOnlyFieldReferencesInLoopExpressions(loopExpr: LoopExpression)
- : PartialFunction[Expression, Unit] = {
- case f: FieldExpression => ()
- case x => throw new ExpressionException(loopExpr,
- s"Loop Expression can only contain field references; '${x.toString}' not supported.")
- }
-
- def apply(e: Expression): Expression = e match {
- case f@FilterExpression(inputExpr, condExpr) => {
- val validatedCE = condExpr.transformUp(validateQualifiedField(inputExpr.dataType))
- if (validatedCE.fastEquals(condExpr)) {
- f
- } else {
- new FilterExpression(inputExpr, validatedCE)
- }
- }
- case SelectExpression(child, selectList, forGroupBy) if child.resolved => {
- val v = validateQualifiedField(child.dataType)
- return new SelectExpression(child, selectList.map {
- _.transformUp(v)
- }, forGroupBy)
- }
- case OrderExpression(child, order, asc) => {
- val v = validateQualifiedField(child.dataType)
- OrderExpression(child, order.transformUp(v), asc)
- }
- case l@LoopExpression(inputExpr, loopExpr, t) => {
- val validatedLE = loopExpr.transformUp(validateQualifiedField(inputExpr.dataType))
- val l1 = {
- if (validatedLE.fastEquals(loopExpr)) l
- else new LoopExpression(inputExpr, validatedLE, t)
- }
- l1.loopingExpression.traverseUp(validateOnlyFieldReferencesInLoopExpressions(l1))
- l1
- }
- case x => x
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/scala/org/apache/atlas/query/TypeUtils.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/TypeUtils.scala b/repository/src/main/scala/org/apache/atlas/query/TypeUtils.scala
deleted file mode 100755
index 8d2c7ae..0000000
--- a/repository/src/main/scala/org/apache/atlas/query/TypeUtils.scala
+++ /dev/null
@@ -1,279 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import java.util
-import java.util.concurrent.atomic.AtomicInteger
-
-import org.apache.atlas.AtlasException
-import org.apache.atlas.query.Expressions.{LimitExpression, PathExpression, SelectExpression}
-import org.apache.atlas.repository.Constants
-import org.apache.atlas.repository.graph.GraphHelper
-import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, PrimitiveType, TypeCategory}
-import org.apache.atlas.typesystem.types._
-
-object TypeUtils {
- val typSystem = TypeSystem.getInstance()
-
- def numericTypes : Seq[PrimitiveType[_]] = Seq(DataTypes.BYTE_TYPE,
- DataTypes.SHORT_TYPE,
- DataTypes.INT_TYPE,
- DataTypes.FLOAT_TYPE,
- DataTypes.LONG_TYPE,
- DataTypes.DOUBLE_TYPE,
- DataTypes.BIGINTEGER_TYPE,
- DataTypes.BIGDECIMAL_TYPE)
-
- def combinedType(typ1 : IDataType[_], typ2 : IDataType[_]) : PrimitiveType[_] = {
- val typ1Idx = if (numericTypes.contains(typ1)) Some(numericTypes.indexOf(typ1)) else None
- val typ2Idx = if (numericTypes.contains(typ2)) Some(numericTypes.indexOf(typ2)) else None
-
- if ( typ1Idx.isDefined && typ2Idx.isDefined ) {
- val rIdx = math.max(typ1Idx.get, typ2Idx.get)
-
- if ( (typ1 == DataTypes.FLOAT_TYPE && typ2 == DataTypes.LONG_TYPE) ||
- (typ1 == DataTypes.LONG_TYPE && typ2 == DataTypes.FLOAT_TYPE) ) {
- return DataTypes.DOUBLE_TYPE
- }
- return numericTypes(rIdx)
- }
-
- throw new AtlasException(s"Cannot combine types: ${typ1.getName} and ${typ2.getName}")
- }
-
- var tempStructCounter : AtomicInteger = new AtomicInteger(0)
- val TEMP_STRUCT_NAME_PREFIX = "__tempQueryResultStruct"
- def createStructType(selectExprs : List[Expressions.AliasExpression]) : StructType = {
- val aDefs = new Array[AttributeDefinition](selectExprs.size)
- selectExprs.zipWithIndex.foreach { t =>
- val (e,i) = t
- aDefs(i) = new AttributeDefinition(e.alias,e.dataType.getName, Multiplicity.OPTIONAL, false, null)
- }
- return typSystem.defineQueryResultType(s"${TEMP_STRUCT_NAME_PREFIX}${tempStructCounter.getAndIncrement}",
- null,
- aDefs:_*);
- }
-
- object ResultWithPathStruct {
- val pathAttrName = "path"
- val resultAttrName = "result"
- val pathAttrType = DataTypes.arrayTypeName(typSystem.getIdType.getStructType)
-
- val pathAttr = new AttributeDefinition(pathAttrName, pathAttrType, Multiplicity.COLLECTION, false, null)
-
- def createType(pE : PathExpression, resultType : IDataType[_]) : StructType = {
- val resultAttr = new AttributeDefinition(resultAttrName, resultType.getName, Multiplicity.REQUIRED, false, null)
- val typName = s"${TEMP_STRUCT_NAME_PREFIX}${tempStructCounter.getAndIncrement}"
- val m : java.util.HashMap[String, IDataType[_]] = new util.HashMap[String, IDataType[_]]()
- if (pE.child.isInstanceOf[SelectExpression] || pE.child.isInstanceOf[LimitExpression]) {
- m.put(pE.child.dataType.getName, pE.child.dataType)
- }
- typSystem.defineQueryResultType(typName, m, pathAttr, resultAttr);
- }
- }
-
- /**
- * Structure representing the Closure Graph.
- * Returns:
- * 1. A map of vertexId -> vertex Info(these are the attributes requested in the query)
- * 2. A edges map: each entry is a mapping from an vertexId to the List of adjacent vertexIds.
- *
- * '''The Vertex Map doesn't contain all the vertices in the Graph. Only the ones for which Attributes are
- * available.''' These are the vertices that represent the EntityType whose Closure was requested. For e.g. for
- * Table Lineage the ''vertex map'' will contain information about Tables, but not about ''Load Process'' vertices
- * that connect Tables.
- */
- object GraphResultStruct {
- val SRC_PREFIX = "src"
- val DEST_PREFIX = "dest"
-
- val verticesAttrName = "vertices"
- val edgesAttrName = "edges"
- val vertexIdAttrName = "vertexId"
-
- lazy val edgesAttrType = typSystem.defineMapType(DataTypes.STRING_TYPE,
- typSystem.defineArrayType(DataTypes.STRING_TYPE))
-
- def createType(resultWithPathType: StructType): StructType = {
- val resultType = resultWithPathType.fieldMapping().fields.get(ResultWithPathStruct.resultAttrName).dataType()
-
- val verticesAttrType = typSystem.defineMapType(DataTypes.STRING_TYPE,
- vertexType(resultType.asInstanceOf[StructType]))
- val typName = s"${TEMP_STRUCT_NAME_PREFIX}${tempStructCounter.getAndIncrement}"
- val verticesAttr = new AttributeDefinition(verticesAttrName, verticesAttrType.getName,
- Multiplicity.REQUIRED, false, null)
- val edgesAttr = new AttributeDefinition(edgesAttrName, edgesAttrType.getName, Multiplicity.REQUIRED, false, null)
-
- val m: java.util.HashMap[String, IDataType[_]] = new util.HashMap[String, IDataType[_]]()
- m.put(resultWithPathType.getName, resultWithPathType)
- m.put(resultType.getName, resultType)
- m.put(edgesAttrType.getName, edgesAttrType)
- m.put(verticesAttrType.getName, verticesAttrType)
- typSystem.defineQueryResultType(typName, m, verticesAttr, edgesAttr)
- }
-
- private def vertexType(resultType: StructType): StructType = {
-
- import scala.collection.JavaConverters._
-
- var attrs: List[AttributeDefinition] =
- resultType.fieldMapping.fields.asScala.filter(_._1.startsWith(s"${SRC_PREFIX}_")).mapValues { aInfo =>
-
- new AttributeDefinition(aInfo.name.substring(s"${SRC_PREFIX}_".length), aInfo.dataType.getName,
- aInfo.multiplicity, aInfo.isComposite, aInfo.reverseAttributeName)
- }.values.toList
-
- attrs = new AttributeDefinition(vertexIdAttrName, typSystem.getIdType.getStructType.name,
- Multiplicity.REQUIRED, false, null) :: attrs
-
- return typSystem.defineQueryResultType(s"${TEMP_STRUCT_NAME_PREFIX}${tempStructCounter.getAndIncrement}",
- null,
- attrs: _*)
- }
- }
-
- def fieldMapping(iDataType: IDataType[_]) : Option[FieldMapping] = iDataType match {
- case c : ClassType => Some(c.fieldMapping())
- case t : TraitType => Some(t.fieldMapping())
- case s : StructType => Some(s.fieldMapping())
- case _ => None
- }
-
- def hasFields(iDataType: IDataType[_]) : Boolean = {
- fieldMapping(iDataType).isDefined
- }
-
- import scala.language.existentials
- case class FieldInfo(dataType : IDataType[_],
- attrInfo : AttributeInfo,
- reverseDataType : IDataType[_] = null,
- traitName : String = null) {
- def isReverse = reverseDataType != null
- override def toString : String = {
- if ( traitName != null ) {
- s"""FieldInfo("${dataType.getName}", "$traitName")"""
- }
- else if ( reverseDataType == null ) {
- s"""FieldInfo("${dataType.getName}", "${attrInfo.name}")"""
- } else {
- s"""FieldInfo("${dataType.getName}", "${attrInfo.name}", "${reverseDataType.getName}")"""
- }
- }
- }
-
- val FIELD_QUALIFIER = "(.*?)(->.*)?".r
-
- /**
- * Given a ComposedType `t` and a name resolve using the following rules:
- * - if `id` is a field in `t` resolve to the field
- * - if `id` is the name of a Struct|Class|Trait Type and it has a field that is of type `t` then return that type
- *
- * For e.g.
- * 1. if we have types Table(name : String, cols : List[Column]), Column(name : String) then
- * `resolveReference(Table, "cols")` resolves to type Column. So a query can be "Table.cols"
- * 2. But if we have Table(name : String), Column(name : String, tbl : Table) then "Table.Column" will resolve
- * to type Column
- *
- * This way the language will support navigation even if the relationship is one-sided.
- *
- * @param typ
- * @param id
- * @return
- */
- def resolveReference(typ : IDataType[_], id : String) : Option[FieldInfo] = {
-
- val fMap = fieldMapping(typ)
- if ( fMap.isDefined) {
-
- if (fMap.get.fields.containsKey(id)) {
- return Some(FieldInfo(typ,fMap.get.fields.get(id)))
- }
-
- val systemField = GraphHelper.getAttributeInfoForSystemAttributes(id)
- if (systemField != null) {
- return Some(FieldInfo(systemField.dataType(), systemField))
- }
-
- try {
- val FIELD_QUALIFIER(clsNm, rest) = id
- val idTyp = typSystem.getDataType(classOf[IDataType[_]], clsNm)
- val idTypFMap = fieldMapping(idTyp)
-
- if (rest != null ) {
- val attrNm = rest.substring(2)
-
- if (idTypFMap.get.fields.containsKey(attrNm)) {
- return Some(FieldInfo(typ,idTypFMap.get.fields.get(attrNm), idTyp))
- }
- }
-
- if (idTypFMap.isDefined) {
- import scala.collection.JavaConversions._
- val fields: Seq[AttributeInfo] = idTypFMap.get.fields.values().filter { aInfo =>
- aInfo.dataType() == typ ||
- ( aInfo.dataType().getTypeCategory == TypeCategory.ARRAY &&
- aInfo.dataType().asInstanceOf[ArrayType].getElemType == typ
- )
- }.toSeq
- if (fields.size == 1) {
- return Some(FieldInfo(typ, fields(0), idTyp))
- }
- /*
- * is there only 1 array field of this type?
- * If yes resolve to it.
- * @todo: allow user to specify the relationship to follow by further qualifying the type. for e.g.
- * field("LoadProcess.inputTables")
- */
- val aFields = fields.filter { aInfo => aInfo.dataType().getTypeCategory == TypeCategory.ARRAY}
- if (aFields.size == 1) {
- return Some(FieldInfo(typ, aFields(0), idTyp))
- }
- }
- } catch {
- case _ : AtlasException => None
- }
- }
- None
- }
-
- def resolveAsDataType(id : String) : Option[IDataType[_]] = {
- try {
- Some(typSystem.getDataType(id))
- } catch {
- case _ : AtlasException => None
- }
-
- }
-
- def resolveAsClassType(id : String) : Option[ClassType] = {
- try {
- Some(typSystem.getDataType(classOf[ClassType], id))
- } catch {
- case _ : AtlasException => None
- }
- }
-
- def resolveAsTraitType(id : String) : Option[TraitType] = {
- try {
- Some(typSystem.getDataType(classOf[TraitType], id))
- } catch {
- case _ : AtlasException => None
- }
- }
-}
[33/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/GremlinQueryOptimizer.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/GremlinQueryOptimizer.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/GremlinQueryOptimizer.java
deleted file mode 100644
index a0c08fd..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/GremlinQueryOptimizer.java
+++ /dev/null
@@ -1,262 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.StatementListExpression;
-import org.apache.atlas.groovy.TraversalStepType;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.annotations.VisibleForTesting;
-
-
-
-/**
- * Optimizer for gremlin queries. This class provides a framework for applying optimizations
- * to gremlin queries. Each optimization is implemented as a class that implements {@link GremlinOptimization}.
- *
- * The GremlinQueryOptimizer is the entry point for applying these optimizations.
- *
- *
- */
-public final class GremlinQueryOptimizer {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(GremlinQueryOptimizer.class);
-
-
- private final List<GremlinOptimization> optimizations = new ArrayList<>();
-
- //Allows expression factory to be substituted in unit tests.
- private static volatile GremlinExpressionFactory FACTORY = GremlinExpressionFactory.INSTANCE;
-
- private static volatile GremlinQueryOptimizer INSTANCE = null;
-
- private GremlinQueryOptimizer() {
-
- }
-
- private void addOptimization(GremlinOptimization opt) {
- optimizations.add(opt);
- }
-
- public static GremlinQueryOptimizer getInstance() {
- if(INSTANCE == null) {
- synchronized(GremlinQueryOptimizer.class) {
- if(INSTANCE == null) {
- GremlinQueryOptimizer createdInstance = new GremlinQueryOptimizer();
- //The order here is important. If there is an "or" nested within an "and",
- //that will not be found if ExpandOrsOptimization runs before ExpandAndsOptimization.
- createdInstance.addOptimization(new ExpandAndsOptimization(FACTORY));
- createdInstance.addOptimization(new ExpandOrsOptimization(FACTORY));
- INSTANCE = createdInstance;
- }
- }
- }
- return INSTANCE;
- }
-
- /**
- * For testing only
- */
- @VisibleForTesting
- public static void setExpressionFactory(GremlinExpressionFactory factory) {
- GremlinQueryOptimizer.FACTORY = factory;
- }
-
- /**
- * For testing only
- */
- @VisibleForTesting
- public static void reset() {
- INSTANCE = null;
- }
-
- /**
- * Optimizes the provided groovy expression. Note that the optimization
- * is a <i>destructive</i> process. The source GroovyExpression will be
- * modified as part of the optimization process. This is done to avoid
- * expensive copying operations where possible.
- *
- * @param source what to optimize
- * @return the optimized query
- */
- public GroovyExpression optimize(GroovyExpression source) {
- LOGGER.debug("Optimizing gremlin query: " + source);
- OptimizationContext context = new OptimizationContext();
- GroovyExpression updatedExpression = source;
- for (GremlinOptimization opt : optimizations) {
- updatedExpression = optimize(updatedExpression, opt, context);
- LOGGER.debug("After "+ opt.getClass().getSimpleName() + ", query = " + updatedExpression);
- }
-
- StatementListExpression result = new StatementListExpression();
- result.addStatements(context.getInitialStatements());
- result.addStatement(updatedExpression);
- LOGGER.debug("Final optimized query: " + result.toString());
- return result;
- }
-
- /**
- * Optimizes the expression using the given optimization
- * @param source
- * @param optimization
- * @param context
- * @return
- */
- private GroovyExpression optimize(GroovyExpression source, GremlinOptimization optimization,
- OptimizationContext context) {
- GroovyExpression result = source;
- if (optimization.appliesTo(source, context)) {
- //Apply the optimization to the expression.
- result = optimization.apply(source, context);
- }
- if (optimization.isApplyRecursively()) {
- //Visit the children, update result with the optimized
- //children.
- List<GroovyExpression> updatedChildren = new ArrayList<>();
- boolean changed = false;
- for (GroovyExpression child : result.getChildren()) {
- //Recursively optimize this child.
- GroovyExpression updatedChild = optimize(child, optimization, context);
- changed |= updatedChild != child;
- updatedChildren.add(updatedChild);
- }
- if (changed) {
- //TBD - Can we update in place rather than making a copy?
- result = result.copy(updatedChildren);
- }
- }
- return result;
- }
-
- /**
- * Visits all expressions in the call hierarchy of an expression. For example,
- * in the expression g.V().has('x','y'), the order would be
- * <ol>
- * <li>pre-visit has('x','y')</li>
- * <li>pre-visit V()</li>
- * <li>visit g (non-function caller)</li>
- * <li>post-visit V()</li>
- * <li>post-visit has('x','y')</li>
- * </ol>
- * @param expr
- * @param visitor
- */
- public static void visitCallHierarchy(GroovyExpression expr, CallHierarchyVisitor visitor) {
-
- if (expr == null) {
- visitor.visitNullCaller();
- return;
- }
- if (expr instanceof AbstractFunctionExpression) {
- AbstractFunctionExpression functionCall = (AbstractFunctionExpression)expr;
- if (!visitor.preVisitFunctionCaller(functionCall)) {
- return;
- }
- GroovyExpression caller = functionCall.getCaller();
- visitCallHierarchy(caller, visitor);
- if (!visitor.postVisitFunctionCaller(functionCall)) {
- return;
- }
- } else {
- visitor.visitNonFunctionCaller(expr);
- }
- }
-
- /**
- * Determines if the given expression is an "or" expression.
- * @param expr
- * @return
- */
- public static boolean isOrExpression(GroovyExpression expr) {
- return IsOr.INSTANCE.apply(expr);
- }
-
- /**
- * Determines whether the given expression can safely
- * be pulled out of an and/or expression.
- *
- * @param expr an argument to an and or or function
- * @return
- */
- public static boolean isExtractable(GroovyExpression expr) {
-
- HasForbiddenType hasForbiddenTypePredicate = new HasForbiddenType(FACTORY);
-
- //alias could conflict with alias in parent traversal
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.SIDE_EFFECT);
-
- //inlining out(), in() steps will change the result of calls after the and/or()
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.FLAT_MAP_TO_ELEMENTS);
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.FLAT_MAP_TO_VALUES);
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.BARRIER);
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.MAP_TO_ELEMENT);
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.MAP_TO_VALUE);
-
- //caller expects to be able to continue the traversal. We can't end it
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.END);
-
-
- //we can't inline child traversals
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.SOURCE);
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.START);
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.SIDE_EFFECT);
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.NONE);
- hasForbiddenTypePredicate.addForbiddenType(TraversalStepType.BRANCH);
-
- ExpressionFinder forbiddenExpressionFinder = new ExpressionFinder(hasForbiddenTypePredicate);
- GremlinQueryOptimizer.visitCallHierarchy(expr, forbiddenExpressionFinder);
- return ! forbiddenExpressionFinder.isExpressionFound();
- }
-
- /**
- * Recursively copies and follows the caller hierarchy of the expression until we come
- * to a function call with a null caller. The caller of that expression is set
- * to newLeaf.
- *
- * @param expr
- * @param newLeaf
- * @return the updated (/copied) expression
- */
- public static GroovyExpression copyWithNewLeafNode(AbstractFunctionExpression expr, GroovyExpression newLeaf) {
-
-
- AbstractFunctionExpression result = (AbstractFunctionExpression)expr.copy();
-
- //remove leading anonymous traversal expression, if there is one
- if(FACTORY.isLeafAnonymousTraversalExpression(expr)) {
- result = (AbstractFunctionExpression)newLeaf;
- } else {
- GroovyExpression newCaller = null;
- if (expr.getCaller() == null) {
- newCaller = newLeaf;
- } else {
- newCaller = copyWithNewLeafNode((AbstractFunctionExpression)result.getCaller(), newLeaf);
- }
- result.setCaller(newCaller);
- }
- return result;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/HasForbiddenType.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/HasForbiddenType.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/HasForbiddenType.java
deleted file mode 100644
index 3fb9faa..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/HasForbiddenType.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import java.util.HashSet;
-import java.util.Set;
-import com.google.common.base.Function;
-
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.TraversalStepType;
-
-/**
- * Function that tests whether the expression is an 'or'
- * graph traversal function.
- */
-public final class HasForbiddenType implements Function<GroovyExpression, Boolean> {
-
- private Set<TraversalStepType> forbiddenTypes = new HashSet<>();
- private final GremlinExpressionFactory factory;
-
- public HasForbiddenType(GremlinExpressionFactory factory) {
- this.factory = factory;
- }
-
- public void addForbiddenType(TraversalStepType type) {
- forbiddenTypes.add(type);
- }
-
- @Override
- public Boolean apply(GroovyExpression expr) {
- if(factory.isLeafAnonymousTraversalExpression(expr)) {
- return false;
- }
- return forbiddenTypes.contains(expr.getType());
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/IsOr.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/IsOr.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/IsOr.java
deleted file mode 100644
index ab74087..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/IsOr.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import com.google.common.base.Function;
-
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.TraversalStepType;
-
-/**
- * Function that tests whether the expression is an 'or'
- * graph traversal function.
- */
-public final class IsOr implements Function<GroovyExpression, Boolean> {
-
- public static final IsOr INSTANCE = new IsOr();
-
- private IsOr() {
- }
-
- @Override
- public Boolean apply(GroovyExpression expr) {
- if (!(expr instanceof FunctionCallExpression)) {
- return false;
- }
- if (expr.getType() != TraversalStepType.FILTER) {
- return false;
- }
- FunctionCallExpression functionCall = (FunctionCallExpression)expr;
- return functionCall.getFunctionName().equals("or");
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/IsOrParent.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/IsOrParent.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/IsOrParent.java
deleted file mode 100644
index 72085d0..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/IsOrParent.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import com.google.common.base.Function;
-
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.TraversalStepType;
-
-/**
- * Matches an expression that gets called after calling or(). For example,
- * in g.V().or(x,y).toList(), "toList()" is the "or parent", so calling
- * "apply()" on this expression would return true and calling it on all
- * the other ones would return false.
- */
-public final class IsOrParent implements Function<GroovyExpression, Boolean> {
-
- public static final IsOrParent INSTANCE = new IsOrParent();
-
- private IsOrParent() {
-
- }
-
- @Override
- public Boolean apply(GroovyExpression expr) {
- if (!(expr instanceof AbstractFunctionExpression)) {
- return false;
- }
- AbstractFunctionExpression functionCall = (AbstractFunctionExpression)expr;
- GroovyExpression target = functionCall.getCaller();
-
- if (!(target instanceof FunctionCallExpression)) {
- return false;
- }
-
- if (target.getType() != TraversalStepType.FILTER) {
- return false;
- }
-
- FunctionCallExpression targetFunction = (FunctionCallExpression)target;
- return targetFunction.getFunctionName().equals("or");
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/OptimizationContext.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/OptimizationContext.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/OptimizationContext.java
deleted file mode 100644
index 86c8b98..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/OptimizationContext.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.ClosureExpression;
-import org.apache.atlas.groovy.ClosureExpression.VariableDeclaration;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.IdentifierExpression;
-import org.apache.atlas.groovy.ListExpression;
-import org.apache.atlas.groovy.TypeCoersionExpression;
-import org.apache.atlas.groovy.VariableAssignmentExpression;
-
-/**
- * Maintains state information during gremlin optimization.
- */
-public class OptimizationContext {
-
- private static final String TMP_ALIAS_NAME = "__tmp";
- private static final String FINAL_ALIAS_NAME = "__res";
- private static final String RESULT_VARIABLE = "r";
- private final List<GroovyExpression> initialStatements = new ArrayList<>();
- private GroovyExpression resultExpression = getResultVariable();
- private int counter = 1;
- private final Map<String, ClosureExpression> functionBodies = new HashMap<>();
- private AbstractFunctionExpression rangeExpression;
-
- public OptimizationContext() {
-
- }
-
- /**
- * @return
- */
- public List<GroovyExpression> getInitialStatements() {
- return initialStatements;
- }
-
- public void prependStatement(GroovyExpression expr) {
- initialStatements.add(0, expr);
- }
-
- public String getUniqueFunctionName() {
- return "f" + (counter++);
- }
-
-
- public GroovyExpression getDefineResultVariableStmt() {
- GroovyExpression castExpression = new TypeCoersionExpression(new ListExpression(), "Set");
- GroovyExpression resultVarDef = new VariableAssignmentExpression(RESULT_VARIABLE, castExpression);
- return resultVarDef;
-
- }
- public void setResultExpression(GroovyExpression expr) {
- resultExpression = expr;
- }
-
- public GroovyExpression getResultExpression() {
- return resultExpression;
- }
-
- public GroovyExpression getResultVariable() {
- return new IdentifierExpression(RESULT_VARIABLE);
- }
-
- public ClosureExpression getUserDefinedFunctionBody(String functionName) {
- return functionBodies.get(functionName);
- }
-
- public String addFunctionDefinition(VariableDeclaration decl, GroovyExpression body) {
- String functionName = getUniqueFunctionName();
- List<VariableDeclaration> decls = (decl == null) ? Collections.<VariableDeclaration>emptyList() : Collections.singletonList(decl);
- ClosureExpression bodyClosure = new ClosureExpression(body, decls);
- VariableAssignmentExpression expr = new VariableAssignmentExpression(functionName, bodyClosure);
- initialStatements.add(expr);
- functionBodies.put(functionName, bodyClosure);
- return functionName;
- }
-
- public String getFinalAliasName() {
- return FINAL_ALIAS_NAME;
- }
-
- public String getTempAliasName() {
- return TMP_ALIAS_NAME;
- }
-
- public void setRangeExpression(AbstractFunctionExpression rangeExpression) {
- this.rangeExpression = rangeExpression;
- }
-
- public AbstractFunctionExpression getRangeExpression() {
- return rangeExpression;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/OrderFinder.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/OrderFinder.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/OrderFinder.java
deleted file mode 100644
index 792fc52..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/OrderFinder.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-
-
-/**
- * Finds order expression in the call hierarchy.
- *
- */
-public class OrderFinder implements CallHierarchyVisitor {
-
- private boolean hasOrderExpression;
- private GremlinExpressionFactory gremlinFactory;
-
- public OrderFinder(GremlinExpressionFactory gremlinFactory) {
- this.gremlinFactory = gremlinFactory;
- }
-
- @Override
- public boolean preVisitFunctionCaller(AbstractFunctionExpression expr) {
-
- return true;
- }
-
- @Override
- public void visitNonFunctionCaller(GroovyExpression expr) {
- }
-
- @Override
- public void visitNullCaller() {
- }
-
- @Override
- public boolean postVisitFunctionCaller(AbstractFunctionExpression functionCall) {
-
- if (gremlinFactory.isOrderExpression(functionCall)) {
- hasOrderExpression = true;
- return false;
- }
- return true;
- }
-
-
- public boolean hasOrderExpression() {
-
- return hasOrderExpression;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/PathExpressionFinder.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/PathExpressionFinder.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/PathExpressionFinder.java
deleted file mode 100644
index 0e9070d..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/PathExpressionFinder.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-
-/**
- * Determines whether an expression contains a path() function.
- */
-public class PathExpressionFinder implements CallHierarchyVisitor {
-
- private boolean found = false;
-
- @Override
- public boolean preVisitFunctionCaller(AbstractFunctionExpression expr) {
- if(expr instanceof FunctionCallExpression) {
- found = ((FunctionCallExpression)expr).getFunctionName().equals("path");
- if(found) {
- return false;
- }
- }
- return true;
- }
-
- @Override
- public void visitNonFunctionCaller(GroovyExpression expr) {
-
- }
-
- @Override
- public void visitNullCaller() {
-
- }
-
- public boolean isPathExpressionFound() {
- return found;
- }
-
- @Override
- public boolean postVisitFunctionCaller(AbstractFunctionExpression functionCall) {
-
- return false;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/RangeFinder.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/RangeFinder.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/RangeFinder.java
deleted file mode 100644
index fa8ca85..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/RangeFinder.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-
-
-/**
- * Finds all range expressions in the call hierarchy.
- *
- */
-public class RangeFinder implements CallHierarchyVisitor {
-
- private List<AbstractFunctionExpression> rangeExpressions = new ArrayList<>();
- private GremlinExpressionFactory factory;
-
- public RangeFinder(GremlinExpressionFactory factory) {
- this.factory = factory;
- }
-
- @Override
- public boolean preVisitFunctionCaller(AbstractFunctionExpression expr) {
-
- return true;
- }
-
- @Override
- public void visitNonFunctionCaller(GroovyExpression expr) {
- }
-
- @Override
- public void visitNullCaller() {
- }
-
- @Override
- public boolean postVisitFunctionCaller(AbstractFunctionExpression functionCall) {
-
- if (factory.isRangeExpression(functionCall)) {
- rangeExpressions.add(functionCall);
- }
- return true;
- }
-
- public List<AbstractFunctionExpression> getRangeExpressions() {
- return rangeExpressions;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/RepeatExpressionFinder.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/RepeatExpressionFinder.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/RepeatExpressionFinder.java
deleted file mode 100644
index 8344f36..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/RepeatExpressionFinder.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-
-/**
- * Determines whether an expression contains a repeat/loop function.
- */
-public class RepeatExpressionFinder implements CallHierarchyVisitor {
-
- private boolean found = false;
- private GremlinExpressionFactory factory;
-
- public RepeatExpressionFinder(GremlinExpressionFactory factory) {
- this.factory = factory;
- }
-
- @Override
- public boolean preVisitFunctionCaller(AbstractFunctionExpression expr) {
-
- found = factory.isRepeatExpression(expr);
- if(found) {
- return false;
- }
- return true;
- }
-
- @Override
- public void visitNonFunctionCaller(GroovyExpression expr) {
-
- }
-
- @Override
- public void visitNullCaller() {
-
- }
-
- public boolean isRepeatExpressionFound() {
- return found;
- }
-
- @Override
- public boolean postVisitFunctionCaller(AbstractFunctionExpression functionCall) {
-
- return false;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/SplitPointFinder.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/SplitPointFinder.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/SplitPointFinder.java
deleted file mode 100644
index f0295e7..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/SplitPointFinder.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.TraversalStepType;
-
-
-/**
- * This class finds the first place in the expression where the value of the
- * traverser is changed from being a vertex to being something else. This is
- * important in the "or" optimization logic, since the union operation must be
- * done on *vertices* in order to preserve the semantics of the query. In addition,
- * expressions that have side effects must be moved as well, so that those
- * side effects will be available to the steps that need them.
- */
-public class SplitPointFinder implements CallHierarchyVisitor {
-
- //Any steps that change the traverser value to something that is not a vertex or edge
- //must be included here, so that the union created by ExpandOrsOptimization
- //is done over vertices/edges.
- private static final Set<TraversalStepType> TYPES_REQUIRED_IN_RESULT_EXPRESSION = new HashSet<>(
- Arrays.asList(
- TraversalStepType.BARRIER,
- TraversalStepType.BRANCH,
- TraversalStepType.SIDE_EFFECT,
- TraversalStepType.MAP_TO_VALUE,
- TraversalStepType.FLAT_MAP_TO_VALUES,
- TraversalStepType.END,
- TraversalStepType.NONE));
-
- private final Set<String> requiredAliases = new HashSet<>();
-
- //Exceptions to the requirement that all expressions with a type
- //in the above list must be in the result expression. If the
- //function name is in this list, it is ok for that expression
- //to not be in the result expression. This mechanism allows
- //aliases to remain outside the result expression. Other
- //exceptions may be found in the future.
- private static final Map<TraversalStepType, WhiteList> WHITE_LISTS = new HashMap<>();
- static {
- WHITE_LISTS.put(TraversalStepType.SIDE_EFFECT, new WhiteList("as"));
- }
-
- private final GremlinExpressionFactory factory;
-
- public SplitPointFinder(GremlinExpressionFactory factory) {
- this.factory = factory;
- }
-
- /**
- * Represents a set of function names.
- */
- private static final class WhiteList {
- private Set<String> allowedFunctionNames = new HashSet<>();
- public WhiteList(String... names) {
- for(String name : names) {
- allowedFunctionNames.add(name);
- }
- }
- public boolean contains(String name) {
- return allowedFunctionNames.contains(name);
- }
- }
-
- private AbstractFunctionExpression splitPoint;
-
- @Override
- public boolean preVisitFunctionCaller(AbstractFunctionExpression expr) {
- requiredAliases.addAll(factory.getAliasesRequiredByExpression(expr));
- return true;
- }
-
- @Override
- public void visitNonFunctionCaller(GroovyExpression expr) {
-
- }
-
- @Override
- public void visitNullCaller() {
-
- }
-
- public AbstractFunctionExpression getSplitPoint() {
- return splitPoint;
- }
-
- @Override
- public boolean postVisitFunctionCaller(AbstractFunctionExpression functionCall) {
- String aliasName = factory.getAliasNameIfRelevant(functionCall);
- if (splitPoint == null) {
-
- boolean required = isRequiredAlias(aliasName) ||
- isRequiredInResultExpression(functionCall);
- if (required) {
- splitPoint = functionCall;
- }
- }
- removeSeenAlias(aliasName);
-
- return true;
- }
-
- private void removeSeenAlias(String aliasName) {
- if(aliasName != null) {
- requiredAliases.remove(aliasName);
- }
- }
-
- private boolean isRequiredAlias(String aliasName) {
- if(aliasName != null) {
- return requiredAliases.contains(aliasName);
- }
- return false;
- }
-
- private boolean isRequiredInResultExpression(AbstractFunctionExpression expr) {
-
- TraversalStepType type = expr.getType();
- if (!TYPES_REQUIRED_IN_RESULT_EXPRESSION.contains(type)) {
- return false;
- }
-
- if(expr instanceof FunctionCallExpression) {
- FunctionCallExpression functionCall = (FunctionCallExpression)expr;
- //check if the white list permits this function call. If there is
- //no white list, all expressions with the current step type must go in the
- //result expression.
- WhiteList whiteList = WHITE_LISTS.get(type);
- if(whiteList != null && whiteList.contains(functionCall.getFunctionName())) {
- return false;
- }
- }
- return true;
-
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/optimizer/UpdatedExpressions.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/UpdatedExpressions.java b/repository/src/main/java/org/apache/atlas/gremlin/optimizer/UpdatedExpressions.java
deleted file mode 100644
index 06351ea..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/optimizer/UpdatedExpressions.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.gremlin.optimizer;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.atlas.groovy.GroovyExpression;
-
-/**
- * Represents a list of updated expressions.
- */
-public class UpdatedExpressions {
-
- private List<List<GroovyExpression>> updatedChildren = new ArrayList<>();
- private boolean changed = false;
-
- public UpdatedExpressions(boolean changed, List<List<GroovyExpression>> updatedChildren) {
- this.changed = changed;
- this.updatedChildren = updatedChildren;
- }
-
- public List<List<GroovyExpression>> getUpdatedChildren() {
- return updatedChildren;
- }
-
- public boolean hasChanges() {
- return changed;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/query/Expressions.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/Expressions.java b/repository/src/main/java/org/apache/atlas/query/Expressions.java
new file mode 100644
index 0000000..9e93ce4
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/query/Expressions.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.query;
+
+
+import java.util.List;
+
+public class Expressions {
+ public static class Expression {
+
+ }
+
+ public static class AliasExpression {
+ public String alias() {
+ String ret = null;
+
+ return ret;
+ }
+
+ }
+
+ public static class SelectExpression {
+ public List<AliasExpression> toJavaList() {
+ List<AliasExpression> ret = null;
+
+ return ret;
+ }
+
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/query/GremlinQuery.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/GremlinQuery.java b/repository/src/main/java/org/apache/atlas/query/GremlinQuery.java
new file mode 100644
index 0000000..fcb1f48
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/query/GremlinQuery.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.query;
+
+import org.apache.atlas.query.Expressions.Expression;
+
+
+public class GremlinQuery {
+
+ public boolean hasSelectList() {
+ boolean ret = false;
+
+ return ret;
+ }
+
+ public String queryStr() {
+ String ret = null;
+
+ return ret;
+ }
+
+ public Expression expr() {
+ Expression ret = null;
+
+ return ret;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/query/GremlinTranslator.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/GremlinTranslator.java b/repository/src/main/java/org/apache/atlas/query/GremlinTranslator.java
new file mode 100644
index 0000000..5395ddd
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/query/GremlinTranslator.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.query;
+
+import org.apache.atlas.query.Expressions.Expression;
+
+public class GremlinTranslator {
+ private Expression expression;
+
+ public GremlinTranslator(Expression expression) {
+ this.expression = expression;
+ }
+
+ public GremlinQuery translate() {
+ GremlinQuery ret = null;
+
+ return ret;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/query/QueryParams.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/QueryParams.java b/repository/src/main/java/org/apache/atlas/query/QueryParams.java
new file mode 100644
index 0000000..5af8bc7
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/query/QueryParams.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.query;
+
+
+public class QueryParams {
+ private int limit;
+ private int offset;
+
+ public QueryParams() {
+ this.limit = -1;
+ this.offset = 0;
+ }
+
+ public QueryParams(int limit, int offset) {
+ this.limit = limit;
+ this.offset = offset;
+ }
+
+ public int limit() {
+ return limit;
+ }
+
+ public void limit(int limit) {
+ this.limit = limit;
+ }
+
+ public int offset() {
+ return offset;
+ }
+
+ public void offset(int offset) {
+ this.offset = offset;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/query/QueryParser.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/QueryParser.java b/repository/src/main/java/org/apache/atlas/query/QueryParser.java
new file mode 100644
index 0000000..1e5e5ff
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/query/QueryParser.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.query;
+
+import org.apache.atlas.query.Expressions.Expression;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+
+public class QueryParser {
+ private static final Set<String> RESERVED_KEYWORDS =
+ new HashSet<>(Arrays.asList("[", "]", "(", ")", "=", "<", ">", "!=", "<=", ">=", ",", "and", "or", "+", "-",
+ "*", "/", ".", "select", "from", "where", "groupby", "loop", "isa", "is", "has",
+ "as", "times", "withPath", "limit", "offset", "orderby", "count", "max", "min",
+ "sum", "by", "order", "like"));
+
+ public static boolean isKeyword(String word) {
+ return RESERVED_KEYWORDS.contains(word);
+ }
+
+ public static Expression apply(String queryStr, QueryParams params) {
+ Expression ret = null;
+
+ return ret;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/query/QueryProcessor.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/QueryProcessor.java b/repository/src/main/java/org/apache/atlas/query/QueryProcessor.java
new file mode 100644
index 0000000..04cf0b4
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/query/QueryProcessor.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.query;
+
+import org.apache.atlas.query.Expressions.Expression;
+
+public class QueryProcessor {
+ public static Expression validate(Expression expression) {
+ Expressions.Expression ret = null;
+
+ return ret;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/query/SelectExpressionHelper.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/query/SelectExpressionHelper.java b/repository/src/main/java/org/apache/atlas/query/SelectExpressionHelper.java
new file mode 100644
index 0000000..a8748ef
--- /dev/null
+++ b/repository/src/main/java/org/apache/atlas/query/SelectExpressionHelper.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.query;
+
+import org.apache.atlas.query.Expressions.Expression;
+import org.apache.atlas.query.Expressions.SelectExpression;
+
+
+public class SelectExpressionHelper {
+ public static SelectExpression extractSelectExpression(Expression expr) {
+ SelectExpression ret = null;
+
+ return ret;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/DiscoverInstances.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/DiscoverInstances.java b/repository/src/main/java/org/apache/atlas/repository/DiscoverInstances.java
deleted file mode 100755
index 6261499..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/DiscoverInstances.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.ObjectGraphWalker;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Graph walker implementation for discovering instances.
- */
-@Deprecated
-public class DiscoverInstances implements ObjectGraphWalker.NodeProcessor {
-
- public final Map<Id, Id> idToNewIdMap;
- public final Map<Id, IReferenceableInstance> idToInstanceMap;
- final IRepository repository;
-
- public DiscoverInstances(IRepository repository) {
- this.repository = repository;
- idToNewIdMap = new HashMap<>();
- idToInstanceMap = new HashMap<>();
- }
-
- @Override
- public void processNode(ObjectGraphWalker.Node nd) throws AtlasException {
-
- IReferenceableInstance ref = null;
- Id id = null;
-
- if (nd.attributeName == null) {
- ref = (IReferenceableInstance) nd.instance;
- id = ref.getId();
- } else if (nd.aInfo.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- if (nd.value != null && (nd.value instanceof Id)) {
- id = (Id) nd.value;
- }
- }
-
- if (id != null) {
- if (id.isUnassigned()) {
- if (!idToNewIdMap.containsKey(id)) {
- idToNewIdMap.put(id, repository.newId(id.typeName));
- }
- if (ref != null && idToInstanceMap.containsKey(ref)) {
- // Oops
- throw new RepositoryException(
- String.format("Unexpected internal error: Id %s processed again", id));
- }
- if (ref != null) {
- idToInstanceMap.put(id, ref);
- }
- }
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/IRepository.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/IRepository.java b/repository/src/main/java/org/apache/atlas/repository/IRepository.java
deleted file mode 100755
index 1637e11..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/IRepository.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository;
-
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.TraitType;
-
-import java.util.List;
-
-/**
- * Metadata Repository interface.
- */
-@Deprecated
-public interface IRepository {
-
- ITypedReferenceableInstance create(IReferenceableInstance i) throws RepositoryException;
-
- ITypedReferenceableInstance update(ITypedReferenceableInstance i) throws RepositoryException;
-
- void delete(ITypedReferenceableInstance i) throws RepositoryException;
-
- Id newId(String typeName);
-
- ITypedReferenceableInstance get(Id id) throws RepositoryException;
-
- void defineClass(ClassType type) throws RepositoryException;
-
- void defineTrait(TraitType type) throws RepositoryException;
-
- void defineTypes(List<HierarchicalType> types) throws RepositoryException;
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/MetadataRepository.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/MetadataRepository.java b/repository/src/main/java/org/apache/atlas/repository/MetadataRepository.java
deleted file mode 100644
index b72ee7d..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/MetadataRepository.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.CreateUpdateEntitiesResult;
-import org.apache.atlas.model.legacy.EntityResult;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.exception.EntityExistsException;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.TraitNotFoundException;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.IDataType;
-
-import java.util.List;
-
-/**
- * An interface for persisting metadata into a blueprints enabled graph db.
- */
-@Deprecated
-public interface MetadataRepository {
-
- /**
- * Returns the property key used to store entity type name.
- *
- * @return property key used to store entity type name.
- */
- String getTypeAttributeName();
-
- /**
- * Returns the property key used to store super type names.
- *
- * @return property key used to store super type names.
- */
- String getSuperTypeAttributeName();
-
- /**
- * Returns the attribute name used for entity state
- * @return
- */
- String getStateAttributeName();
- /**
- * Returns the attribute name used for entity version
- * @return
- */
- String getVersionAttributeName();
-
- /**
- * Return the property key used to store a given traitName in the repository.
- *
- * @param dataType data type
- * @param traitName trait name
- * @return property key used to store a given traitName
- */
- String getTraitLabel(IDataType<?> dataType, String traitName);
-
- /**
- * Return the property key used to store a given attribute in the repository.
- *
- * @param dataType data type
- * @param aInfo attribute info
- * @return property key used to store a given attribute
- */
- String getFieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) throws AtlasException;
-
- /**
- * Return the edge label for a given attribute in the repository.
- *
- * @param dataType data type
- * @param aInfo attribute info
- * @return edge label for a given attribute
- */
- String getEdgeLabel(IDataType<?> dataType, AttributeInfo aInfo) throws AtlasException;
-
- /**
- * Creates an entity definition (instance) corresponding to a given type.
- *
- * @param entities entity (typed instance)
- * @return CreateOrUpdateEntitiesResult with the guids of the entities that were created
- * @throws RepositoryException
- * @throws EntityExistsException
- */
- CreateUpdateEntitiesResult createEntities(ITypedReferenceableInstance... entities) throws RepositoryException, EntityExistsException;
-
- /**
- * Fetch the complete definition of an entity given its GUID.
- *
- * @param guid globally unique identifier for the entity
- * @return entity (typed instance) definition
- * @throws RepositoryException
- * @throws EntityNotFoundException
- */
- ITypedReferenceableInstance getEntityDefinition(String guid) throws RepositoryException, EntityNotFoundException;
-
- /**
- * Fetch the complete entity definitions for the entities with the given GUIDs
- *
- * @param guids globally unique identifiers for the entities
- * @return entity (typed instance) definitions list
- * @throws RepositoryException
- * @throws EntityNotFoundException
- */
- List<ITypedReferenceableInstance> getEntityDefinitions(String... guids) throws RepositoryException, EntityNotFoundException;
-
- /**
- * Gets the list of entities for a given entity type.
- *
- * @param entityType name of a type which is unique
- * @return a list of entity names for the given type
- * @throws RepositoryException
- */
- List<String> getEntityList(String entityType) throws RepositoryException;
-
- /**
- * Deletes entities for the specified guids.
- *
- * @param guids globally unique identifiers for the deletion candidate entities
- * @return guids of deleted entities
- * @throws RepositoryException
- */
- EntityResult deleteEntities(List<String> guids) throws RepositoryException;
-
-
- // Trait management functions
-
- /**
- * Gets the list of trait names for a given entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @return a list of trait names for the given entity guid
- * @throws RepositoryException
- */
- List<String> getTraitNames(String guid) throws AtlasException;
-
- /**
- * Adds a new trait to an existing entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @param traitInstance trait instance that needs to be added to entity
- * @throws RepositoryException
- */
- void addTrait(String guid, ITypedStruct traitInstance) throws RepositoryException;
-
- /**
- * Adds a new trait to a list of entities represented by their respective guids
- * @param entityGuids list of globally unique identifier for the entities
- * @param traitInstance trait instance that needs to be added to entities
- * @throws RepositoryException
- */
- void addTrait(List<String> entityGuids, ITypedStruct traitInstance) throws RepositoryException;
-
- /**
- * Deletes a given trait from an existing entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @param traitNameToBeDeleted name of the trait
- * @throws RepositoryException
- */
- void deleteTrait(String guid, String traitNameToBeDeleted) throws TraitNotFoundException, EntityNotFoundException, RepositoryException;
-
- /**
- * Adds/Updates the property to the entity that corresponds to the GUID
- * Supports only primitive attribute/Class Id updations.
- */
- CreateUpdateEntitiesResult updatePartial(ITypedReferenceableInstance entity) throws RepositoryException;
-
- /**
- * Adds the property to the entity that corresponds to the GUID
- * @param entitiesToBeUpdated The entities to be updated
- */
- CreateUpdateEntitiesResult updateEntities(ITypedReferenceableInstance... entitiesToBeUpdated) throws RepositoryException;
-
- /**
- * Returns the entity for the given type and qualified name
- * @param entityType
- * @param attribute
- * @param value
- * @return entity instance
- */
- ITypedReferenceableInstance getEntityDefinition(String entityType, String attribute, Object value) throws AtlasException;
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/RepositoryConfiguration.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/RepositoryConfiguration.java b/repository/src/main/java/org/apache/atlas/repository/RepositoryConfiguration.java
deleted file mode 100644
index 261a6d0..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/RepositoryConfiguration.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository;
-
-import org.apache.atlas.repository.graphdb.GraphDatabase;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.util.AtlasRepositoryConfiguration;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-@Configuration
-public class RepositoryConfiguration {
- @Bean
- public GraphDatabase getGraphDatabase() throws IllegalAccessException, InstantiationException {
- return AtlasRepositoryConfiguration.getGraphDatabaseImpl().newInstance();
- }
-
- @Bean
- public TypeSystem getTypeSystem() {
- return TypeSystem.getInstance();
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListener.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListener.java b/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListener.java
index 2a1881b..47d4e1d 100644
--- a/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListener.java
+++ b/repository/src/main/java/org/apache/atlas/repository/audit/EntityAuditListener.java
@@ -23,11 +23,12 @@ import org.apache.atlas.EntityAuditEvent;
import org.apache.atlas.EntityAuditEvent.EntityAuditAction;
import org.apache.atlas.RequestContextV1;
import org.apache.atlas.listener.EntityChangeListener;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.types.AttributeInfo;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.type.AtlasEntityType;
+import org.apache.atlas.type.AtlasStructType;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.slf4j.Logger;
@@ -49,17 +50,19 @@ import java.util.Map;
public class EntityAuditListener implements EntityChangeListener {
private static final Logger LOG = LoggerFactory.getLogger(EntityAuditListener.class);
- private EntityAuditRepository auditRepository;
+ private final EntityAuditRepository auditRepository;
+ private final AtlasTypeRegistry typeRegistry;
@Inject
- public EntityAuditListener(EntityAuditRepository auditRepository) {
+ public EntityAuditListener(EntityAuditRepository auditRepository, AtlasTypeRegistry typeRegistry) {
this.auditRepository = auditRepository;
+ this.typeRegistry = typeRegistry;
}
@Override
- public void onEntitiesAdded(Collection<ITypedReferenceableInstance> entities, boolean isImport) throws AtlasException {
+ public void onEntitiesAdded(Collection<Referenceable> entities, boolean isImport) throws AtlasException {
List<EntityAuditEvent> events = new ArrayList<>();
- for (ITypedReferenceableInstance entity : entities) {
+ for (Referenceable entity : entities) {
EntityAuditEvent event = createEvent(entity, isImport ? EntityAuditAction.ENTITY_IMPORT_CREATE : EntityAuditAction.ENTITY_CREATE);
events.add(event);
}
@@ -68,9 +71,9 @@ public class EntityAuditListener implements EntityChangeListener {
}
@Override
- public void onEntitiesUpdated(Collection<ITypedReferenceableInstance> entities, boolean isImport) throws AtlasException {
+ public void onEntitiesUpdated(Collection<Referenceable> entities, boolean isImport) throws AtlasException {
List<EntityAuditEvent> events = new ArrayList<>();
- for (ITypedReferenceableInstance entity : entities) {
+ for (Referenceable entity : entities) {
EntityAuditEvent event = createEvent(entity, isImport ? EntityAuditAction.ENTITY_IMPORT_UPDATE : EntityAuditAction.ENTITY_UPDATE);
events.add(event);
}
@@ -79,11 +82,11 @@ public class EntityAuditListener implements EntityChangeListener {
}
@Override
- public void onTraitsAdded(ITypedReferenceableInstance entity, Collection<? extends IStruct> traits) throws AtlasException {
+ public void onTraitsAdded(Referenceable entity, Collection<? extends Struct> traits) throws AtlasException {
if (traits != null) {
- for (IStruct trait : traits) {
+ for (Struct trait : traits) {
EntityAuditEvent event = createEvent(entity, EntityAuditAction.TAG_ADD,
- "Added trait: " + InstanceSerialization.toJson(trait, true));
+ "Added trait: " + AtlasType.toV1Json(trait));
auditRepository.putEvents(event);
}
@@ -91,7 +94,7 @@ public class EntityAuditListener implements EntityChangeListener {
}
@Override
- public void onTraitsDeleted(ITypedReferenceableInstance entity, Collection<String> traitNames) throws AtlasException {
+ public void onTraitsDeleted(Referenceable entity, Collection<String> traitNames) throws AtlasException {
if (traitNames != null) {
for (String traitName : traitNames) {
EntityAuditEvent event = createEvent(entity, EntityAuditAction.TAG_DELETE, "Deleted trait: " + traitName);
@@ -102,11 +105,11 @@ public class EntityAuditListener implements EntityChangeListener {
}
@Override
- public void onTraitsUpdated(ITypedReferenceableInstance entity, Collection<? extends IStruct> traits) throws AtlasException {
+ public void onTraitsUpdated(Referenceable entity, Collection<? extends Struct> traits) throws AtlasException {
if (traits != null) {
- for (IStruct trait : traits) {
+ for (Struct trait : traits) {
EntityAuditEvent event = createEvent(entity, EntityAuditAction.TAG_UPDATE,
- "Updated trait: " + InstanceSerialization.toJson(trait, true));
+ "Updated trait: " + AtlasType.toV1Json(trait));
auditRepository.putEvents(event);
}
@@ -114,9 +117,9 @@ public class EntityAuditListener implements EntityChangeListener {
}
@Override
- public void onEntitiesDeleted(Collection<ITypedReferenceableInstance> entities, boolean isImport) throws AtlasException {
+ public void onEntitiesDeleted(Collection<Referenceable> entities, boolean isImport) throws AtlasException {
List<EntityAuditEvent> events = new ArrayList<>();
- for (ITypedReferenceableInstance entity : entities) {
+ for (Referenceable entity : entities) {
EntityAuditEvent event = createEvent(entity, isImport ? EntityAuditAction.ENTITY_IMPORT_DELETE : EntityAuditAction.ENTITY_DELETE, "Deleted entity");
events.add(event);
}
@@ -128,23 +131,23 @@ public class EntityAuditListener implements EntityChangeListener {
return auditRepository.listEvents(guid, null, (short) 10);
}
- private EntityAuditEvent createEvent(ITypedReferenceableInstance entity, EntityAuditAction action)
+ private EntityAuditEvent createEvent(Referenceable entity, EntityAuditAction action)
throws AtlasException {
String detail = getAuditEventDetail(entity, action);
return createEvent(entity, action, detail);
}
- private EntityAuditEvent createEvent(ITypedReferenceableInstance entity, EntityAuditAction action, String details)
+ private EntityAuditEvent createEvent(Referenceable entity, EntityAuditAction action, String details)
throws AtlasException {
return new EntityAuditEvent(entity.getId()._getId(), RequestContextV1.get().getRequestTime(), RequestContextV1.get().getUser(), action, details, entity);
}
- private String getAuditEventDetail(ITypedReferenceableInstance entity, EntityAuditAction action) throws AtlasException {
+ private String getAuditEventDetail(Referenceable entity, EntityAuditAction action) throws AtlasException {
Map<String, Object> prunedAttributes = pruneEntityAttributesForAudit(entity);
String auditPrefix = getAuditPrefix(action);
- String auditString = auditPrefix + InstanceSerialization.toJson(entity, true);
+ String auditString = auditPrefix + AtlasType.toV1Json(entity);
byte[] auditBytes = auditString.getBytes(StandardCharsets.UTF_8);
long auditSize = auditBytes != null ? auditBytes.length : 0;
long auditMaxSize = auditRepository.repositoryMaxSize();
@@ -157,7 +160,7 @@ public class EntityAuditListener implements EntityChangeListener {
clearAttributeValues(entity);
- auditString = auditPrefix + InstanceSerialization.toJson(entity, true);
+ auditString = auditPrefix + AtlasType.toV1Json(entity);
addAttributeValues(entity, attrValues);
}
@@ -167,7 +170,7 @@ public class EntityAuditListener implements EntityChangeListener {
return auditString;
}
- private void clearAttributeValues(IReferenceableInstance entity) throws AtlasException {
+ private void clearAttributeValues(Referenceable entity) throws AtlasException {
Map<String, Object> attributesMap = entity.getValuesMap();
if (MapUtils.isNotEmpty(attributesMap)) {
@@ -177,7 +180,7 @@ public class EntityAuditListener implements EntityChangeListener {
}
}
- private void addAttributeValues(ITypedReferenceableInstance entity, Map<String, Object> attributesMap) throws AtlasException {
+ private void addAttributeValues(Referenceable entity, Map<String, Object> attributesMap) throws AtlasException {
if (MapUtils.isNotEmpty(attributesMap)) {
for (String attr : attributesMap.keySet()) {
entity.set(attr, attributesMap.get(attr));
@@ -185,17 +188,16 @@ public class EntityAuditListener implements EntityChangeListener {
}
}
- private Map<String, Object> pruneEntityAttributesForAudit(ITypedReferenceableInstance entity) throws AtlasException {
+ private Map<String, Object> pruneEntityAttributesForAudit(Referenceable entity) throws AtlasException {
Map<String, Object> ret = null;
Map<String, Object> entityAttributes = entity.getValuesMap();
List<String> excludeAttributes = auditRepository.getAuditExcludeAttributes(entity.getTypeName());
+ AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entity.getTypeName());
- if (CollectionUtils.isNotEmpty(excludeAttributes) && MapUtils.isNotEmpty(entityAttributes)) {
- Map<String, AttributeInfo> attributeInfoMap = entity.fieldMapping().fields;
-
- for (String attrName : entityAttributes.keySet()) {
+ if (CollectionUtils.isNotEmpty(excludeAttributes) && MapUtils.isNotEmpty(entityAttributes) && entityType != null) {
+ for (AtlasStructType.AtlasAttribute attribute : entityType.getAllAttributes().values()) {
+ String attrName = attribute.getName();
Object attrValue = entityAttributes.get(attrName);
- AttributeInfo attrInfo = attributeInfoMap.get(attrName);
if (excludeAttributes.contains(attrName)) {
if (ret == null) {
@@ -204,15 +206,15 @@ public class EntityAuditListener implements EntityChangeListener {
ret.put(attrName, attrValue);
entity.setNull(attrName);
- } else if (attrInfo.isComposite) {
+ } else if (attribute.isOwnedRef()) {
if (attrValue instanceof Collection) {
- for (Object attribute : (Collection) attrValue) {
- if (attribute instanceof ITypedReferenceableInstance) {
- ret = pruneAttributes(ret, (ITypedReferenceableInstance) attribute);
+ for (Object arrElem : (Collection) attrValue) {
+ if (arrElem instanceof Referenceable) {
+ ret = pruneAttributes(ret, (Referenceable) arrElem);
}
}
- } else if (attrValue instanceof ITypedReferenceableInstance) {
- ret = pruneAttributes(ret, (ITypedReferenceableInstance) attrValue);
+ } else if (attrValue instanceof Referenceable) {
+ ret = pruneAttributes(ret, (Referenceable) attrValue);
}
}
}
@@ -221,9 +223,9 @@ public class EntityAuditListener implements EntityChangeListener {
return ret;
}
- private Map<String, Object> pruneAttributes(Map<String, Object> ret, ITypedReferenceableInstance attribute) throws AtlasException {
- ITypedReferenceableInstance attrInstance = attribute;
- Map<String, Object> prunedAttrs = pruneEntityAttributesForAudit(attrInstance);
+ private Map<String, Object> pruneAttributes(Map<String, Object> ret, Referenceable attribute) throws AtlasException {
+ Referenceable attrInstance = attribute;
+ Map<String, Object> prunedAttrs = pruneEntityAttributesForAudit(attrInstance);
if (MapUtils.isNotEmpty(prunedAttrs)) {
if (ret == null) {
@@ -232,41 +234,42 @@ public class EntityAuditListener implements EntityChangeListener {
ret.put(attrInstance.getId()._getId(), prunedAttrs);
}
+
return ret;
}
- private void restoreEntityAttributes(ITypedReferenceableInstance entity, Map<String, Object> prunedAttributes) throws AtlasException {
+ private void restoreEntityAttributes(Referenceable entity, Map<String, Object> prunedAttributes) throws AtlasException {
if (MapUtils.isEmpty(prunedAttributes)) {
return;
}
- Map<String, Object> entityAttributes = entity.getValuesMap();
+ AtlasEntityType entityType = typeRegistry.getEntityTypeByName(entity.getTypeName());
- if (MapUtils.isNotEmpty(entityAttributes)) {
- Map<String, AttributeInfo> attributeInfoMap = entity.fieldMapping().fields;
+ if (entityType != null && MapUtils.isNotEmpty(entityType.getAllAttributes())) {
+ Map<String, Object> entityAttributes = entity.getValuesMap();
- for (String attrName : entityAttributes.keySet()) {
- Object attrValue = entityAttributes.get(attrName);
- AttributeInfo attrInfo = attributeInfoMap.get(attrName);
+ for (AtlasStructType.AtlasAttribute attribute : entityType.getAllAttributes().values()) {
+ String attrName = attribute.getName();
+ Object attrValue = entityAttributes.get(attrName);
if (prunedAttributes.containsKey(attrName)) {
entity.set(attrName, prunedAttributes.get(attrName));
- } else if (attrInfo.isComposite) {
+ } else if (attribute.isOwnedRef()) {
if (attrValue instanceof Collection) {
- for (Object attributeEntity : (Collection) attrValue) {
- if (attributeEntity instanceof ITypedReferenceableInstance) {
- restoreAttributes(prunedAttributes, (ITypedReferenceableInstance) attributeEntity);
+ for (Object arrElem : (Collection) attrValue) {
+ if (arrElem instanceof Referenceable) {
+ restoreAttributes(prunedAttributes, (Referenceable) arrElem);
}
}
- } else if (attrValue instanceof ITypedReferenceableInstance) {
- restoreAttributes(prunedAttributes, (ITypedReferenceableInstance) attrValue);
+ } else if (attrValue instanceof Referenceable) {
+ restoreAttributes(prunedAttributes, (Referenceable) attrValue);
}
}
}
}
}
- private void restoreAttributes(Map<String, Object> prunedAttributes, ITypedReferenceableInstance attributeEntity) throws AtlasException {
+ private void restoreAttributes(Map<String, Object> prunedAttributes, Referenceable attributeEntity) throws AtlasException {
Object obj = prunedAttributes.get(attributeEntity.getId()._getId());
if (obj instanceof Map) {
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/converters/AtlasClassificationFormatConverter.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasClassificationFormatConverter.java b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasClassificationFormatConverter.java
index cd4f165..d91772c 100644
--- a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasClassificationFormatConverter.java
+++ b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasClassificationFormatConverter.java
@@ -18,14 +18,13 @@
package org.apache.atlas.repository.converters;
import org.apache.atlas.AtlasErrorCode;
-import org.apache.atlas.AtlasException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.instance.AtlasClassification;
+import org.apache.atlas.v1.model.instance.Struct;
import org.apache.atlas.type.AtlasClassificationType;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.IStruct;
import org.apache.commons.collections.MapUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -55,19 +54,12 @@ public class AtlasClassificationFormatConverter extends AtlasStructFormatConvert
} else {
ret = new AtlasClassification(type.getTypeName());
}
- } else if (v1Obj instanceof IStruct) {
- IStruct struct = (IStruct) v1Obj;
- Map<String, Object> v1Attribs = null;
+ } else if (v1Obj instanceof Struct) {
+ Struct struct = (Struct) v1Obj;
- try {
- v1Attribs = struct.getValuesMap();
- } catch (AtlasException excp) {
- LOG.error("IStruct.getValuesMap() failed", excp);
- }
-
- ret = new AtlasClassification(type.getTypeName(), fromV1ToV2(classificationType, v1Attribs, ctx));
+ ret = new AtlasClassification(type.getTypeName(), fromV1ToV2(classificationType, struct.getValues(), ctx));
} else {
- throw new AtlasBaseException(AtlasErrorCode.UNEXPECTED_TYPE, "Map or IStruct",
+ throw new AtlasBaseException(AtlasErrorCode.UNEXPECTED_TYPE, "Map or Struct",
v1Obj.getClass().getCanonicalName());
}
}
[08/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCache.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCache.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCache.java
deleted file mode 100644
index 0d86474..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCache.java
+++ /dev/null
@@ -1,301 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.typesystem.types.cache;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.annotation.ConditionalOnAtlasProperty;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.EnumType;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.StructType;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Component;
-
-import javax.inject.Singleton;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-
-/**
- * Caches the types in-memory within the same process space.
- */
-@SuppressWarnings("rawtypes")
-@Singleton
-@Component
-@ConditionalOnAtlasProperty(property = "atlas.TypeCache.impl", isDefault = true)
-public class DefaultTypeCache implements TypeCache {
- private static final Logger LOG = LoggerFactory.getLogger(DefaultTypeCache.class);
-
- private Map<String, IDataType> types_ = new ConcurrentHashMap<>();
- private static final List<TypeCategory> validTypeFilterCategories =
- Arrays.asList(TypeCategory.CLASS, TypeCategory.TRAIT, TypeCategory.ENUM, TypeCategory.STRUCT);
- private static final List<TypeCategory> validSupertypeFilterCategories =
- Arrays.asList(TypeCategory.CLASS, TypeCategory.TRAIT);
-
- /*
- * (non-Javadoc)
- * @see
- * org.apache.atlas.typesystem.types.cache.TypeCache#has(java.lang
- * .String)
- */
- @Override
- public boolean has(String typeName) throws AtlasException {
-
- return types_.containsKey(typeName);
- }
-
- /* (non-Javadoc)
- * @see org.apache.atlas.typesystem.types.cache.TypeCache#has(org.
- * apache.atlas.typesystem.types.DataTypes.TypeCategory, java.lang.String)
- */
- @Override
- public boolean has(TypeCategory typeCategory, String typeName)
- throws AtlasException {
-
- assertValidTypeCategory(typeCategory);
- return has(typeName);
- }
-
- private void assertValidTypeCategory(String typeCategory) {
- assertValidTypeCategory(TypeCategory.valueOf(typeCategory));
- }
-
- private void assertValidTypeCategory(TypeCategory typeCategory) {
- // there might no need of 'typeCategory' in this implementation for
- // certain API, but for a distributed cache, it might help for the
- // implementers to partition the types per their category
- // while persisting so that look can be efficient
-
- if (typeCategory == null) {
- throw new IllegalArgumentException("Category of the types to be filtered is null.");
- }
-
- if (!validTypeFilterCategories.contains(typeCategory)) {
- throw new IllegalArgumentException("Category of the types should be one of " +
- StringUtils.join(validTypeFilterCategories, ", "));
- }
- }
-
- /*
- * (non-Javadoc)
- * @see
- * org.apache.atlas.typesystem.types.cache.TypeCache#get(java.lang
- * .String)
- */
- @Override
- public IDataType get(String typeName) throws AtlasException {
-
- return types_.get(typeName);
- }
-
- /* (non-Javadoc)
- * @see org.apache.atlas.typesystem.types.cache.TypeCache#get(org.apache.
- * atlas.typesystem.types.DataTypes.TypeCategory, java.lang.String)
- */
- @Override
- public IDataType get(TypeCategory typeCategory, String typeName) throws AtlasException {
-
- assertValidTypeCategory(typeCategory);
- return get(typeName);
- }
-
- /**
- * Return the list of type names in the type system which match the specified filter.
- *
- * @return list of type names
- * @param filterMap - Map of filter for type names. Valid keys are CATEGORY, SUPERTYPE, NOT_SUPERTYPE
- * For example, CATEGORY = TRAIT && SUPERTYPE contains 'X' && SUPERTYPE !contains 'Y'
- */
- @Override
- public Collection<String> getTypeNames(Map<TYPE_FILTER, String> filterMap) throws AtlasException {
- assertFilter(filterMap);
-
- List<String> typeNames = new ArrayList<>();
- for (IDataType type : types_.values()) {
- if (shouldIncludeType(type, filterMap)) {
- typeNames.add(type.getName());
- }
- }
- return typeNames;
- }
-
- private boolean shouldIncludeType(IDataType type, Map<TYPE_FILTER, String> filterMap) {
- if (filterMap == null) {
- return true;
- }
-
- for (Entry<TYPE_FILTER, String> filterEntry : filterMap.entrySet()) {
- switch (filterEntry.getKey()) {
- case CATEGORY:
- if (!filterEntry.getValue().equals(type.getTypeCategory().name())) {
- return false;
- }
- break;
-
- case SUPERTYPE:
- if (!validSupertypeFilterCategories.contains(type.getTypeCategory()) ||
- !((HierarchicalType) type).getAllSuperTypeNames().contains(filterEntry.getValue())) {
- return false;
- }
- break;
-
- case NOT_SUPERTYPE:
- if (!validSupertypeFilterCategories.contains(type.getTypeCategory()) ||
- type.getName().equals(filterEntry.getValue()) ||
- ((HierarchicalType) type).getAllSuperTypeNames().contains(filterEntry.getValue())) {
- return false;
- }
- break;
- }
- }
- return true;
- }
-
-
- private void assertFilter(Map<TYPE_FILTER, String> filterMap) throws AtlasException {
- if (filterMap == null) {
- return;
- }
-
- for (Entry<TYPE_FILTER, String> filterEntry : filterMap.entrySet()) {
- switch (filterEntry.getKey()) {
- case CATEGORY:
- assertValidTypeCategory(filterEntry.getValue());
- break;
-
- case SUPERTYPE:
- case NOT_SUPERTYPE:
- if (!has(filterEntry.getValue())) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("{}: supertype does not exist", filterEntry.getValue());
- }
- }
- break;
-
- default:
- throw new IllegalStateException("Unhandled filter " + filterEntry.getKey());
- }
- }
- }
-
- /*
- * (non-Javadoc)
- * @see
- * org.apache.atlas.typesystem.types.cache.TypeCache#getAllNames()
- */
- @Override
- public Collection<String> getAllTypeNames() throws AtlasException {
-
- return types_.keySet();
- }
-
- /*
- * (non-Javadoc)
- * @see
- * org.apache.atlas.typesystem.types.cache.TypeCache#put(org.apache
- * .atlas.typesystem.types.IDataType)
- */
- @Override
- public void put(IDataType type) throws AtlasException {
-
- assertValidType(type);
- types_.put(type.getName(), type);
- }
-
- private void assertValidType(IDataType type) throws
- AtlasException {
-
- if (type == null) {
- throw new AtlasException("type is null.");
- }
-
- boolean validTypeCategory = (type instanceof ClassType) ||
- (type instanceof TraitType) ||
- (type instanceof EnumType) ||
- (type instanceof StructType);
-
- if (!validTypeCategory) {
- throw new AtlasException("Category of the types should be one of ClassType | "
- + "TraitType | EnumType | StructType.");
- }
- }
-
- /*
- * (non-Javadoc)
- * @see
- * org.apache.atlas.typesystem.types.cache.TypeCache#putAll(java
- * .util.Collection)
- */
- @Override
- public void putAll(Collection<IDataType> types) throws AtlasException {
-
- for (IDataType type : types) {
- assertValidType(type);
- types_.put(type.getName(), type);
- }
- }
-
- /*
- * (non-Javadoc)
- * @see
- * org.apache.atlas.typesystem.types.cache.TypeCache#remove(java
- * .lang.String)
- */
- @Override
- public void remove(String typeName) throws AtlasException {
-
- types_.remove(typeName);
- }
-
- /* (non-Javadoc)
- * @see org.apache.atlas.typesystem.types.cache.TypeCache#remove(org.
- * apache.atlas.typesystem.types.DataTypes.TypeCategory, java.lang.String)
- */
- @Override
- public void remove(TypeCategory typeCategory, String typeName)
- throws AtlasException {
-
- assertValidTypeCategory(typeCategory);
- remove(typeName);
- }
-
- /*
- * (non-Javadoc)
- * @see org.apache.atlas.typesystem.types.cache.TypeCache#clear()
- */
- @Override
- public void clear() {
-
- types_.clear();
- }
-
- @Override
- public IDataType onTypeFault(String typeName) throws AtlasException {
-
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/TypeCache.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/TypeCache.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/TypeCache.java
deleted file mode 100644
index c8f65be..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/cache/TypeCache.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types.cache;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-
-import java.util.Collection;
-import java.util.Map;
-
-/**
- * The types are cached to allow faster lookup when type info is needed during
- * creation/updation of entities, DSL query translation/execution.
- * Implementations of this can chose to plugin a distributed cache
- * or an in-memory cache synched across nodes in an Altas cluster. <br>
- * <br>
- * Type entries in the cache can be one of ... <br>
- * {@link org.apache.atlas.typesystem.types.ClassType} <br>
- * {@link org.apache.atlas.typesystem.types.TraitType} <br>
- * {@link org.apache.atlas.typesystem.types.StructType} <br>
- * {@link org.apache.atlas.typesystem.types.EnumType}
- */
-@SuppressWarnings("rawtypes")
-public interface TypeCache {
-
- enum TYPE_FILTER {
- CATEGORY, SUPERTYPE, NOT_SUPERTYPE
- }
-
- /**
- * @param typeName
- * @return true if the type exists in cache, false otherwise.
- * @throws AtlasException
- */
- boolean has(String typeName) throws AtlasException;
-
- /**
- * @param typeCategory Non-null category of type. The category can be one of
- * TypeCategory.CLASS | TypeCategory.TRAIT | TypeCategory.STRUCT | TypeCategory.ENUM.
- * @param typeName
- * @return true if the type of given category exists in cache, false otherwise.
- * @throws AtlasException
- */
- boolean has(DataTypes.TypeCategory typeCategory, String typeName) throws AtlasException;
-
- /**
- * @param typeName The name of the type.
- * @return returns non-null type if cached, otherwise null
- * @throws AtlasException
- */
- IDataType get(String typeName) throws AtlasException;
-
- /**
- * @param typeCategory Non-null category of type. The category can be one of
- * TypeCategory.CLASS | TypeCategory.TRAIT | TypeCategory.STRUCT | TypeCategory.ENUM.
- * @param typeName
- * @return returns non-null type (of the specified category) if cached, otherwise null
- * @throws AtlasException
- */
- IDataType get(DataTypes.TypeCategory typeCategory, String typeName) throws AtlasException;
-
- /**
- *
- * @param filter @return
- * @throws AtlasException
- */
- Collection<String> getTypeNames(Map<TYPE_FILTER, String> filter) throws AtlasException;
-
- /**
- * This is a convenience API to get the names of all types.
- *
- * @see TypeCache#getTypeNames(Map)
- * @return
- * @throws AtlasException
- */
- Collection<String> getAllTypeNames() throws AtlasException;
-
- /**
- * @param type The type to be added to the cache. The type should not be
- * null, otherwise throws NullPointerException. <br>
- * Type entries in the cache can be one of ... <br>
- * {@link org.apache.atlas.typesystem.types.ClassType} <br>
- * {@link org.apache.atlas.typesystem.types.TraitType} <br>
- * {@link org.apache.atlas.typesystem.types.StructType} <br>
- * {@link org.apache.atlas.typesystem.types.EnumType}
- * @throws AtlasException
- */
- void put(IDataType type) throws AtlasException;
-
- /**
- * @param types The types to be added to the cache. The type should not be
- * null, otherwise throws NullPointerException. <br>
- * Type entries in the cache can be one of ... <br>
- * {@link org.apache.atlas.typesystem.types.ClassType} <br>
- * {@link org.apache.atlas.typesystem.types.TraitType} <br>
- * {@link org.apache.atlas.typesystem.types.StructType} <br>
- * {@link org.apache.atlas.typesystem.types.EnumType}
- * @throws AtlasException
- */
- void putAll(Collection<IDataType> types) throws AtlasException;
-
- /**
- * @param typeName Name of the type to be removed from the cache. If type
- * exists, it will be removed, otherwise does nothing.
- * @throws AtlasException
- */
- void remove(String typeName) throws AtlasException;
-
- /**
- * @param typeCategory Non-null category of type. The category can be one of
- * TypeCategory.CLASS | TypeCategory.TRAIT | TypeCategory.STRUCT | TypeCategory.ENUM.
- * @param typeName Name of the type to be removed from the cache. If type
- * exists, it will be removed, otherwise does nothing.
- * @throws AtlasException
- */
- void remove(DataTypes.TypeCategory typeCategory, String typeName) throws AtlasException;
-
- /**
- * Clear the type cache
- *
- */
- void clear();
-
- /**
- * Called when a type lookup request on {@link TypeSystem}
- * fails because the type is not present in the runtime type information.
- * Implementations can take action such as retrieving the requested type
- * from some persistent storage.
-
- * @param typeName
- * @throws AtlasException
- */
- IDataType onTypeFault(String typeName) throws AtlasException;
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/package-info.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/package-info.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/package-info.java
deleted file mode 100755
index fbd4216..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/package-info.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * <h2>Types:</h2>
- * <img src="doc-files/dataTypes.png" />
- * <ul>
- * <li> <b>IDataType:</b> Represents a <i>DataType</i> in the TypeSystem. All Instances and
- * Attributes are associated
- * with a DataType. They represent the <b>Set</b> of values that Instances/Attributes of this
- * type can have.
- * Currently the namespace of DataTypes is flat. DataTypes can be asked to <i>convert</i>
- * arbitrary java Objects
- * to instances of this type, and they can be asked for a String representation of an
- * instance.</li>
- * <li><b>Type Categories:</b></li> DataTypes are grouped into Categories. A Category implies
- * certain semantics about
- * the Types belonging to the Category. We have PRIMITIVE, ENUM, ARRAY, MAP, STRUCT, TRAIT,
- * and CLASS categories.
- * <li><b>Primitive Types:</b> There are corresponding DataTypes for the java primitives:
- * Boolean, Byte, Short,
- * Int, Long, Float, Double. We also support BigInteger, BigDecimal, String, and Date</li>
- * <li><b>Collection Types:</b>ArrayType and MapType are parameterized DataTypes taking one
- * and two parameters
- * respectively.</li>
- * <li><b>Enum Types:</b> Used to define DataTypes with all valid values listed in the Type
- * definition. For e.g.
- * <pre>
- * {@code
- * ts.defineEnumType("HiveObjectType",
-new EnumValue("GLOBAL", 1),
-new EnumValue("DATABASE", 2),
-new EnumValue("TABLE", 3),
-new EnumValue("PARTITION", 4),
-new EnumValue("COLUMN", 5))
- * }
- * </pre> Each <i>EnumValue</i> has name and an ordinal. Either one can be used as a value for an
- * Attribute of this Type.
- * </li>
- * <li><b>Constructable Types:</b> Are complex Types that are composed of Attributes. We
- * support Structs, Classes
- * and Traits constructable types. A ConstructableType is parameterized by the Type of its
- * <i>Instance</i> java
- * class(these are implementations of the ITypedInstance interface). A value of the
- * IConstructableType will
- * implement this parameterized Type. IConstructableTypes can be asked to create an 'empty'
- * instance of their Type.
- * IConstructableTypes are associated with FieldMappings that encapsulate the mapping from/to
- * the ITypedInstance
- * java object.
- * </li>
- * <li><b>Attribute Info:</b>Represents an Attribute of a complex datatype. Attributes are
- * defined by a name, a
- * dataType, its Multiplicity and whether it is a composite relation. <i>Multiplicity</i> is
- * a constraint on the
- * number of instances that an instance can have. For non collection types and Maps:
- * Multiplicity is OPTIONAL or
- * REQUIRED.
- * For Arrays the Multiplicity is specified by a lower-bound, upper-bound and a uniqueness
- * constraint.
- * </li>
- * <li><b>Struct Types:</b>Are IConstructableTypes whose instances are IStructs. Conceptually
- * these are like 'C'
- * structs: they represent a collection of Attributes. For e.g.
- * <pre>
- * {@code
- * ts.defineStructType(STRUCT_TYPE_1,
-true,
-createRequiredAttrDef("a", DataTypes.INT_TYPE),
-createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
-createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
-createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
-createOptionalAttrDef("e", DataTypes.INT_TYPE),
-createOptionalAttrDef("f", DataTypes.INT_TYPE),
-createOptionalAttrDef("g", DataTypes.LONG_TYPE),
-createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
-createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
-createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
-createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
-createOptionalAttrDef("l", DataTypes.DATE_TYPE),
-createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
-createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
-createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE))
- * }
- * </pre>
- * </li>
- * <li><b>Hierarchical Types:</b>Are DataTypes that can have a SuperType. Classes and Traits
- * are the supported
- * Hierarchical Types. </li>
- * <li><b>Class Types:</b></li>
- * <li><b>Trait Types:</b></li>
- * </ul>
- *
- *
- * <h2>Instances:</h2>
- * <img src="doc-files/instance.png" />
- * <ul>
- * <li> <b>IStruct:</b></li>
- * <li><b>IReferenceableInstance:</b></li>
- * <li><b>ITypedStruct:</b></li>
- * <li><b>ITypedReferenceableInstance:</b></li>
- * </ul>
- *
- * <h3>Serialization of Types:</h3>
- *
- * <h3>Serialization of Instances:</h3>
- *
- * <h3>Searching on Classes and Traits:</h3>
- */
-package org.apache.atlas.typesystem.types;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/utils/TypesUtil.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/utils/TypesUtil.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/utils/TypesUtil.java
deleted file mode 100755
index 39244dc..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/utils/TypesUtil.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types.utils;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.EnumValue;
-import org.apache.atlas.typesystem.types.FieldMapping;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructType;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.AtlasConstants;
-
-import org.apache.atlas.typesystem.types.TypeSystem;
-import scala.collection.JavaConversions;
-
-/**
- * Types utilities class.
- */
-public class TypesUtil {
-
- private TypesUtil() {
- }
-
- public static AttributeDefinition createOptionalAttrDef(String name, IDataType dataType) {
- return new AttributeDefinition(name, dataType.getName(), Multiplicity.OPTIONAL, false, null);
- }
-
- public static AttributeDefinition createOptionalAttrDef(String name, String dataType) {
- return new AttributeDefinition(name, dataType, Multiplicity.OPTIONAL, false, null);
- }
-
- public static AttributeDefinition createRequiredAttrDef(String name, String dataType) {
- return new AttributeDefinition(name, dataType, Multiplicity.REQUIRED, false, null);
- }
-
- public static AttributeDefinition createUniqueRequiredAttrDef(String name, IDataType dataType) {
- return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false, true, true, null);
- }
-
- public static AttributeDefinition createRequiredAttrDef(String name, IDataType dataType) {
- return new AttributeDefinition(name, dataType.getName(), Multiplicity.REQUIRED, false, null);
- }
-
- public static EnumTypeDefinition createEnumTypeDef(String name, EnumValue... enumValues) {
- return new EnumTypeDefinition(name, enumValues);
- }
-
- public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(String name,
- ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
- return createTraitTypeDef(name, null, superTypes, attrDefs);
- }
-
- public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(String name, String description,
- ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
- return createTraitTypeDef(name, description, AtlasConstants.DEFAULT_TYPE_VERSION, superTypes, attrDefs);
- }
-
- public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(String name, String description, String version,
- ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
- return new HierarchicalTypeDefinition<>(TraitType.class, name, description, version, superTypes, attrDefs);
- }
-
- public static StructTypeDefinition createStructTypeDef(String name, AttributeDefinition... attrDefs) {
- return createStructTypeDef(name, null, attrDefs);
- }
-
- public static StructTypeDefinition createStructTypeDef(String name, String description, AttributeDefinition... attrDefs) {
- return new StructTypeDefinition(name, description, attrDefs);
- }
-
- public static StructTypeDefinition createStructTypeDef(String name, String description, String version, AttributeDefinition... attrDefs) {
- return new StructTypeDefinition(name, description, version, attrDefs);
- }
-
- public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name,
- ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
- return createClassTypeDef(name, null, superTypes, attrDefs);
- }
-
- public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name, String description,
- ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
- return createClassTypeDef(name, description, AtlasConstants.DEFAULT_TYPE_VERSION, superTypes, attrDefs);
- }
-
- public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(String name, String description, String version,
- ImmutableSet<String> superTypes, AttributeDefinition... attrDefs) {
- return new HierarchicalTypeDefinition<>(ClassType.class, name, description, version, superTypes, attrDefs);
- }
-
- public static TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums,
- ImmutableList<StructTypeDefinition> structs, ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
- ImmutableList<HierarchicalTypeDefinition<ClassType>> classes) {
- return new TypesDef(JavaConversions.asScalaBuffer(enums), JavaConversions.asScalaBuffer(structs),
- JavaConversions.asScalaBuffer(traits), JavaConversions.asScalaBuffer(classes));
- }
-
- private static final TypeSystem ts = TypeSystem.getInstance();
-
- public static AttributeInfo newAttributeInfo(String attribute, IDataType type) {
- try {
- return new AttributeInfo(ts, new AttributeDefinition(attribute, type.getName(), Multiplicity.REQUIRED,
- false, null), null);
- } catch (AtlasException e) {
- throw new RuntimeException(e);
- }
- }
-
-
- /**
- * Get the field mappings for the specified data type.
- * Field mappings are only relevant for CLASS, TRAIT, and STRUCT types.
- *
- * @param type
- * @return {@link FieldMapping} for the specified type
- * @throws IllegalArgumentException if type is not a CLASS, TRAIT, or STRUCT type.
- */
- public static FieldMapping getFieldMapping(IDataType type) {
- switch (type.getTypeCategory()) {
- case CLASS:
- case TRAIT:
- return ((HierarchicalType)type).fieldMapping();
-
- case STRUCT:
- return ((StructType)type).fieldMapping();
-
- default:
- throw new IllegalArgumentException("Type " + type + " doesn't have any fields!");
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/resources/atlas-log4j.xml
----------------------------------------------------------------------
diff --git a/typesystem/src/main/resources/atlas-log4j.xml b/typesystem/src/main/resources/atlas-log4j.xml
deleted file mode 100755
index 510e2cf..0000000
--- a/typesystem/src/main/resources/atlas-log4j.xml
+++ /dev/null
@@ -1,105 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
-
-<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
- <appender name="console" class="org.apache.log4j.ConsoleAppender">
- <param name="Target" value="System.out"/>
- <layout class="org.apache.log4j.PatternLayout">
- <param name="ConversionPattern" value="%d %-5p - [%t:%x] ~ %m (%C{1}:%L)%n"/>
- </layout>
- </appender>
-
- <appender name="AUDIT" class="org.apache.log4j.RollingFileAppender">
- <param name="File" value="${atlas.log.dir}/audit.log"/>
- <param name="Append" value="true"/>
- <param name="Threshold" value="debug"/>
- <layout class="org.apache.log4j.PatternLayout">
- <param name="ConversionPattern" value="%d %x %m%n"/>
- <param name="maxFileSize" value="100MB" />
- <param name="maxBackupIndex" value="20" />
- </layout>
- </appender>
-
- <logger name="org.apache.atlas" additivity="false">
- <level value="debug"/>
- <appender-ref ref="console"/>
- </logger>
-
- <!-- uncomment this block to generate performance traces
- <appender name="perf_appender" class="org.apache.log4j.DailyRollingFileAppender">
- <param name="file" value="${atlas.log.dir}/atlas_perf.log" />
- <param name="datePattern" value="'.'yyyy-MM-dd" />
- <param name="append" value="true" />
- <layout class="org.apache.log4j.PatternLayout">
- <param name="ConversionPattern" value="%d|%t|%m%n" />
- </layout>
- </appender>
-
- <logger name="org.apache.atlas.perf" additivity="false">
- <level value="debug" />
- <appender-ref ref="perf_appender" />
- </logger>
- -->
-
- <appender name="FAILED" class="org.apache.log4j.DailyRollingFileAppender">
- <param name="File" value="${atlas.log.dir}/failed.log"/>
- <param name="Append" value="true"/>
- <layout class="org.apache.log4j.PatternLayout">
- <param name="ConversionPattern" value="%d %m"/>
- </layout>
- </appender>
-
- <logger name="FAILED" additivity="false">
- <level value="info"/>
- <appender-ref ref="AUDIT"/>
- </logger>
-
- <logger name="com.thinkaurelius.titan" additivity="false">
- <level value="warn"/>
- <appender-ref ref="console"/>
- </logger>
-
- <logger name="org.springframework" additivity="false">
- <level value="warn"/>
- <appender-ref ref="console"/>
- </logger>
-
- <logger name="org.eclipse" additivity="false">
- <level value="warn"/>
- <appender-ref ref="console"/>
- </logger>
-
- <logger name="com.sun.jersey" additivity="false">
- <level value="warn"/>
- <appender-ref ref="console"/>
- </logger>
-
- <logger name="AUDIT" additivity="false">
- <level value="info"/>
- <appender-ref ref="console"/>
- </logger>
-
- <root>
- <priority value="warn"/>
- <appender-ref ref="console"/>
- </root>
-
-</log4j:configuration>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala
deleted file mode 100755
index b51048d..0000000
--- a/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem
-
-import org.apache.atlas.typesystem.types._
-
-case class TypesDef(enumTypes: Seq[EnumTypeDefinition],
- structTypes: Seq[StructTypeDefinition],
- traitTypes: Seq[HierarchicalTypeDefinition[TraitType]],
- classTypes: Seq[HierarchicalTypeDefinition[ClassType]]) {
- def this() = this(Seq(), Seq(), Seq(), Seq())
- def this(enumType : EnumTypeDefinition) = this(Seq(enumType), Seq(), Seq(), Seq())
- def this(structType: StructTypeDefinition) = this(Seq(), Seq(structType), Seq(), Seq())
- def this(typ: HierarchicalTypeDefinition[_], isTrait : Boolean) = this(
- Seq(),
- Seq(),
- if ( isTrait )
- Seq(typ.asInstanceOf[HierarchicalTypeDefinition[TraitType]]) else Seq(),
- if (!isTrait )
- Seq(typ.asInstanceOf[HierarchicalTypeDefinition[ClassType]]) else Seq()
- )
-
- def enumTypesAsJavaList() = {
- import scala.collection.JavaConverters._
- enumTypes.asJava
- }
-
- def structTypesAsJavaList() = {
- import scala.collection.JavaConverters._
- structTypes.asJava
- }
-
- def traitTypesAsJavaList() = {
- import scala.collection.JavaConverters._
- traitTypes.asJava
- }
-
- def classTypesAsJavaList() = {
- import scala.collection.JavaConverters._
- classTypes.asJava
- }
-
- def isEmpty() = {
- enumTypes.isEmpty & structTypes.isEmpty & traitTypes.isEmpty & classTypes.isEmpty
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala
deleted file mode 100644
index 9e22f67..0000000
--- a/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.builders
-
-import org.apache.atlas.typesystem.{IReferenceableInstance, IStruct, Referenceable, Struct}
-
-import scala.collection.JavaConversions._
-import scala.collection.JavaConverters._
-import scala.collection.mutable.ArrayBuffer
-import scala.language.{dynamics, implicitConversions}
-import scala.util.DynamicVariable
-
-class InstanceBuilder extends Dynamic {
-
- private val references : ArrayBuffer[Referenceable] = new ArrayBuffer[Referenceable]()
-
- val context = new DynamicVariable[DynamicStruct](null)
-
- def struct(typeName : String) : DynamicStruct = {
- context.value = new DynamicStruct(this, new Struct(typeName))
- context.value
- }
-
- def instance(typeName: String, traitNames: String*)(f : => Unit) : DynamicReference = {
- val r = new Referenceable(typeName, traitNames:_*)
- references.append(r)
- val dr = new DynamicReference(this, r)
- context.withValue(dr){f}
- dr
- }
-
- def create( f : => Unit ) : java.util.List[Referenceable] = {
- f
- references.asJava
- }
-
- def applyDynamic(name : String)(value : Any) : Any = {
- context.value.updateDynamic(name)(value)
- }
-
- implicit def symbolToDynamicStruct(s : Symbol) : DynamicValue =
- new DynamicValue(this, s.name, if (context.value == null) null else context.value.s)
-
-}
-
-object DynamicValue {
-
- private[builders] def transformOut(s: IStruct, attr : String, v : Any)(implicit ib : InstanceBuilder) : DynamicValue =
- v match {
- case r : Referenceable => new DynamicReference(ib, r)
- case s : Struct => new DynamicStruct(ib, s)
- case jL : java.util.List[_] => {
- if ( s != null ) {
- new DynamicCollection(ib, attr, s)
- } else {
- new DynamicValue(ib, attr, s, jL.map { e => transformOut(null, null, e) })
- }
- }
- case jM : java.util.Map[_,_] => {
- if ( s != null ) {
- new DynamicMap(ib, attr, s)
- } else {
- new DynamicValue(ib, attr, s, jM.map {
- case (k, v) => k -> transformOut(null, null, v)
- }.toMap)
- }
- }
- case x => {
- if ( s != null ) {
- new DynamicValue(ib, attr, s)
- } else {
- new DynamicValue(ib, attr, s, x)
- }
- }
- }
-
- private[builders] def transformIn(v : Any) : Any = v match {
- case dr : DynamicReference => dr.r
- case ds : DynamicStruct => ds.s
- case dv : DynamicValue => dv.get
- case l : Seq[_] => l.map{ e => transformIn(e)}.asJava
- case m : Map[_,_] => m.map {
- case (k,v) => k -> transformIn(v)
- }.asJava
- case x => x
- }
-
-}
-
-class DynamicValue(val ib : InstanceBuilder, val attrName : String, val s: IStruct, var value : Any = null) extends Dynamic {
- import DynamicValue._
-
- implicit val iib : InstanceBuilder = ib
-
- def ~(v : Any): Unit = {
- if ( s != null ) {
- s.set(attrName, transformIn(v))
- } else {
- value = v
- }
- }
-
- def get : Any = if ( s != null ) s.get(attrName) else value
-
- def selectDynamic(name: String) : DynamicValue = {
-
- throw new UnsupportedOperationException()
- }
-
- def update(key : Any, value : Object): Unit = {
- throw new UnsupportedOperationException()
- }
-
- def apply(key : Any): DynamicValue = {
-
- if ( s != null && s.isInstanceOf[Referenceable] && key.isInstanceOf[String]) {
- val r = s.asInstanceOf[Referenceable]
- if ( r.getTraits contains attrName ) {
- val traitAttr = key.asInstanceOf[String]
- return new DynamicStruct(ib, r.getTrait(attrName)).selectDynamic(traitAttr)
- }
- }
- throw new UnsupportedOperationException()
- }
-}
-
-class DynamicCollection(ib : InstanceBuilder, attrName : String, s: IStruct) extends DynamicValue(ib, attrName ,s) {
- import DynamicValue._
-
- override def update(key : Any, value : Object): Unit = {
- var jL = s.get(attrName)
- val idx = key.asInstanceOf[Int]
- if (jL == null ) {
- val l = new java.util.ArrayList[Object]()
- l.ensureCapacity(idx)
- jL = l
- }
- val nJL = new java.util.ArrayList[Object](jL.asInstanceOf[java.util.List[Object]])
- nJL.asInstanceOf[java.util.List[Object]].set(idx, transformIn(value).asInstanceOf[Object])
- s.set(attrName, nJL)
- }
-
- override def apply(key : Any): DynamicValue = {
- var jL = s.get(attrName)
- val idx = key.asInstanceOf[Int]
- if (jL == null ) {
- null
- } else {
- transformOut(null, null, jL.asInstanceOf[java.util.List[Object]].get(idx))
- }
- }
-}
-
-class DynamicMap(ib : InstanceBuilder, attrName : String, s: IStruct) extends DynamicValue(ib, attrName ,s) {
- import DynamicValue._
- override def update(key : Any, value : Object): Unit = {
- var jM = s.get(attrName)
- if (jM == null ) {
- jM = new java.util.HashMap[Object, Object]()
- }
- jM.asInstanceOf[java.util.Map[Object, Object]].put(key.asInstanceOf[AnyRef], value)
- }
-
- override def apply(key : Any): DynamicValue = {
- var jM = s.get(attrName)
- if (jM == null ) {
- null
- } else {
- transformOut(null, null, jM.asInstanceOf[java.util.Map[Object, Object]].get(key))
- }
- }
-}
-
-class DynamicStruct(ib : InstanceBuilder, s: IStruct) extends DynamicValue(ib, null ,s) {
- import DynamicValue._
- override def selectDynamic(name: String) : DynamicValue = {
- transformOut(s, name, s.get(name))
- }
-
- def updateDynamic(name: String)(value: Any) {
- s.set(name, transformIn(value))
- }
-
- override def ~(v : Any): Unit = { throw new UnsupportedOperationException()}
- override def get : Any = s
-
-}
-
-class DynamicReference(ib : InstanceBuilder, val r : IReferenceableInstance) extends DynamicStruct(ib, r) {
-
- private def _trait(name : String) = new DynamicStruct(ib, r.getTrait(name))
-
- override def selectDynamic(name: String) : DynamicValue = {
- if ( r.getTraits contains name ) {
- _trait(name)
- } else {
- super.selectDynamic(name)
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala
deleted file mode 100644
index 5ea345f..0000000
--- a/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala
+++ /dev/null
@@ -1,188 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.builders
-
-import com.google.common.collect.ImmutableList
-import org.apache.atlas.typesystem.TypesDef
-import org.apache.atlas.typesystem.types._
-import org.apache.atlas.typesystem.types.utils.TypesUtil
-import scala.collection.mutable.ArrayBuffer
-import scala.language.{dynamics, implicitConversions, postfixOps}
-import scala.util.DynamicVariable
-import com.google.common.collect.ImmutableSet
-
-object TypesBuilder {
-
- case class Context(enums : ArrayBuffer[EnumTypeDefinition],
- structs : ArrayBuffer[StructTypeDefinition],
- classes : ArrayBuffer[HierarchicalTypeDefinition[ClassType]],
- traits : ArrayBuffer[HierarchicalTypeDefinition[TraitType]],
- currentTypeAttrs : ArrayBuffer[Attr] = null)
-
- class AttrOption()
- class ReverseAttributeName(val rNm : String) extends AttrOption
- class MultiplicityOption(val lower: Int, val upper: Int, val isUnique: Boolean) extends AttrOption
-
- val required = new AttrOption()
- val optional = new AttrOption()
- val collection = new AttrOption()
- val set = new AttrOption()
- val composite = new AttrOption()
- val unique = new AttrOption()
- val indexed = new AttrOption()
- def reverseAttributeName(rNm : String) = new ReverseAttributeName(rNm)
- def multiplicty(lower: Int, upper: Int, isUnique: Boolean) = new MultiplicityOption(lower, upper, isUnique)
-
- val boolean = DataTypes.BOOLEAN_TYPE.getName
- val byte = DataTypes.BYTE_TYPE.getName
- val short = DataTypes.SHORT_TYPE.getName
- val int = DataTypes.INT_TYPE.getName
- val long = DataTypes.LONG_TYPE.getName
- val float = DataTypes.FLOAT_TYPE.getName
-
- val double = DataTypes.DOUBLE_TYPE.getName
- val bigint = DataTypes.BIGINTEGER_TYPE.getName
- val bigdecimal = DataTypes.BIGDECIMAL_TYPE.getName
- val date = DataTypes.DATE_TYPE.getName
- val string = DataTypes.STRING_TYPE.getName
-
- def array(t : String) : String = {
- DataTypes.arrayTypeName(t)
- }
-
- def map(kt : String, vt : String) : String = {
- DataTypes.mapTypeName(kt, vt)
- }
-
- class Attr(ctx : Context, val name : String) {
-
- private var dataTypeName : String = DataTypes.BOOLEAN_TYPE.getName
- private var multiplicity: Multiplicity = Multiplicity.OPTIONAL
- private var isComposite: Boolean = false
- private var reverseAttributeName: String = null
- private var isUnique: Boolean = false
- private var isIndexable: Boolean = false
-
- ctx.currentTypeAttrs += this
-
- def getDef : AttributeDefinition =
- new AttributeDefinition(name, dataTypeName,
- multiplicity, isComposite, isUnique, isIndexable, reverseAttributeName)
-
- def `~`(dT : String, options : AttrOption*) : Attr = {
- dataTypeName = dT
- options.foreach { o =>
- o match {
- case `required` => {multiplicity = Multiplicity.REQUIRED}
- case `optional` => {multiplicity = Multiplicity.OPTIONAL}
- case `collection` => {multiplicity = Multiplicity.COLLECTION}
- case `set` => {multiplicity = Multiplicity.SET}
- case `composite` => {isComposite = true}
- case `unique` => {isUnique = true}
- case `indexed` => {isIndexable = true}
- case m : MultiplicityOption => {multiplicity = new Multiplicity(m.lower, m.upper, m.isUnique)}
- case r : ReverseAttributeName => {reverseAttributeName = r.rNm}
- case _ => ()
- }
- }
- this
- }
-
- }
-
-}
-
-class TypesBuilder {
-
- import org.apache.atlas.typesystem.builders.TypesBuilder.{Attr, Context}
-
- val required = TypesBuilder.required
- val optional = TypesBuilder.optional
- val collection = TypesBuilder.collection
- val set = TypesBuilder.set
- val composite = TypesBuilder.composite
- val unique = TypesBuilder.unique
- val indexed = TypesBuilder.indexed
- def multiplicty = TypesBuilder.multiplicty _
- def reverseAttributeName = TypesBuilder.reverseAttributeName _
-
- val boolean = TypesBuilder.boolean
- val byte = TypesBuilder.byte
- val short = TypesBuilder.short
- val int = TypesBuilder.int
- val long = TypesBuilder.long
- val float = TypesBuilder.float
-
- val double = TypesBuilder.double
- val bigint = TypesBuilder.bigint
- val bigdecimal = TypesBuilder.bigdecimal
- val date = TypesBuilder.date
- val string = TypesBuilder.string
-
- def array = TypesBuilder.array _
-
- def map = TypesBuilder.map _
-
- val context = new DynamicVariable[Context](Context(new ArrayBuffer(),
- new ArrayBuffer(),
- new ArrayBuffer(),
- new ArrayBuffer()))
-
- implicit def strToAttr(s : String) = new Attr(context.value, s)
-
- def types(f : => Unit ) : TypesDef = {
- f
- TypesDef(context.value.enums,
- context.value.structs,
- context.value.traits,
- context.value.classes)
- }
-
- def _class(name : String, superTypes : List[String] = List())(f : => Unit): Unit = {
- val attrs = new ArrayBuffer[Attr]()
- context.withValue(context.value.copy(currentTypeAttrs = attrs)){f}
- context.value.classes +=
- TypesUtil.createClassTypeDef(name, ImmutableSet.copyOf[String](superTypes.toArray), attrs.map(_.getDef):_*)
- }
-
- def _trait(name : String, superTypes : List[String] = List())(f : => Unit): Unit = {
- val attrs = new ArrayBuffer[Attr]()
- context.withValue(context.value.copy(currentTypeAttrs = attrs)){f}
- context.value.traits +=
- TypesUtil.createTraitTypeDef(name, ImmutableSet.copyOf[String](superTypes.toArray), attrs.map(_.getDef):_*)
- val v = context.value
- v.traits.size
- }
-
- def struct(name : String)(f : => Unit): Unit = {
- val attrs = new ArrayBuffer[Attr]()
- context.withValue(context.value.copy(currentTypeAttrs = attrs)){f}
- context.value.structs +=
- new StructTypeDefinition(name, attrs.map(_.getDef).toArray)
- }
-
- def enum(name : String, values : String*) : Unit = {
- val enums = values.zipWithIndex.map{ case (v, i) =>
- new EnumValue(v,i)
- }
- context.value.enums +=
- TypesUtil.createEnumTypeDef(name, enums:_*)
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala
deleted file mode 100755
index d4bed75..0000000
--- a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/InstanceSerialization.scala
+++ /dev/null
@@ -1,374 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.json
-
-import java.text.SimpleDateFormat
-
-import org.apache.atlas.typesystem._
-import org.apache.atlas.typesystem.persistence.{AtlasSystemAttributes, Id}
-import org.apache.atlas.typesystem.types._
-import org.json4s._
-import org.json4s.native.Serialization._
-
-import scala.collection.JavaConversions._
-import scala.collection.JavaConverters._
-import java.util.Date
-
-object InstanceSerialization {
-
- case class _Id(id : String, version : Int, typeName : String, state : Option[String])
- case class _AtlasSystemAttributes(createdBy: Option[String], modifiedBy: Option[String], createdTime: Option[Date], modifiedTime: Option[Date])
- case class _Struct(typeName : String, values : Map[String, AnyRef])
- case class _Reference(id : Option[_Id],
- typeName : String,
- values : Map[String, AnyRef],
- traitNames : List[String],
- traits : Map[String, _Struct],
- systemAttributes : Option[_AtlasSystemAttributes])
-
- def Try[B](x : => B) : Option[B] = {
- try { Some(x) } catch { case _ : Throwable => None }
- }
-
- /**
- * Convert a Map into
- * - a Reference or
- * - a Struct or
- * - a Id or
- * - a Java Map whose values are recursively converted.
- * @param jsonMap
- * @param format
- */
- class InstanceJavaConversion(jsonMap : Map[String, _], format : Formats) {
-
- /**
- * For Id, Map must contain the [[_Id]] 'typeHint'
- * @return
- */
- def idClass: Option[String] = {
- jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])).
- filter(s => s == classOf[_Id].getName)
- }
-
- /**
- * validate and extract 'id' attribute from Map
- * @return
- */
- def id: Option[String] = {
- jsonMap.get("id").filter(_.isInstanceOf[String]).flatMap(v => Some(v.asInstanceOf[String]))
- }
-
- def createdBy: Option[String] = {
- jsonMap.get("createdBy").filter(_.isInstanceOf[String]).flatMap(v => Some(v.asInstanceOf[String]))
- }
-
- def modifiedBy: Option[String] = {
- jsonMap.get("modifiedBy").filter(_.isInstanceOf[String]).flatMap(v => Some(v.asInstanceOf[String]))
- }
-
- /**
- * validate and extract 'state' attribute from Map
- * @return
- */
- def state: Option[String] = {
- jsonMap.get("state").filter(_.isInstanceOf[String]).flatMap(v => Some(v.asInstanceOf[String]))
- }
-
- /**
- * validate and extract 'version' attribute from Map
- * @return
- */
- def version: Option[Int] = {
- jsonMap.get("version").flatMap{
- case i : Int => Some(i)
- case bI : BigInt => Some(bI.toInt)
- case _ => None
- }
- }
-
- def createdTime: Option[Date] = {
- jsonMap.get("createdTime").filter(_.isInstanceOf[String]).flatMap(v => Some(v.asInstanceOf[Date]))
- }
-
- def modifiedTime: Option[Date] = {
- jsonMap.get("modifiedTime").filter(_.isInstanceOf[String]).flatMap(v => Some(v.asInstanceOf[Date]))
- }
-
- /**
- * A Map is an Id if:
- * - it has the correct [[format.typeHintFieldName]]
- * - it has a 'typeName'
- * - it has an 'id'
- * - it has a 'version'
- * @return
- */
- def convertId : Option[_Id] = {
- for {
- refClass <- idClass
- typNm <- typeName
- i <- id
- s <- Some(state)
- v <- version
- } yield _Id(i, v, typNm, s)
- }
-
- def convertSystemAttributes: Option[_AtlasSystemAttributes] = {
- for {
- c <- Some(createdBy)
- m <- Some(modifiedBy)
- c_t <- Some(createdTime)
- m_t <- Some(modifiedTime)
- } yield _AtlasSystemAttributes(c, m, c_t, m_t)
- }
-
- /**
- * validate and extract 'typeName' attribute from Map
- * @return
- */
- def typeName: Option[String] = {
- jsonMap.get("typeName").flatMap(x => Try(x.asInstanceOf[String]))
- }
-
- /**
- * For Reference, Map must contain the [[_Reference]] 'typeHint'
- * @return
- */
- def referenceClass: Option[String] = {
- jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])).
- filter(s => s == classOf[_Reference].getName)
- }
-
- /**
- * For Reference, Map must contain the [[_Struct]] 'typeHint'
- * @return
- */
- def structureClass: Option[String] = {
- jsonMap.get(format.typeHintFieldName).flatMap(x => Try(x.asInstanceOf[String])).
- filter(s => s == classOf[_Struct].getName)
- }
-
- /**
- * validate and extract 'values' attribute from Map
- * @return
- */
- def valuesMap: Option[Map[String, AnyRef]] = {
- jsonMap.get("values").flatMap(x => Try(x.asInstanceOf[Map[String, AnyRef]]))
- }
-
- /**
- * validate and extract 'traitNames' attribute from Map
- * @return
- */
- def traitNames: Option[Seq[String]] = {
- jsonMap.get("traitNames").flatMap(x => Try(x.asInstanceOf[Seq[String]]))
- }
-
- /**
- * A Map is an Struct if:
- * - it has the correct [[format.typeHintFieldName]]
- * - it has a 'typeName'
- * - it has a 'values' attribute
- * @return
- */
- def struct: Option[_Struct] = {
- for {
- refClass <- structureClass
- typNm <- typeName
- values <- valuesMap
- } yield _Struct(typNm, values)
- }
-
- def sequence[A](a : List[(String,Option[A])]) : Option[List[(String,A)]] = a match {
- case Nil => Some(Nil)
- case h :: t => {
- h._2 flatMap {hh => sequence(t) map { (h._1,hh) :: _}}
- }
- }
-
- /**
- * Extract and convert the traits in this Map.
- *
- * @return
- */
- def traits: Option[Map[String, _Struct]] = {
-
- /**
- * 1. validate and extract 'traitss' attribute from Map
- * Must be a Map[String, _]
- */
- val tEntry : Option[Map[String, _]] = jsonMap.get("traits").flatMap(x => Try(x.asInstanceOf[Map[String, _]]))
-
-
- /**
- * Try to convert each entry in traits Map into a _Struct
- * - each entry itself must be of type Map[String, _]
- * - recursively call InstanceJavaConversion on this Map to convert to a struct
- */
- val x: Option[List[(String, Option[_Struct])]] = tEntry.map { tMap: Map[String, _] =>
- val y: Map[String, Option[_Struct]] = tMap.map { t =>
- val tObj: Option[_Struct] = Some(t._2).flatMap(x => Try(x.asInstanceOf[Map[String, _]])).
- flatMap { traitObj: Map[String, _] =>
- new InstanceJavaConversion(traitObj, format).struct
- }
- (t._1, tObj)
- }
- y.toList
- }
-
- /**
- * Convert a List of Optional successes into an Option of List
- */
- x flatMap (sequence(_)) map (_.toMap)
-
- }
-
- def idObject : Option[_Id] = {
- val idM = jsonMap.get("id").flatMap(x => Try(x.asInstanceOf[Map[String, _]]))
- idM flatMap (m => new InstanceJavaConversion(m, format).convertId)
- }
-
- /**
- * A Map is an Reference if:
- * - it has the correct [[format.typeHintFieldName]]
- * - it has a 'typeName'
- * - it has a 'values' attribute
- * - it has 'traitNames' attribute
- * - it has 'traits' attribute
- * @return
- */
- def reference : Option[_Reference] = {
- for {
- refClass <- referenceClass
- typNm <- typeName
- i <- Some(idObject)
- values <- valuesMap
- traitNms <- traitNames
- ts <- traits
- s_attr <- Some(convertSystemAttributes)
- } yield _Reference(i, typNm, values, traitNms.toList, ts, s_attr)
- }
-
- /**
- * A Map converted to Java:
- * - if Map can be materialized as a _Reference, materialize and then recursively call asJava on it.
- * - if Map can be materialized as a _Struct, materialize and then recursively call asJava on it.
- * - if Map can be materialized as a _Id, materialize and then recursively call asJava on it.
- * - otherwise convert each value with asJava and construct as new JavaMap.
- * @return
- */
- def convert : Any = {
- reference.map(asJava(_)(format)).getOrElse {
- struct.map(asJava(_)(format)).getOrElse {
- convertId.map(asJava(_)(format)).getOrElse {
- jsonMap.map { t =>
- (t._1 -> asJava(t._2)(format))
- }.asJava
- }
- }
- }
- }
- }
-
- def asJava(v : Any)(implicit format: Formats) : Any = v match {
- case i : _Id => new Id(i.id, i.version, i.typeName, i.state.orNull)
- case s : _Struct => new Struct(s.typeName, asJava(s.values).asInstanceOf[java.util.Map[String, Object]])
- case s_attr : _AtlasSystemAttributes => new AtlasSystemAttributes(s_attr.createdBy.orNull, s_attr.modifiedBy.orNull, s_attr.createdTime.orNull, s_attr.modifiedTime.orNull)
- case r : _Reference => {
- val id = r.id match {
- case Some(i) => new Id(i.id, i.version, i.typeName, i.state.orNull)
- case None => new Id(r.typeName)
- }
-
- val s_attr = r.systemAttributes match {
- case Some(s) => new AtlasSystemAttributes(s.createdBy.orNull, s.modifiedBy.orNull, s.createdTime.orNull, s.modifiedTime.orNull)
- case None => new AtlasSystemAttributes()
- }
- new Referenceable(id,
- r.typeName,
- asJava(r.values).asInstanceOf[java.util.Map[String, Object]],
- asJava(r.traitNames).asInstanceOf[java.util.List[String]],
- asJava(r.traits).asInstanceOf[java.util.Map[String, IStruct]], s_attr)
- }
- case l : List[_] => l.map(e => asJava(e)).asJava
- case m : Map[_, _] if Try{m.asInstanceOf[Map[String,_]]}.isDefined => {
- if (m.keys.size == 2 && m.keys.contains("value") && m.keys.contains("ordinal")) {
- new EnumValue(m.get("value").toString, m.get("ordinal").asInstanceOf[BigInt].intValue())
- } else {
- new InstanceJavaConversion(m.asInstanceOf[Map[String,_]], format).convert
- }
- }
-
- case _ => v
- }
-
- def asScala(v : Any) : Any = v match {
- case i : Id => _Id(i._getId(), i.getVersion, i.getClassName, Some(i.getStateAsString))
- case s_attr: AtlasSystemAttributes => _AtlasSystemAttributes(Some(s_attr.createdBy), Some(s_attr.modifiedBy), Some(s_attr.createdTime), Some(s_attr.modifiedTime))
- case r : IReferenceableInstance => {
- val traits = r.getTraits.map { tName =>
- val t = r.getTrait(tName).asInstanceOf[IStruct]
- (tName -> _Struct(t.getTypeName, asScala(t.getValuesMap).asInstanceOf[Map[String, AnyRef]]))
- }.toMap
- _Reference(Some(asScala(r.getId).asInstanceOf[_Id]),
- r.getTypeName, asScala(r.getValuesMap).asInstanceOf[Map[String, AnyRef]],
- asScala(r.getTraits).asInstanceOf[List[String]],
- traits.asInstanceOf[Map[String, _Struct]], Some(asScala(r.getSystemAttributes).asInstanceOf[_AtlasSystemAttributes]))
- }
- case s : IStruct => _Struct(s.getTypeName, asScala(s.getValuesMap).asInstanceOf[Map[String, AnyRef]])
- case l : java.util.List[_] => l.asScala.map(e => asScala(e)).toList
- case m : java.util.Map[_, _] => m.asScala.map(t => (asScala(t._1), asScala(t._2))).toMap
- case _ => v
- }
-
- val _formats = new DefaultFormats {
- override val dateFormatter = TypeSystem.getInstance().getDateFormat.asInstanceOf[SimpleDateFormat]
- override val typeHints = FullTypeHints(List(classOf[_Id], classOf[_Struct], classOf[_Reference]))
- }
-
- def buildFormat(withBigDecimals : Boolean) = {
- if (withBigDecimals)
- _formats + new BigDecimalSerializer + new BigIntegerSerializer
- else
- _formats
- }
-
- def _toJson(value: AnyRef, withBigDecimals : Boolean = false): String = {
- implicit val formats = buildFormat(withBigDecimals)
-
- val _s : AnyRef = asScala(value).asInstanceOf[AnyRef]
- write(_s)
- }
-
- def toJson(value: IStruct, withBigDecimals : Boolean = false): String = {
- _toJson(value, withBigDecimals)
- }
-
- def fromJsonStruct(jsonStr: String, withBigDecimals : Boolean = false): Struct = {
- implicit val formats = buildFormat(withBigDecimals)
- val _s = read[_Struct](jsonStr)
- asJava(_s).asInstanceOf[Struct]
- }
-
- //def toJsonReferenceable(value: Referenceable, withBigDecimals : Boolean = false): String = _toJson(value, withBigDecimals)
- def fromJsonReferenceable(jsonStr: String, withBigDecimals : Boolean = false): Referenceable = {
- implicit val formats = buildFormat(withBigDecimals)
- val _s = read[_Reference](jsonStr)
- asJava(_s).asInstanceOf[Referenceable]
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala
deleted file mode 100755
index ca4dd7f..0000000
--- a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/Serialization.scala
+++ /dev/null
@@ -1,348 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.json
-
-import org.apache.atlas.typesystem._
-import org.apache.atlas.typesystem.persistence.{AtlasSystemAttributes, Id, ReferenceableInstance, StructInstance}
-import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, MapType, TypeCategory}
-import org.apache.atlas.typesystem.types._
-import org.json4s.JsonAST.JInt
-import org.json4s.{JsonAST, _}
-import org.json4s.native.Serialization._
-
-import scala.collection.JavaConversions._
-import scala.collection.JavaConverters._
-import java.util.Date
-
-class BigDecimalSerializer extends CustomSerializer[java.math.BigDecimal](format => (
- {
- case JDecimal(e) => e.bigDecimal
- },
- {
- case e: java.math.BigDecimal => JDecimal(new BigDecimal(e))
- }
- ))
-
-class BigIntegerSerializer extends CustomSerializer[java.math.BigInteger](format => (
- {
- case JInt(e) => e.bigInteger
- },
- {
- case e: java.math.BigInteger => JInt(new BigInt(e))
- }
- ))
-
-class IdSerializer extends CustomSerializer[Id](format => ( {
- case JObject(JField("id", JInt(id)) ::
- JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
- JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, typeName)
- case JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
- JField("id", JInt(id)) ::
- JField("version", JInt(version)) :: Nil) => new Id(id.toLong, version.toInt, typeName)
- case JObject(JField("id", JString(id)) ::
- JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
- JField("version", JString(version)) :: Nil) => new Id(id, version.toInt, typeName)
- case JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
- JField("id", JString(id)) ::
- JField("version", JString(version)) :: Nil) => new Id(id, version.toInt, typeName)
-
-}, {
- case id: Id => JObject(JField("id", JString(id.id)),
- JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(id.typeName)),
- JField("version", JInt(id.version)))
-}
- ))
-
-class TypedStructSerializer() extends Serializer[ITypedStruct] {
-
- def deserialize(implicit format: Formats) = {
- case (TypeInfo(clazz, ptype), json) if classOf[ITypedStruct].isAssignableFrom(clazz) => json match {
- case JObject(fs) =>
- val (typ, fields) = fs.partition(f => f._1 == Serialization.STRUCT_TYPE_FIELD_NAME)
- val typName = typ(0)._2.asInstanceOf[JString].s
- val sT = typSystem.getDataType(
- classOf[IConstructableType[IStruct, ITypedStruct]], typName).asInstanceOf[IConstructableType[IStruct, ITypedStruct]]
- val s = sT.createInstance()
- Serialization.deserializeFields(typSystem, sT, s, fields)
- s
- case x => throw new MappingException("Can't convert " + x + " to TypedStruct")
- }
-
- }
-
- def typSystem = TypeSystem.getInstance()
-
- /**
- * Implicit conversion from `java.math.BigInteger` to `scala.BigInt`.
- * match the builtin conversion for BigDecimal.
- * See https://groups.google.com/forum/#!topic/scala-language/AFUamvxu68Q
- */
- //implicit def javaBigInteger2bigInt(x: java.math.BigInteger): BigInt = new BigInt(x)
-
- def serialize(implicit format: Formats) = {
- case e: ITypedStruct =>
- val fields = Serialization.serializeFields(e)
- JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(e.getTypeName)) :: fields)
- }
-}
-
-class TypedReferenceableInstanceSerializer()
- extends Serializer[ITypedReferenceableInstance] {
-
- def deserialize(implicit format: Formats) = {
- case (TypeInfo(clazz, ptype), json) if classOf[ITypedReferenceableInstance].isAssignableFrom(clazz) => json match {
- case JObject(JField("id", JInt(id)) ::
- JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
- JField("version", JInt(version)) ::
- JField("state", JString(state)) :: Nil) => new Id(id.toLong, version.toInt, typeName, state)
- case JObject(JField("id", JString(id)) ::
- JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
- JField("version", JInt(version)) ::
- JField("state", JString(state)) :: Nil) => new Id(id, version.toInt, typeName, state)
- case JObject(fs) =>
- var typField: Option[JField] = None
- var idField: Option[JField] = None
- var traitsField: Option[JField] = None
- var sysAttrField: Option[JField] = None
- var fields: List[JField] = Nil
-
- fs.foreach { f: JField => f._1 match {
- case Serialization.STRUCT_TYPE_FIELD_NAME => typField = Some(f)
- case Serialization.ID_TYPE_FIELD_NAME => idField = Some(f)
- case Serialization.TRAIT_TYPE_FIELD_NAME => traitsField = Some(f)
- case Serialization.SYSTEM_ATTR_FIELD_NAME => sysAttrField = Some(f)
- case _ => fields = fields :+ f
- }
- }
-
- var traitNames: List[String] = Nil
-
- traitsField.map { t =>
- val tObj: JObject = t._2.asInstanceOf[JObject]
- tObj.obj.foreach { oTrait =>
- val tName: String = oTrait._1
- traitNames = traitNames :+ tName
- }
- }
-
- val typName = typField.get._2.asInstanceOf[JString].s
- val sT = typSystem.getDataType(
- classOf[ClassType], typName).asInstanceOf[ClassType]
- val id = Serialization.deserializeId(idField.get._2)
- val s_attr = Serialization.deserializeSystemAttributes(sysAttrField.get._2)
- val s = sT.createInstance(id, s_attr, traitNames: _*)
- Serialization.deserializeFields(typSystem, sT, s, fields)
-
- traitsField.map { t =>
- val tObj: JObject = t._2.asInstanceOf[JObject]
- tObj.obj.foreach { oTrait =>
- val tName: String = oTrait._1
- val traitJObj: JObject = oTrait._2.asInstanceOf[JObject]
- val traitObj = s.getTrait(tName).asInstanceOf[ITypedStruct]
- val tT = typSystem.getDataType(
- classOf[TraitType], traitObj.getTypeName).asInstanceOf[TraitType]
- val (tTyp, tFields) = traitJObj.obj.partition(f => f._1 == Serialization.STRUCT_TYPE_FIELD_NAME)
- Serialization.deserializeFields(typSystem, tT, traitObj, tFields)
- }
- }
-
- s
- case x => throw new MappingException("Can't convert " + x + " to TypedStruct")
- }
-
- }
-
- def typSystem = TypeSystem.getInstance()
-
- def serialize(implicit format: Formats) = {
- case id: Id => Serialization.serializeId(id)
- case e: ITypedReferenceableInstance =>
- val idJ = JField(Serialization.ID_TYPE_FIELD_NAME, Serialization.serializeId(e.getId))
- val s_attrJ = JField(Serialization.SYSTEM_ATTR_FIELD_NAME, Serialization.serializeSystemAttributes(e.getSystemAttributes))
- var fields = Serialization.serializeFields(e)
- val traitsJ: List[JField] = e.getTraits.map(tName => JField(tName, Extraction.decompose(e.getTrait(tName)))).toList
-
- fields = idJ :: s_attrJ :: fields
- if (traitsJ.size > 0) {
- fields = fields :+ JField(Serialization.TRAIT_TYPE_FIELD_NAME, JObject(traitsJ: _*))
- }
-
- JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(e.getTypeName)) :: fields)
- }
-}
-
-
-object Serialization {
- val STRUCT_TYPE_FIELD_NAME = "$typeName$"
- val ID_TYPE_FIELD_NAME = "$id$"
- val TRAIT_TYPE_FIELD_NAME = "$traits$"
- val SYSTEM_ATTR_FIELD_NAME = "$systemAttributes$"
-
- def extractList(lT: ArrayType, value: JArray)(implicit format: Formats): Any = {
- val dT = lT.getElemType
- value.arr.map(extract(dT, _)).asJava
- }
-
- def extractMap(mT: MapType, value: JObject)(implicit format: Formats): Any = {
- val kT = mT.getKeyType
- val vT = mT.getValueType
- value.obj.map { f: JField => f._1 -> extract(vT, f._2)}.toMap.asJava
- }
-
- def extract(dT: IDataType[_], value: JValue)(implicit format: Formats): Any = value match {
- case value: JBool => Extraction.extract[Boolean](value)
- case value: JInt => Extraction.extract[Int](value)
- case value: JDouble => Extraction.extract[Double](value)
- case value: JDecimal => Extraction.extract[BigDecimal](value)
- case value: JString => Extraction.extract[String](value)
- case JNull => null
- case value: JArray => extractList(dT.asInstanceOf[ArrayType], value.asInstanceOf[JArray])
- case value: JObject if dT.getTypeCategory eq TypeCategory.MAP =>
- extractMap(dT.asInstanceOf[MapType], value.asInstanceOf[JObject])
- case value: JObject if ((dT.getTypeCategory eq TypeCategory.STRUCT) || (dT.getTypeCategory eq TypeCategory.TRAIT)) =>
- Extraction.extract[ITypedStruct](value)
- case value: JObject =>
- Extraction.extract[ITypedReferenceableInstance](value)
- }
-
- def serializeId(id: Id) = JObject(JField("id", JString(id.id)),
- JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(id.typeName)),
- JField("version", JInt(id.version)), JField("state", JString(id.state.name())))
-
-
- //Handling serialization issues with null values
- //See https://github.com/json4s/json4s/issues/358
- def parseString(s: Any) = s match {
- case s:String => JString(s)
- case s:Date => JString(s.toString)
- case _ => JString("")
- }
-
- def serializeSystemAttributes(s_attr: AtlasSystemAttributes) = JObject(
- JField("createdBy", parseString(s_attr.modifiedBy)),
- JField("modifiedBy", parseString(s_attr.modifiedBy)),
- JField("createdTime", parseString(s_attr.createdTime)),
- JField("modifiedTime", parseString(s_attr.modifiedTime))
- )
-
- def serializeFields(e: ITypedInstance)(implicit format: Formats) = e.fieldMapping.fields.map {
- case (fName, info) => {
- var v = e.get(fName)
- if (v != null && (info.dataType().getTypeCategory eq TypeCategory.MAP)) {
- v = v.asInstanceOf[java.util.Map[_, _]].toMap
- }
-
- if (v != null && (info.dataType().getTypeCategory eq TypeCategory.CLASS) && !info.isComposite) {
- v = v.asInstanceOf[IReferenceableInstance].getId
- }
-
- if (v != null && (info.dataType().getTypeCategory eq TypeCategory.ENUM)) {
- v = v.asInstanceOf[EnumValue].value
- }
-
- JField(fName, Extraction.decompose(v))
- }
- }.toList.map(_.asInstanceOf[JField])
-
- def deserializeFields[T <: ITypedInstance](typeSystem: TypeSystem,
- sT: IConstructableType[_, T],
- s: T, fields: List[JField])(implicit format: Formats)
- = {
- //MetadataService.setCurrentService(currentMdSvc)
- fields.foreach { f =>
- val fName = f._1
- val fInfo = sT.fieldMapping.fields(fName)
- if (fInfo != null) {
- //println(fName)
- var v = f._2
- if (fInfo.dataType().getTypeCategory == TypeCategory.TRAIT ||
- fInfo.dataType().getTypeCategory == TypeCategory.STRUCT) {
- v = v match {
- case JObject(sFields) =>
- JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(fInfo.dataType.getName)) :: sFields)
- case x => x
- }
- }
- s.set(fName, Serialization.extract(fInfo.dataType(), v))
- }
- }
- }
-
- def deserializeId(value: JValue)(implicit format: Formats) = value match {
- case JObject(JField("id", JInt(id)) ::
- JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
- JField("version", JInt(version)) ::
- JField("state", JString(state)) :: Nil) => new Id(id.toLong, version.toInt, typeName, state)
- case JObject(JField("id", JString(id)) ::
- JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) ::
- JField("version", JInt(version)) ::
- JField("state", JString(state)) :: Nil) => new Id(id, version.toInt, typeName, state)
- }
-
- def deserializeSystemAttributes(value: JValue)(implicit format : Formats) = value match {
- case JObject(JField("createdBy", JString(createdBy))::
- JField("modifiedBy", JString(modifiedBy))::
- JField("createdTime", JString(createdTime))::
- JField("modifiedTime", JString(modifiedTime))::Nil) => new AtlasSystemAttributes(createdBy, modifiedBy, createdTime, modifiedTime)
- }
-
- def toJson(value: ITypedReferenceableInstance): String = {
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
- write(value)
- }
-
- def toJson(value: ITypedInstance): String = {
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
- write(value)
- }
-
- def toJsonPretty(value: ITypedReferenceableInstance): String = {
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
- writePretty(value)
- }
-
- def fromJson(jsonStr: String): ITypedReferenceableInstance = {
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
- read[ReferenceableInstance](jsonStr)
- }
-
- def traitFromJson(jsonStr: String): ITypedInstance = {
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
- read[StructInstance](jsonStr)
- }
-
- def arrayFromJson(jsonStr: String): ITypedInstance = {
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer
-
- read[StructInstance](jsonStr)
- }
-}
-
-
[20/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryDeleteTestBase.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryDeleteTestBase.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryDeleteTestBase.java
deleted file mode 100644
index 0834601..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryDeleteTestBase.java
+++ /dev/null
@@ -1,1205 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasClient;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.CreateUpdateEntitiesResult;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.model.legacy.EntityResult;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.exception.EntityExistsException;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.NullRequiredAttributeException;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.*;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.atlas.TestUtils.*;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotEquals;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-
-/**
- * Test for GraphBackedMetadataRepository.deleteEntities
- *
- * Guice loads the dependencies and injects the necessary objects
- *
- */
-@Guice(modules = TestModules.TestOnlyModule.class)
-public abstract class GraphBackedMetadataRepositoryDeleteTestBase {
-
- protected MetadataRepository repositoryService;
-
- private TypeSystem typeSystem;
-
- private ClassType compositeMapOwnerType;
-
- private ClassType compositeMapValueType;
-
- @Inject
- AtlasGraph atlasGraph;
-
- @BeforeClass
- public void setUp() throws Exception {
-
- typeSystem = TypeSystem.getInstance();
- typeSystem.reset();
-
- new GraphBackedSearchIndexer(new AtlasTypeRegistry());
- final GraphBackedMetadataRepository delegate = new GraphBackedMetadataRepository(getDeleteHandler(typeSystem), atlasGraph);
-
- repositoryService = TestUtils.addTransactionWrapper(delegate);
-
- TestUtils.defineDeptEmployeeTypes(typeSystem);
- TestUtils.createHiveTypes(typeSystem);
-
- // Define type for map value.
- HierarchicalTypeDefinition<ClassType> mapValueDef = TypesUtil.createClassTypeDef("CompositeMapValue",
- ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE));
-
- // Define type with map where the value is a composite class reference to MapValue.
- HierarchicalTypeDefinition<ClassType> mapOwnerDef = TypesUtil.createClassTypeDef("CompositeMapOwner",
- ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- new AttributeDefinition("map", DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
- "CompositeMapValue"), Multiplicity.OPTIONAL, true, null));
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(mapOwnerDef, mapValueDef));
- typeSystem.defineTypes(typesDef);
- compositeMapOwnerType = typeSystem.getDataType(ClassType.class, "CompositeMapOwner");
- compositeMapValueType = typeSystem.getDataType(ClassType.class, "CompositeMapValue");
- }
-
- abstract DeleteHandler getDeleteHandler(TypeSystem typeSystem);
-
- @BeforeMethod
- public void setupContext() {
- TestUtils.resetRequestContext();
- }
-
- @AfterClass
- public void tearDown() throws Exception {
- TypeSystem.getInstance().reset();
-// AtlasGraphProvider.cleanup();
- }
-
- @Test
- public void testDeleteAndCreate() throws Exception {
- Referenceable entity = createDBEntity();
- String id = createInstance(entity);
-
- //get entity by unique attribute should return the created entity
- ITypedReferenceableInstance instance =
- repositoryService.getEntityDefinition(TestUtils.DATABASE_TYPE, "name", entity.get("name"));
- assertEquals(instance.getId()._getId(), id);
-
- //delete entity should mark it as deleted
- List<String> results = deleteEntities(id).getDeletedEntities();
- assertEquals(results.get(0), id);
- assertEntityDeleted(id);
-
- //get entity by unique attribute should throw EntityNotFoundException
- try {
- repositoryService.getEntityDefinition(TestUtils.DATABASE_TYPE, "name", entity.get("name"));
- fail("Expected EntityNotFoundException");
- } catch(EntityNotFoundException e) {
- //expected
- }
-
- //Create the same entity again, should create new entity
- String newId = createInstance(entity);
- assertNotEquals(id, newId);
-
- //get by unique attribute should return the new entity
- instance = repositoryService.getEntityDefinition(TestUtils.DATABASE_TYPE, "name", entity.get("name"));
- assertEquals(instance.getId()._getId(), newId);
- }
-
- @Test
- public void testDeleteEntityWithTraits() throws Exception {
- Referenceable entity = createDBEntity();
- String id = createInstance(entity);
-
- TraitType dataType = typeSystem.getDataType(TraitType.class, PII);
- ITypedStruct trait = dataType.convert(new Struct(TestUtils.PII), Multiplicity.REQUIRED);
- repositoryService.addTrait(id, trait);
-
- ITypedReferenceableInstance instance = repositoryService.getEntityDefinition(id);
- assertTrue(instance.getTraits().contains(PII));
-
- deleteEntities(id);
- assertEntityDeleted(id);
- assertTestDeleteEntityWithTraits(id);
- }
-
- protected abstract void assertTestDeleteEntityWithTraits(String guid)
- throws EntityNotFoundException, RepositoryException, Exception;
-
- @Test
- public void testDeleteReference() throws Exception {
- //Deleting column should update table
- Referenceable db = createDBEntity();
- String dbId = createInstance(db);
-
- Referenceable column = createColumnEntity();
- String colId = createInstance(column);
-
- Referenceable table = createTableEntity(dbId);
- table.set(COLUMNS_ATTR_NAME, Arrays.asList(new Id(colId, 0, COLUMN_TYPE)));
- String tableId = createInstance(table);
-
- EntityResult entityResult = deleteEntities(colId);
- assertEquals(entityResult.getDeletedEntities().size(), 1);
- assertEquals(entityResult.getDeletedEntities().get(0), colId);
- assertEquals(entityResult.getUpdateEntities().size(), 1);
- assertEquals(entityResult.getUpdateEntities().get(0), tableId);
-
- assertEntityDeleted(colId);
-
- ITypedReferenceableInstance tableInstance = repositoryService.getEntityDefinition(tableId);
- assertColumnForTestDeleteReference(tableInstance);
-
- //Deleting table should update process
- Referenceable process = new Referenceable(PROCESS_TYPE);
- process.set(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS, Arrays.asList(new Id(tableId, 0, TABLE_TYPE)));
- String processId = createInstance(process);
- ITypedReferenceableInstance processInstance = repositoryService.getEntityDefinition(processId);
-
- deleteEntities(tableId);
- assertEntityDeleted(tableId);
-
- assertTableForTestDeleteReference(tableId);
- assertProcessForTestDeleteReference(processInstance);
- }
-
- protected abstract void assertTableForTestDeleteReference(String tableId) throws Exception;
-
- protected abstract void assertColumnForTestDeleteReference(ITypedReferenceableInstance tableInstance)
- throws AtlasException;
-
- protected abstract void assertProcessForTestDeleteReference(ITypedReferenceableInstance processInstance) throws Exception;
-
- protected abstract void assertEntityDeleted(String id) throws Exception;
-
- private EntityResult deleteEntities(String... id) throws Exception {
- RequestContext.createContext();
- return repositoryService.deleteEntities(Arrays.asList(id));
- }
-
- private String createInstance(Referenceable entity) throws Exception {
- ClassType dataType = typeSystem.getDataType(ClassType.class, entity.getTypeName());
- ITypedReferenceableInstance instance = dataType.convert(entity, Multiplicity.REQUIRED);
- CreateUpdateEntitiesResult result = repositoryService.createEntities(instance);
- List<String> results = result.getCreatedEntities();
- return results.get(results.size() - 1);
- }
-
- @Test
- public void testDeleteEntities() throws Exception {
- // Create a table entity, with 3 composite column entities
- Referenceable dbEntity = createDBEntity();
- String dbGuid = createInstance(dbEntity);
- Referenceable table1Entity = createTableEntity(dbGuid);
- Referenceable col1 = createColumnEntity();
- Referenceable col2 = createColumnEntity();
- Referenceable col3 = createColumnEntity();
- table1Entity.set(COLUMNS_ATTR_NAME, ImmutableList.of(col1, col2, col3));
- createInstance(table1Entity);
-
- // Retrieve the table entities from the Repository, to get their guids and the composite column guids.
- ITypedReferenceableInstance tableInstance = repositoryService.getEntityDefinition(TestUtils.TABLE_TYPE,
- NAME, table1Entity.get(NAME));
- List<IReferenceableInstance> columns = (List<IReferenceableInstance>) tableInstance.get(COLUMNS_ATTR_NAME);
-
- //Delete column
- String colId = columns.get(0).getId()._getId();
- String tableId = tableInstance.getId()._getId();
-
- EntityResult entityResult = deleteEntities(colId);
- assertEquals(entityResult.getDeletedEntities().size(), 1);
- assertEquals(entityResult.getDeletedEntities().get(0), colId);
- assertEquals(entityResult.getUpdateEntities().size(), 1);
- assertEquals(entityResult.getUpdateEntities().get(0), tableId);
- assertEntityDeleted(colId);
-
- tableInstance = repositoryService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, table1Entity.get(NAME));
- assertDeletedColumn(tableInstance);
-
- //update by removing a column
- tableInstance.set(COLUMNS_ATTR_NAME, ImmutableList.of(col3));
- entityResult = updatePartial(tableInstance);
- colId = columns.get(1).getId()._getId();
- assertEquals(entityResult.getDeletedEntities().size(), 1);
- assertEquals(entityResult.getDeletedEntities().get(0), colId);
- assertEntityDeleted(colId);
-
- // Delete the table entities. The deletion should cascade to their composite columns.
- tableInstance = repositoryService.getEntityDefinition(TestUtils.TABLE_TYPE, NAME, table1Entity.get(NAME));
- List<String> deletedGuids = deleteEntities(tableInstance.getId()._getId()).getDeletedEntities();
- assertEquals(deletedGuids.size(), 2);
-
- // Verify that deleteEntities() response has guids for tables and their composite columns.
- Assert.assertTrue(deletedGuids.contains(tableInstance.getId()._getId()));
- Assert.assertTrue(deletedGuids.contains(columns.get(2).getId()._getId()));
-
- // Verify that tables and their composite columns have been deleted from the graph Repository.
- for (String guid : deletedGuids) {
- assertEntityDeleted(guid);
- }
- assertTestDeleteEntities(tableInstance);
- }
-
- protected abstract void assertDeletedColumn(ITypedReferenceableInstance tableInstance) throws AtlasException;
-
- protected abstract void assertTestDeleteEntities(ITypedReferenceableInstance tableInstance) throws Exception;
-
- /**
- * Verify deleting entities with composite references to other entities.
- * The composite entities should also be deleted.
- */
- @Test
- public void testDeleteEntitiesWithCompositeArrayReference() throws Exception {
- String hrDeptGuid = createHrDeptGraph();
-
- ITypedReferenceableInstance hrDept = repositoryService.getEntityDefinition(hrDeptGuid);
- List<ITypedReferenceableInstance> employees = (List<ITypedReferenceableInstance>) hrDept.get("employees");
- Assert.assertEquals(employees.size(), 4);
-
- List<String> employeeGuids = new ArrayList(4);
- for (ITypedReferenceableInstance employee : employees) {
- employeeGuids.add(employee.getId()._getId());
- }
-
- // There should be 4 vertices for Address structs (one for each Person.address attribute value).
- int vertexCount = getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "Address").size();
- Assert.assertEquals(vertexCount, 4);
- vertexCount = getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "SecurityClearance").size();
- Assert.assertEquals(vertexCount, 1);
-
- List<String> deletedEntities = deleteEntities(hrDeptGuid).getDeletedEntities();
- assertTrue(deletedEntities.contains(hrDeptGuid));
- assertEntityDeleted(hrDeptGuid);
-
- // Verify Department entity and its contained Person entities were deleted.
- for (String employeeGuid : employeeGuids) {
- assertTrue(deletedEntities.contains(employeeGuid));
- assertEntityDeleted(employeeGuid);
- }
-
- // Verify all Person.address struct vertices were removed.
- assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "Address"));
-
- // Verify all SecurityClearance trait vertices were removed.
- assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "SecurityClearance"));
- }
-
- protected abstract void assertVerticesDeleted(List<AtlasVertex> vertices);
-
- @Test
- public void testDeleteEntitiesWithCompositeMapReference() throws Exception {
- // Create instances of MapOwner and MapValue.
- // Set MapOwner.map with one entry that references MapValue instance.
- ITypedReferenceableInstance entityDefinition = createMapOwnerAndValueEntities();
- String mapOwnerGuid = entityDefinition.getId()._getId();
-
- // Verify MapOwner.map attribute has expected value.
- ITypedReferenceableInstance mapOwnerInstance = repositoryService.getEntityDefinition(mapOwnerGuid);
- Object object = mapOwnerInstance.get("map");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof Map);
- Map<String, ITypedReferenceableInstance> map = (Map<String, ITypedReferenceableInstance>)object;
- Assert.assertEquals(map.size(), 1);
- ITypedReferenceableInstance mapValueInstance = map.get("value1");
- Assert.assertNotNull(mapValueInstance);
- String mapValueGuid = mapValueInstance.getId()._getId();
- String edgeLabel = GraphHelper.getEdgeLabel(compositeMapOwnerType, compositeMapOwnerType.fieldMapping.fields.get("map"));
- String mapEntryLabel = edgeLabel + "." + "value1";
- AtlasEdgeLabel atlasEdgeLabel = new AtlasEdgeLabel(mapEntryLabel);
- AtlasVertex mapOwnerVertex = GraphHelper.getInstance().getVertexForGUID(mapOwnerGuid);
- object = mapOwnerVertex.getProperty(atlasEdgeLabel.getQualifiedMapKey(), Object.class);
- Assert.assertNotNull(object);
-
- List<String> deletedEntities = deleteEntities(mapOwnerGuid).getDeletedEntities();
- Assert.assertEquals(deletedEntities.size(), 2);
- Assert.assertTrue(deletedEntities.contains(mapOwnerGuid));
- Assert.assertTrue(deletedEntities.contains(mapValueGuid));
-
- assertEntityDeleted(mapOwnerGuid);
- assertEntityDeleted(mapValueGuid);
- }
-
- private ITypedReferenceableInstance createMapOwnerAndValueEntities()
- throws AtlasException, RepositoryException, EntityExistsException {
-
- ITypedReferenceableInstance mapOwnerInstance = compositeMapOwnerType.createInstance();
- mapOwnerInstance.set(NAME, TestUtils.randomString());
- ITypedReferenceableInstance mapValueInstance = compositeMapValueType.createInstance();
- mapValueInstance.set(NAME, TestUtils.randomString());
- mapOwnerInstance.set("map", Collections.singletonMap("value1", mapValueInstance));
- List<String> createEntitiesResult = repositoryService.createEntities(mapOwnerInstance, mapValueInstance).getCreatedEntities();
- Assert.assertEquals(createEntitiesResult.size(), 2);
- ITypedReferenceableInstance entityDefinition = repositoryService.getEntityDefinition("CompositeMapOwner",
- NAME, mapOwnerInstance.get(NAME));
- return entityDefinition;
- }
-
- private EntityResult updatePartial(ITypedReferenceableInstance entity) throws RepositoryException {
- RequestContext.createContext();
- return repositoryService.updatePartial(entity).getEntityResult();
- }
-
- @Test
- public void testUpdateEntity_MultiplicityOneNonCompositeReference() throws Exception {
- String hrDeptGuid = createHrDeptGraph();
- ITypedReferenceableInstance hrDept = repositoryService.getEntityDefinition(hrDeptGuid);
- Map<String, String> nameGuidMap = getEmployeeNameGuidMap(hrDept);
-
- ITypedReferenceableInstance john = repositoryService.getEntityDefinition(nameGuidMap.get("John"));
- Id johnGuid = john.getId();
-
- ITypedReferenceableInstance max = repositoryService.getEntityDefinition(nameGuidMap.get("Max"));
- String maxGuid = max.getId()._getId();
- AtlasVertex vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
- Long creationTimestamp = GraphHelper.getSingleValuedProperty(vertex, Constants.TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(creationTimestamp);
-
- Long modificationTimestampPreUpdate = GraphHelper.getSingleValuedProperty(vertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(modificationTimestampPreUpdate);
-
- ITypedReferenceableInstance jane = repositoryService.getEntityDefinition(nameGuidMap.get("Jane"));
- Id janeId = jane.getId();
-
- // Update max's mentor reference to john.
- ClassType personType = typeSystem.getDataType(ClassType.class, "Person");
- ITypedReferenceableInstance maxEntity = personType.createInstance(max.getId());
- maxEntity.set("mentor", johnGuid);
- EntityResult entityResult = updatePartial(maxEntity);
- assertEquals(entityResult.getUpdateEntities().size(), 1);
- assertTrue(entityResult.getUpdateEntities().contains(maxGuid));
-
- // Verify the update was applied correctly - john should now be max's mentor.
- max = repositoryService.getEntityDefinition(maxGuid);
- ITypedReferenceableInstance refTarget = (ITypedReferenceableInstance) max.get("mentor");
- Assert.assertEquals(refTarget.getId()._getId(), johnGuid._getId());
-
- // Verify modification timestamp was updated.
- vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
- Long modificationTimestampPostUpdate = GraphHelper.getSingleValuedProperty(vertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(modificationTimestampPostUpdate);
- Assert.assertTrue(creationTimestamp < modificationTimestampPostUpdate);
-
- // Update max's mentor reference to jane.
- maxEntity.set("mentor", janeId);
- entityResult = updatePartial(maxEntity);
- assertEquals(entityResult.getUpdateEntities().size(), 1);
- assertTrue(entityResult.getUpdateEntities().contains(maxGuid));
-
- // Verify the update was applied correctly - jane should now be max's mentor.
- max = repositoryService.getEntityDefinition(maxGuid);
- refTarget = (ITypedReferenceableInstance) max.get("mentor");
- Assert.assertEquals(refTarget.getId()._getId(), janeId._getId());
-
- // Verify modification timestamp was updated.
- vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
- Long modificationTimestampPost2ndUpdate = GraphHelper.getSingleValuedProperty(vertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(modificationTimestampPost2ndUpdate);
- Assert.assertTrue(modificationTimestampPostUpdate < modificationTimestampPost2ndUpdate);
-
- ITypedReferenceableInstance julius = repositoryService.getEntityDefinition(nameGuidMap.get("Julius"));
- Id juliusId = julius.getId();
- maxEntity = personType.createInstance(max.getId());
- maxEntity.set("manager", juliusId);
- entityResult = updatePartial(maxEntity);
- // Verify julius' subordinates were updated.
- assertEquals(entityResult.getUpdateEntities().size(), 3);
- assertTrue(entityResult.getUpdateEntities().contains(maxGuid));
- assertTrue(entityResult.getUpdateEntities().containsAll(Arrays.asList(maxGuid, janeId._getId(), juliusId._getId())));
-
- // Verify the update was applied correctly - julius should now be max's manager.
- max = repositoryService.getEntityDefinition(maxGuid);
- refTarget = (ITypedReferenceableInstance) max.get("manager");
- Assert.assertEquals(refTarget.getId()._getId(), juliusId._getId());
- Assert.assertEquals(refTarget.getId()._getId(), juliusId._getId());
- julius = repositoryService.getEntityDefinition(nameGuidMap.get("Julius"));
- Object object = julius.get("subordinates");
- Assert.assertTrue(object instanceof List);
- List<ITypedReferenceableInstance> refValues = (List<ITypedReferenceableInstance>) object;
- Assert.assertEquals(refValues.size(), 1);
- Assert.assertTrue(refValues.contains(max.getId()));
-
- assertTestUpdateEntity_MultiplicityOneNonCompositeReference(janeId._getId());
- }
-
- protected abstract void assertTestUpdateEntity_MultiplicityOneNonCompositeReference(String janeGuid) throws Exception;
-
- /**
- * Verify deleting an entity which is contained by another
- * entity through a bi-directional composite reference.
- *
- * @throws Exception
- */
- @Test
- public void testDisconnectBidirectionalReferences() throws Exception {
- String hrDeptGuid = createHrDeptGraph();
- ITypedReferenceableInstance hrDept = repositoryService.getEntityDefinition(hrDeptGuid);
- Map<String, String> nameGuidMap = getEmployeeNameGuidMap(hrDept);
- String maxGuid = nameGuidMap.get("Max");
- String janeGuid = nameGuidMap.get("Jane");
- String johnGuid = nameGuidMap.get("John");
-
- Assert.assertNotNull(maxGuid);
- Assert.assertNotNull(janeGuid);
- Assert.assertNotNull(johnGuid);
-
- // Verify that Max is one of Jane's subordinates.
- ITypedReferenceableInstance jane = repositoryService.getEntityDefinition(janeGuid);
- Object refValue = jane.get("subordinates");
- Assert.assertTrue(refValue instanceof List);
- List<Object> subordinates = (List<Object>)refValue;
- Assert.assertEquals(subordinates.size(), 2);
- List<String> subordinateIds = new ArrayList<>(2);
- for (Object listValue : subordinates) {
- Assert.assertTrue(listValue instanceof ITypedReferenceableInstance);
- ITypedReferenceableInstance employee = (ITypedReferenceableInstance) listValue;
- subordinateIds.add(employee.getId()._getId());
- }
- Assert.assertTrue(subordinateIds.contains(maxGuid));
-
-
- EntityResult entityResult = deleteEntities(maxGuid);
- ITypedReferenceableInstance john = repositoryService.getEntityDefinition("Person", "name", "John");
-
- assertEquals(entityResult.getDeletedEntities().size(), 1);
- assertTrue(entityResult.getDeletedEntities().contains(maxGuid));
- assertEquals(entityResult.getUpdateEntities().size(), 3);
- assertTrue(entityResult.getUpdateEntities().containsAll(Arrays.asList(jane.getId()._getId(), hrDeptGuid,
- john.getId()._getId())));
- assertEntityDeleted(maxGuid);
-
- assertMaxForTestDisconnectBidirectionalReferences(nameGuidMap);
-
- // Now delete jane - this should disconnect the manager reference from her
- // subordinate.
- entityResult = deleteEntities(janeGuid);
- assertEquals(entityResult.getDeletedEntities().size(), 1);
- assertTrue(entityResult.getDeletedEntities().contains(janeGuid));
- assertEquals(entityResult.getUpdateEntities().size(), 2);
- assertTrue(entityResult.getUpdateEntities().containsAll(Arrays.asList(hrDeptGuid, john.getId()._getId())));
-
- assertEntityDeleted(janeGuid);
-
- john = repositoryService.getEntityDefinition("Person", "name", "John");
- assertJohnForTestDisconnectBidirectionalReferences(john, janeGuid);
- }
-
- protected abstract void assertJohnForTestDisconnectBidirectionalReferences(ITypedReferenceableInstance john,
- String janeGuid) throws Exception;
-
- protected abstract void assertMaxForTestDisconnectBidirectionalReferences(Map<String, String> nameGuidMap)
- throws Exception;
-
- /**
- * Verify deleting entity that is the target of a unidirectional class array reference
- * from a class instance.
- */
- @Test
- public void testDisconnectUnidirectionalArrayReferenceFromClassType() throws Exception {
- createDbTableGraph(TestUtils.DATABASE_NAME, TestUtils.TABLE_NAME);
-
- // Get the guid for one of the table's columns.
- ITypedReferenceableInstance table = repositoryService.getEntityDefinition(TestUtils.TABLE_TYPE, "name", TestUtils.TABLE_NAME);
- String tableGuid = table.getId()._getId();
- List<ITypedReferenceableInstance> columns = (List<ITypedReferenceableInstance>) table.get("columns");
- Assert.assertEquals(columns.size(), 5);
- String columnGuid = columns.get(0).getId()._getId();
-
- // Delete the column.
- EntityResult entityResult = deleteEntities(columnGuid);
- assertEquals(entityResult.getDeletedEntities().size(), 1);
- Assert.assertTrue(entityResult.getDeletedEntities().contains(columnGuid));
- assertEquals(entityResult.getUpdateEntities().size(), 1);
- Assert.assertTrue(entityResult.getUpdateEntities().contains(tableGuid));
- assertEntityDeleted(columnGuid);
-
- // Verify table.columns reference to the deleted column has been disconnected.
- table = repositoryService.getEntityDefinition(tableGuid);
- assertTestDisconnectUnidirectionalArrayReferenceFromClassType(
- (List<ITypedReferenceableInstance>) table.get("columns"), columnGuid);
- }
-
- protected abstract void assertTestDisconnectUnidirectionalArrayReferenceFromClassType(
- List<ITypedReferenceableInstance> columns, String columnGuid);
-
- /**
- * Verify deleting entities that are the target of a unidirectional class array reference
- * from a struct or trait instance.
- */
- @Test
- public void testDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes() throws Exception {
- // Define class types.
- HierarchicalTypeDefinition<ClassType> structTargetDef = TypesUtil.createClassTypeDef("StructTarget",
- ImmutableSet.<String>of(), TypesUtil.createOptionalAttrDef("attr1", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> traitTargetDef = TypesUtil.createClassTypeDef("TraitTarget",
- ImmutableSet.<String>of(), TypesUtil.createOptionalAttrDef("attr1", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> structContainerDef = TypesUtil.createClassTypeDef("StructContainer",
- ImmutableSet.<String>of(), TypesUtil.createOptionalAttrDef("struct", "TestStruct"));
-
- // Define struct and trait types which have a unidirectional array reference
- // to a class type.
- StructTypeDefinition structDef = TypesUtil.createStructTypeDef("TestStruct",
- new AttributeDefinition("target", DataTypes.arrayTypeName("StructTarget"), Multiplicity.OPTIONAL, false, null),
- new AttributeDefinition("nestedStructs", DataTypes.arrayTypeName("NestedStruct"), Multiplicity.OPTIONAL, false, null));
- StructTypeDefinition nestedStructDef = TypesUtil.createStructTypeDef("NestedStruct",
- TypesUtil.createOptionalAttrDef("attr1", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<TraitType> traitDef = TypesUtil.createTraitTypeDef("TestTrait", ImmutableSet.<String>of(),
- new AttributeDefinition("target", DataTypes.arrayTypeName("TraitTarget"), Multiplicity.OPTIONAL, false, null));
-
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.of(structDef, nestedStructDef),
- ImmutableList.of(traitDef), ImmutableList.of(structTargetDef, traitTargetDef, structContainerDef));
- typeSystem.defineTypes(typesDef);
-
- // Create instances of class, struct, and trait types.
- Referenceable structTargetEntity = new Referenceable("StructTarget");
- Referenceable traitTargetEntity = new Referenceable("TraitTarget");
- Referenceable structContainerEntity = new Referenceable("StructContainer");
- Struct structInstance = new Struct("TestStruct");
- Struct nestedStructInstance = new Struct("NestedStruct");
- Referenceable traitInstance = new Referenceable("TestTrait");
- structContainerEntity.set("struct", structInstance);
- structInstance.set("target", ImmutableList.of(structTargetEntity));
- structInstance.set("nestedStructs", ImmutableList.of(nestedStructInstance));
-
- ClassType structTargetType = typeSystem.getDataType(ClassType.class, "StructTarget");
- ClassType traitTargetType = typeSystem.getDataType(ClassType.class, "TraitTarget");
- ClassType structContainerType = typeSystem.getDataType(ClassType.class, "StructContainer");
-
- ITypedReferenceableInstance structTargetConvertedEntity =
- structTargetType.convert(structTargetEntity, Multiplicity.REQUIRED);
- ITypedReferenceableInstance traitTargetConvertedEntity =
- traitTargetType.convert(traitTargetEntity, Multiplicity.REQUIRED);
- ITypedReferenceableInstance structContainerConvertedEntity =
- structContainerType.convert(structContainerEntity, Multiplicity.REQUIRED);
-
- List<String> guids = repositoryService.createEntities(
- structTargetConvertedEntity, traitTargetConvertedEntity, structContainerConvertedEntity).getCreatedEntities();
- Assert.assertEquals(guids.size(), 3);
-
- guids = repositoryService.getEntityList("StructTarget");
- Assert.assertEquals(guids.size(), 1);
- String structTargetGuid = guids.get(0);
-
- guids = repositoryService.getEntityList("TraitTarget");
- Assert.assertEquals(guids.size(), 1);
- String traitTargetGuid = guids.get(0);
-
- guids = repositoryService.getEntityList("StructContainer");
- Assert.assertEquals(guids.size(), 1);
- String structContainerGuid = guids.get(0);
-
- // Add TestTrait to StructContainer instance
- traitInstance.set("target", ImmutableList.of(new Id(traitTargetGuid, 0, "TraitTarget")));
- TraitType traitType = typeSystem.getDataType(TraitType.class, "TestTrait");
- ITypedStruct convertedTrait = traitType.convert(traitInstance, Multiplicity.REQUIRED);
- repositoryService.addTrait(structContainerGuid, convertedTrait);
-
- // Verify that the unidirectional references from the struct and trait instances
- // are pointing at the target entities.
- structContainerConvertedEntity = repositoryService.getEntityDefinition(structContainerGuid);
- Object object = structContainerConvertedEntity.get("struct");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof ITypedStruct);
- ITypedStruct struct = (ITypedStruct) object;
- object = struct.get("target");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof List);
- List<ITypedReferenceableInstance> refList = (List<ITypedReferenceableInstance>)object;
- Assert.assertEquals(refList.size(), 1);
- Assert.assertEquals(refList.get(0).getId()._getId(), structTargetGuid);
-
- IStruct trait = structContainerConvertedEntity.getTrait("TestTrait");
- Assert.assertNotNull(trait);
- object = trait.get("target");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof List);
- refList = (List<ITypedReferenceableInstance>)object;
- Assert.assertEquals(refList.size(), 1);
- Assert.assertEquals(refList.get(0).getId()._getId(), traitTargetGuid);
-
- // Delete the entities that are targets of the struct and trait instances.
- EntityResult entityResult = deleteEntities(structTargetGuid, traitTargetGuid);
- Assert.assertEquals(entityResult.getDeletedEntities().size(), 2);
- Assert.assertTrue(entityResult.getDeletedEntities().containsAll(Arrays.asList(structTargetGuid, traitTargetGuid)));
- assertEntityDeleted(structTargetGuid);
- assertEntityDeleted(traitTargetGuid);
-
- assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(structContainerGuid);
-
- // Delete the entity which contains nested structs and has the TestTrait trait.
- entityResult = deleteEntities(structContainerGuid);
- Assert.assertEquals(entityResult.getDeletedEntities().size(), 1);
- Assert.assertTrue(entityResult.getDeletedEntities().contains(structContainerGuid));
- assertEntityDeleted(structContainerGuid);
-
- // Verify all TestStruct struct vertices were removed.
- assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "TestStruct"));
-
- // Verify all NestedStruct struct vertices were removed.
- assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "NestedStruct"));
-
- // Verify all TestTrait trait vertices were removed.
- assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "TestTrait"));
- }
-
- protected abstract void assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(
- String structContainerGuid) throws Exception;
-
- /**
- * Verify deleting entities that are the target of class map references.
- */
- @Test
- public void testDisconnectMapReferenceFromClassType() throws Exception {
- // Define type for map value.
- HierarchicalTypeDefinition<ClassType> mapValueDef = TypesUtil.createClassTypeDef("MapValue",
- ImmutableSet.<String>of(),
- new AttributeDefinition("biMapOwner", "MapOwner", Multiplicity.OPTIONAL, false, "biMap"));
-
- // Define type with unidirectional and bidirectional map references,
- // where the map value is a class reference to MapValue.
- HierarchicalTypeDefinition<ClassType> mapOwnerDef = TypesUtil.createClassTypeDef("MapOwner",
- ImmutableSet.<String>of(),
- new AttributeDefinition("map", DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
- "MapValue"), Multiplicity.OPTIONAL, false, null),
- new AttributeDefinition("biMap", DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
- "MapValue"), Multiplicity.OPTIONAL, false, "biMapOwner"));
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(mapOwnerDef, mapValueDef));
- typeSystem.defineTypes(typesDef);
- ClassType mapOwnerType = typeSystem.getDataType(ClassType.class, "MapOwner");
- ClassType mapValueType = typeSystem.getDataType(ClassType.class, "MapValue");
-
- // Create instances of MapOwner and MapValue.
- // Set MapOwner.map and MapOwner.biMap with one entry that references MapValue instance.
- ITypedReferenceableInstance mapOwnerInstance = mapOwnerType.createInstance();
- ITypedReferenceableInstance mapValueInstance = mapValueType.createInstance();
- mapOwnerInstance.set("map", Collections.singletonMap("value1", mapValueInstance));
- mapOwnerInstance.set("biMap", Collections.singletonMap("value1", mapValueInstance));
- // Set biMapOwner reverse reference on MapValue.
- mapValueInstance.set("biMapOwner", mapOwnerInstance);
- List<String> createEntitiesResult = repositoryService.createEntities(mapOwnerInstance, mapValueInstance).getCreatedEntities();
- Assert.assertEquals(createEntitiesResult.size(), 2);
- List<String> guids = repositoryService.getEntityList("MapOwner");
- Assert.assertEquals(guids.size(), 1);
- String mapOwnerGuid = guids.get(0);
-
- String edgeLabel = GraphHelper.getEdgeLabel(mapOwnerType, mapOwnerType.fieldMapping.fields.get("map"));
- String mapEntryLabel = edgeLabel + "." + "value1";
- AtlasEdgeLabel atlasEdgeLabel = new AtlasEdgeLabel(mapEntryLabel);
-
- // Verify MapOwner.map attribute has expected value.
- String mapValueGuid = null;
- AtlasVertex mapOwnerVertex = null;
- mapOwnerInstance = repositoryService.getEntityDefinition(mapOwnerGuid);
- for (String mapAttrName : Arrays.asList("map", "biMap")) {
- Object object = mapOwnerInstance.get(mapAttrName);
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof Map);
- Map<String, ITypedReferenceableInstance> map = (Map<String, ITypedReferenceableInstance>)object;
- Assert.assertEquals(map.size(), 1);
- mapValueInstance = map.get("value1");
- Assert.assertNotNull(mapValueInstance);
- mapValueGuid = mapValueInstance.getId()._getId();
- mapOwnerVertex = GraphHelper.getInstance().getVertexForGUID(mapOwnerGuid);
- object = mapOwnerVertex.getProperty(atlasEdgeLabel.getQualifiedMapKey(), Object.class);
- Assert.assertNotNull(object);
- }
-
- // Delete the map value instance.
- // This should disconnect the references from the map owner instance.
- deleteEntities(mapValueGuid);
- assertEntityDeleted(mapValueGuid);
- assertTestDisconnectMapReferenceFromClassType(mapOwnerGuid);
- }
-
- protected abstract void assertTestDisconnectMapReferenceFromClassType(String mapOwnerGuid) throws Exception;
-
- @Test
- public void testDeleteTargetOfMultiplicityOneRequiredReference() throws Exception {
- createDbTableGraph("db1", "table1");
- ITypedReferenceableInstance db = repositoryService.getEntityDefinition(TestUtils.DATABASE_TYPE, "name", "db1");
- try {
- // table1 references db1 through the required reference hive_table.database.
- // Attempt to delete db1 should cause a NullRequiredAttributeException,
- // as that would violate the lower bound on table1's database attribute.
- deleteEntities(db.getId()._getId());
- Assert.fail("Lower bound on attribute hive_table.database was not enforced - " +
- NullRequiredAttributeException.class.getSimpleName() + " was expected but none thrown");
- }
- catch (Exception e) {
- verifyExceptionThrown(e, NullRequiredAttributeException.class);
- }
-
- // Delete table1.
- ITypedReferenceableInstance table1 = repositoryService.getEntityDefinition(TestUtils.TABLE_TYPE, "name", "table1");
- Assert.assertNotNull(table1);
- deleteEntities(table1.getId()._getId());
-
- // Now delete of db1 should succeed, since it is no longer the target
- // of the required reference from the deleted table1.
- deleteEntities(db.getId()._getId());
- }
-
- @Test
- public void testDeleteTargetOfMultiplicityManyRequiredReference() throws Exception {
- String deptGuid = createHrDeptGraph();
- ITypedReferenceableInstance hrDept = repositoryService.getEntityDefinition(deptGuid);
- Map<String, String> nameGuidMap = getEmployeeNameGuidMap(hrDept);
-
- // Delete John - this should work, as it would reduce the cardinality of Jane's subordinates reference
- // from 2 to 1.
- deleteEntities(nameGuidMap.get("John"));
-
- // Attempt to delete Max - this should cause a NullRequiredAttributeException,
- // as that would reduce the cardinality on Jane's subordinates reference from 1 to 0
- // and violate the lower bound.
- try {
- deleteEntities(nameGuidMap.get("Max"));
- assertTestDeleteTargetOfMultiplicityRequiredReference();
- }
- catch (Exception e) {
- verifyExceptionThrown(e, NullRequiredAttributeException.class);
- }
- }
-
- protected abstract void assertTestDeleteTargetOfMultiplicityRequiredReference() throws Exception;
-
- @Test
- public void testDeleteTargetOfRequiredMapReference() throws Exception {
- // Define type for map value.
- HierarchicalTypeDefinition<ClassType> mapValueDef = TypesUtil.createClassTypeDef("RequiredMapValue",
- ImmutableSet.<String>of());
-
- // Define type with required map references where the map value is a class reference to RequiredMapValue.
- HierarchicalTypeDefinition<ClassType> mapOwnerDef = TypesUtil.createClassTypeDef("RequiredMapOwner",
- ImmutableSet.<String>of(),
- new AttributeDefinition("map", DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
- "RequiredMapValue"), Multiplicity.REQUIRED, false, null));
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(mapOwnerDef, mapValueDef));
- typeSystem.defineTypes(typesDef);
- ClassType mapOwnerType = typeSystem.getDataType(ClassType.class, "RequiredMapOwner");
- ClassType mapValueType = typeSystem.getDataType(ClassType.class, "RequiredMapValue");
-
- // Create instances of RequiredMapOwner and RequiredMapValue.
- // Set RequiredMapOwner.map with one entry that references RequiredMapValue instance.
- ITypedReferenceableInstance mapOwnerInstance = mapOwnerType.createInstance();
- ITypedReferenceableInstance mapValueInstance = mapValueType.createInstance();
- mapOwnerInstance.set("map", Collections.singletonMap("value1", mapValueInstance));
- List<String> createEntitiesResult = repositoryService.createEntities(mapOwnerInstance, mapValueInstance).getCreatedEntities();
- Assert.assertEquals(createEntitiesResult.size(), 2);
- List<String> guids = repositoryService.getEntityList("RequiredMapOwner");
- Assert.assertEquals(guids.size(), 1);
- String mapOwnerGuid = guids.get(0);
- guids = repositoryService.getEntityList("RequiredMapValue");
- Assert.assertEquals(guids.size(), 1);
- String mapValueGuid = guids.get(0);
-
- // Verify MapOwner.map attribute has expected value.
- mapOwnerInstance = repositoryService.getEntityDefinition(mapOwnerGuid);
- Object object = mapOwnerInstance.get("map");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof Map);
- Map<String, ITypedReferenceableInstance> map = (Map<String, ITypedReferenceableInstance>)object;
- Assert.assertEquals(map.size(), 1);
- mapValueInstance = map.get("value1");
- Assert.assertNotNull(mapValueInstance);
- Assert.assertEquals(mapValueInstance.getId()._getId(), mapValueGuid);
- String edgeLabel = GraphHelper.getEdgeLabel(mapOwnerType, mapOwnerType.fieldMapping.fields.get("map"));
- String mapEntryLabel = edgeLabel + "." + "value1";
- AtlasEdgeLabel atlasEdgeLabel = new AtlasEdgeLabel(mapEntryLabel);
- AtlasVertex mapOwnerVertex = GraphHelper.getInstance().getVertexForGUID(mapOwnerGuid);
- object = mapOwnerVertex.getProperty(atlasEdgeLabel.getQualifiedMapKey(), Object.class);
- Assert.assertNotNull(object);
-
- // Verify deleting the target of required map attribute throws a NullRequiredAttributeException.
- try {
- deleteEntities(mapValueGuid);
- Assert.fail(NullRequiredAttributeException.class.getSimpleName() + " was expected but none thrown.");
- }
- catch (Exception e) {
- verifyExceptionThrown(e, NullRequiredAttributeException.class);
- }
- }
-
- @Test
- public void testLowerBoundsIgnoredOnDeletedEntities() throws Exception {
-
- String hrDeptGuid = createHrDeptGraph();
- ITypedReferenceableInstance hrDept = repositoryService.getEntityDefinition(hrDeptGuid);
- Map<String, String> nameGuidMap = getEmployeeNameGuidMap(hrDept);
-
- ITypedReferenceableInstance john = repositoryService.getEntityDefinition(nameGuidMap.get("John"));
- String johnGuid = john.getId()._getId();
-
- ITypedReferenceableInstance max = repositoryService.getEntityDefinition(nameGuidMap.get("Max"));
- String maxGuid = max.getId()._getId();
-
- ITypedReferenceableInstance jane = repositoryService.getEntityDefinition(nameGuidMap.get("Jane"));
- String janeGuid = jane.getId()._getId();
-
- // The lower bound constraint on Manager.subordinates should not be enforced on Jane since that entity is being deleted.
- // Prior to the fix for ATLAS-991, this call would fail with a NullRequiredAttributeException.
- EntityResult deleteResult = deleteEntities(johnGuid, maxGuid, janeGuid);
- Assert.assertEquals(deleteResult.getDeletedEntities().size(), 3);
- Assert.assertTrue(deleteResult.getDeletedEntities().containsAll(Arrays.asList(johnGuid, maxGuid, janeGuid)));
- Assert.assertEquals(deleteResult.getUpdateEntities().size(), 1);
-
- // Verify that Department entity was updated to disconnect its references to the deleted employees.
- Assert.assertEquals(deleteResult.getUpdateEntities().get(0), hrDeptGuid);
- hrDept = repositoryService.getEntityDefinition(hrDeptGuid);
- Object object = hrDept.get("employees");
- Assert.assertTrue(object instanceof List);
- List<ITypedReferenceableInstance> employees = (List<ITypedReferenceableInstance>) object;
- assertTestLowerBoundsIgnoredOnDeletedEntities(employees);
- }
-
- protected abstract void assertTestLowerBoundsIgnoredOnDeletedEntities(List<ITypedReferenceableInstance> employees);
-
- @Test
- public void testLowerBoundsIgnoredOnCompositeDeletedEntities() throws Exception {
- String hrDeptGuid = createHrDeptGraph();
- ITypedReferenceableInstance hrDept = repositoryService.getEntityDefinition(hrDeptGuid);
- Map<String, String> nameGuidMap = getEmployeeNameGuidMap(hrDept);
- ITypedReferenceableInstance john = repositoryService.getEntityDefinition(nameGuidMap.get("John"));
- String johnGuid = john.getId()._getId();
- ITypedReferenceableInstance max = repositoryService.getEntityDefinition(nameGuidMap.get("Max"));
- String maxGuid = max.getId()._getId();
-
- // The lower bound constraint on Manager.subordinates should not be enforced on the composite entity
- // for Jane owned by the Department entity, since that entity is being deleted.
- // Prior to the fix for ATLAS-991, this call would fail with a NullRequiredAttributeException.
- EntityResult deleteResult = deleteEntities(johnGuid, maxGuid, hrDeptGuid);
- Assert.assertEquals(deleteResult.getDeletedEntities().size(), 5);
- Assert.assertTrue(deleteResult.getDeletedEntities().containsAll(nameGuidMap.values()));
- Assert.assertTrue(deleteResult.getDeletedEntities().contains(hrDeptGuid));
- assertTestLowerBoundsIgnoredOnCompositeDeletedEntities(hrDeptGuid);
- }
-
-
- protected abstract void assertTestLowerBoundsIgnoredOnCompositeDeletedEntities(String hrDeptGuid) throws Exception;
-
- @Test
- public void testLowerBoundsIgnoredWhenDeletingCompositeEntitesOwnedByMap() throws Exception {
- // Define MapValueReferencer type with required reference to CompositeMapValue.
- HierarchicalTypeDefinition<ClassType> mapValueReferencerTypeDef = TypesUtil.createClassTypeDef("MapValueReferencer",
- ImmutableSet.<String>of(),
- new AttributeDefinition("refToMapValue", "CompositeMapValue", Multiplicity.REQUIRED, false, null));
-
- // Define MapValueReferencerContainer type with required composite map reference to MapValueReferencer.
- HierarchicalTypeDefinition<ClassType> mapValueReferencerContainerTypeDef =
- TypesUtil.createClassTypeDef("MapValueReferencerContainer",
- ImmutableSet.<String>of(),
- new AttributeDefinition("requiredMap", DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(), "MapValueReferencer"), Multiplicity.REQUIRED, true, null));
-
- Map<String, IDataType> definedClassTypes = typeSystem.defineClassTypes(mapValueReferencerTypeDef, mapValueReferencerContainerTypeDef);
- ClassType mapValueReferencerClassType = (ClassType) definedClassTypes.get("MapValueReferencer");
- ClassType mapValueReferencerContainerType = (ClassType) definedClassTypes.get("MapValueReferencerContainer");
-
- // Create instances of CompositeMapOwner and CompositeMapValue.
- // Set MapOwner.map with one entry that references MapValue instance.
- ITypedReferenceableInstance entityDefinition = createMapOwnerAndValueEntities();
- String mapOwnerGuid = entityDefinition.getId()._getId();
-
- // Verify MapOwner.map attribute has expected value.
- ITypedReferenceableInstance mapOwnerInstance = repositoryService.getEntityDefinition(mapOwnerGuid);
- Object object = mapOwnerInstance.get("map");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof Map);
- Map<String, ITypedReferenceableInstance> map = (Map<String, ITypedReferenceableInstance>)object;
- Assert.assertEquals(map.size(), 1);
- ITypedReferenceableInstance mapValueInstance = map.get("value1");
- Assert.assertNotNull(mapValueInstance);
- String mapValueGuid = mapValueInstance.getId()._getId();
-
- // Create instance of MapValueReferencerContainer
- RequestContext.createContext();
- ITypedReferenceableInstance mapValueReferencerContainer = mapValueReferencerContainerType.createInstance();
- List<String> createdEntities = repositoryService.createEntities(mapValueReferencerContainer).getCreatedEntities();
- Assert.assertEquals(createdEntities.size(), 1);
- String mapValueReferencerContainerGuid = createdEntities.get(0);
- mapValueReferencerContainer = repositoryService.getEntityDefinition(createdEntities.get(0));
-
- // Create instance of MapValueReferencer, and update mapValueReferencerContainer
- // to reference it.
- ITypedReferenceableInstance mapValueReferencer = mapValueReferencerClassType.createInstance();
- mapValueReferencerContainer.set("requiredMap", Collections.singletonMap("value1", mapValueReferencer));
- mapValueReferencer.set("refToMapValue", mapValueInstance.getId());
-
- RequestContext.createContext();
- EntityResult updateEntitiesResult = repositoryService.updateEntities(mapValueReferencerContainer).getEntityResult();
- Assert.assertEquals(updateEntitiesResult.getCreatedEntities().size(), 1);
- Assert.assertEquals(updateEntitiesResult.getUpdateEntities().size(), 1);
- Assert.assertEquals(updateEntitiesResult.getUpdateEntities().get(0), mapValueReferencerContainerGuid);
- String mapValueReferencerGuid = updateEntitiesResult.getCreatedEntities().get(0);
-
- // Delete map owner and map referencer container. A total of 4 entities should be deleted,
- // including the composite entities. The lower bound constraint on MapValueReferencer.refToMapValue
- // should not be enforced on the composite MapValueReferencer since it is being deleted.
- EntityResult deleteEntitiesResult = repositoryService.deleteEntities(Arrays.asList(mapOwnerGuid, mapValueReferencerContainerGuid));
- Assert.assertEquals(deleteEntitiesResult.getDeletedEntities().size(), 4);
- Assert.assertTrue(deleteEntitiesResult.getDeletedEntities().containsAll(
- Arrays.asList(mapOwnerGuid, mapValueGuid, mapValueReferencerContainerGuid, mapValueReferencerGuid)));
- }
-
- @Test
- public void testDeleteMixOfExistentAndNonExistentEntities() throws Exception {
- ITypedReferenceableInstance entity1 = compositeMapValueType.createInstance();
- ITypedReferenceableInstance entity2 = compositeMapValueType.createInstance();
- List<String> createEntitiesResult = repositoryService.createEntities(entity1, entity2).getCreatedEntities();
- Assert.assertEquals(createEntitiesResult.size(), 2);
- List<String> guids = Arrays.asList(createEntitiesResult.get(0), "non-existent-guid1", "non-existent-guid2", createEntitiesResult.get(1));
- EntityResult deleteEntitiesResult = repositoryService.deleteEntities(guids);
- Assert.assertEquals(deleteEntitiesResult.getDeletedEntities().size(), 2);
- Assert.assertTrue(deleteEntitiesResult.getDeletedEntities().containsAll(createEntitiesResult));
- }
-
- @Test
- public void testDeleteMixOfNullAndNonNullGuids() throws Exception {
- ITypedReferenceableInstance entity1 = compositeMapValueType.createInstance();
- ITypedReferenceableInstance entity2 = compositeMapValueType.createInstance();
- List<String> createEntitiesResult = repositoryService.createEntities(entity1, entity2).getCreatedEntities();
- Assert.assertEquals(createEntitiesResult.size(), 2);
- List<String> guids = Arrays.asList(createEntitiesResult.get(0), null, null, createEntitiesResult.get(1));
- EntityResult deleteEntitiesResult = repositoryService.deleteEntities(guids);
- Assert.assertEquals(deleteEntitiesResult.getDeletedEntities().size(), 2);
- Assert.assertTrue(deleteEntitiesResult.getDeletedEntities().containsAll(createEntitiesResult));
- }
-
- @Test
- public void testDeleteCompositeEntityAndContainer() throws Exception {
- Referenceable db = createDBEntity();
- String dbId = createInstance(db);
-
- Referenceable column = createColumnEntity();
- String colId = createInstance(column);
-
- Referenceable table1 = createTableEntity(dbId);
- table1.set(COLUMNS_ATTR_NAME, Arrays.asList(new Id(colId, 0, COLUMN_TYPE)));
- String table1Id = createInstance(table1);
- Referenceable table2 = createTableEntity(dbId);
- String table2Id = createInstance(table2);
-
- // Delete the tables and column
- EntityResult entityResult = deleteEntities(table1Id, colId, table2Id);
- Assert.assertEquals(entityResult.getDeletedEntities().size(), 3);
- Assert.assertTrue(entityResult.getDeletedEntities().containsAll(Arrays.asList(colId, table1Id, table2Id)));
- assertEntityDeleted(table1Id);
- assertEntityDeleted(colId);
- assertEntityDeleted(table2Id);
- }
-
- @Test
- public void testDeleteEntityWithDuplicateReferenceListElements() throws Exception {
- // Create a table entity, with 2 composite column entities
- Referenceable dbEntity = createDBEntity();
- String dbGuid = createInstance(dbEntity);
- Referenceable table1Entity = createTableEntity(dbGuid);
- String tableName = TestUtils.randomString();
- table1Entity.set(NAME, tableName);
- Referenceable col1 = createColumnEntity();
- col1.set(NAME, TestUtils.randomString());
- Referenceable col2 = createColumnEntity();
- col2.set(NAME, TestUtils.randomString());
- // Populate columns reference list with duplicates.
- table1Entity.set(COLUMNS_ATTR_NAME, ImmutableList.of(col1, col2, col1, col2));
- ClassType dataType = typeSystem.getDataType(ClassType.class, table1Entity.getTypeName());
- ITypedReferenceableInstance instance = dataType.convert(table1Entity, Multiplicity.REQUIRED);
- TestUtils.resetRequestContext();
- List<String> result = repositoryService.createEntities(instance).getCreatedEntities();
- Assert.assertEquals(result.size(), 3);
- ITypedReferenceableInstance entityDefinition = repositoryService.getEntityDefinition(TABLE_TYPE, NAME, tableName);
- String tableGuid = entityDefinition.getId()._getId();
- Object attrValue = entityDefinition.get(COLUMNS_ATTR_NAME);
- assertTrue(attrValue instanceof List);
- List<ITypedReferenceableInstance> columns = (List<ITypedReferenceableInstance>) attrValue;
- Assert.assertEquals(columns.size(), 4);
- TestUtils.resetRequestContext();
- String columnGuid = columns.get(0).getId()._getId();
-
- // Delete one of the columns.
- EntityResult deleteResult = repositoryService.deleteEntities(Collections.singletonList(columnGuid));
- Assert.assertEquals(deleteResult.getDeletedEntities().size(), 1);
- Assert.assertTrue(deleteResult.getDeletedEntities().contains(columnGuid));
- Assert.assertEquals(deleteResult.getUpdateEntities().size(), 1);
- Assert.assertTrue(deleteResult.getUpdateEntities().contains(tableGuid));
-
- // Verify the duplicate edge IDs were all removed from reference property list.
- AtlasVertex tableVertex = GraphHelper.getInstance().getVertexForGUID(tableGuid);
- String columnsPropertyName = GraphHelper.getQualifiedFieldName(dataType, COLUMNS_ATTR_NAME);
- List columnsPropertyValue = tableVertex.getProperty(columnsPropertyName, List.class);
- verifyTestDeleteEntityWithDuplicateReferenceListElements(columnsPropertyValue);
- }
-
- protected abstract void verifyTestDeleteEntityWithDuplicateReferenceListElements(List columnsPropertyValue);
-
- private String createHrDeptGraph() throws Exception {
- ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(typeSystem);
-
- List<String> guids = repositoryService.createEntities(hrDept).getCreatedEntities();
- Assert.assertNotNull(guids);
- Assert.assertEquals(guids.size(), 5);
-
- return getDepartmentGuid(guids);
- }
-
- private String getDepartmentGuid(List<String> guids)
- throws RepositoryException, EntityNotFoundException {
-
- String hrDeptGuid = null;
- for (String guid : guids) {
- ITypedReferenceableInstance entityDefinition = repositoryService.getEntityDefinition(guid);
- Id id = entityDefinition.getId();
- if (id.getTypeName().equals("Department")) {
- hrDeptGuid = id._getId();
- break;
- }
- }
- if (hrDeptGuid == null) {
- Assert.fail("Entity for type Department not found");
- }
- return hrDeptGuid;
- }
-
- private void createDbTableGraph(String dbName, String tableName) throws Exception {
- Referenceable databaseInstance = new Referenceable(TestUtils.DATABASE_TYPE);
- databaseInstance.set("name", dbName);
- databaseInstance.set("description", "foo database");
-
- ClassType dbType = typeSystem.getDataType(ClassType.class, TestUtils.DATABASE_TYPE);
- ITypedReferenceableInstance db = dbType.convert(databaseInstance, Multiplicity.REQUIRED);
- Referenceable tableInstance = new Referenceable(TestUtils.TABLE_TYPE, TestUtils.CLASSIFICATION);
- tableInstance.set("name", tableName);
- tableInstance.set("description", "bar table");
- tableInstance.set("type", "managed");
- Struct traitInstance = (Struct) tableInstance.getTrait(TestUtils.CLASSIFICATION);
- traitInstance.set("tag", "foundation_etl");
- tableInstance.set("tableType", 1); // enum
-
- tableInstance.set("database", databaseInstance);
- ArrayList<Referenceable> columns = new ArrayList<>();
- for (int index = 0; index < 5; index++) {
- Referenceable columnInstance = new Referenceable("column_type");
- final String name = "column_" + index;
- columnInstance.set("name", name);
- columnInstance.set("type", "string");
- columns.add(columnInstance);
- }
- tableInstance.set("columns", columns);
- ClassType tableType = typeSystem.getDataType(ClassType.class, TestUtils.TABLE_TYPE);
- ITypedReferenceableInstance table = tableType.convert(tableInstance, Multiplicity.REQUIRED);
- repositoryService.createEntities(db, table);
- }
-
- protected List<AtlasVertex> getVertices(String propertyName, Object value) {
- AtlasGraph graph = TestUtils.getGraph();
- Iterable<AtlasVertex> vertices = graph.getVertices(propertyName, value);
- List<AtlasVertex> list = new ArrayList<>();
- for (AtlasVertex vertex : vertices) {
- list.add(vertex);
- }
- return list;
- }
-
- private Map<String, String> getEmployeeNameGuidMap(final ITypedReferenceableInstance hrDept) throws AtlasException {
- Object refValue = hrDept.get("employees");
- Assert.assertTrue(refValue instanceof List);
- List<Object> employees = (List<Object>)refValue;
- Assert.assertEquals(employees.size(), 4);
- Map<String, String> nameGuidMap = new HashMap<String, String>() {{
- put("hr", hrDept.getId()._getId());
- }};
-
- for (Object listValue : employees) {
- Assert.assertTrue(listValue instanceof ITypedReferenceableInstance);
- ITypedReferenceableInstance employee = (ITypedReferenceableInstance) listValue;
- nameGuidMap.put((String)employee.get("name"), employee.getId()._getId());
- }
- return nameGuidMap;
- }
-
- /**
- * Search exception cause chain for specified exception.
- *
- * @param thrown root of thrown exception chain
- * @param expected class of expected exception
- */
- private void verifyExceptionThrown(Exception thrown, Class expected) {
-
- boolean exceptionFound = false;
- Throwable cause = thrown;
- while (cause != null) {
- if (expected.isInstance(cause)) {
- // good
- exceptionFound = true;
- break;
- }
- else {
- cause = cause.getCause();
- }
- }
- if (!exceptionFound) {
- Assert.fail(expected.getSimpleName() + " was expected but not thrown", thrown);
- }
- }
-}
[36/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationConsumerTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationConsumerTest.java b/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationConsumerTest.java
index f313ddc..caa72ce 100644
--- a/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationConsumerTest.java
+++ b/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationConsumerTest.java
@@ -18,13 +18,15 @@
package org.apache.atlas.notification;
-import com.google.gson.Gson;
-import com.google.gson.reflect.TypeToken;
import org.apache.atlas.kafka.AtlasKafkaMessage;
+import org.apache.atlas.model.notification.AtlasNotificationMessage;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.model.notification.MessageVersion;
+import org.codehaus.jackson.type.TypeReference;
import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
import org.testng.annotations.Test;
-import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
@@ -41,8 +43,6 @@ import org.apache.kafka.common.TopicPartition;
*/
public class AbstractNotificationConsumerTest {
- private static final Gson GSON = new Gson();
-
@Test
public void testReceive() throws Exception {
Logger logger = mock(Logger.class);
@@ -54,27 +54,24 @@ public class AbstractNotificationConsumerTest {
List jsonList = new LinkedList<>();
- jsonList.add(GSON.toJson(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage1)));
- jsonList.add(GSON.toJson(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage2)));
- jsonList.add(GSON.toJson(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage3)));
- jsonList.add(GSON.toJson(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage4)));
-
- Type notificationMessageType = new TypeToken<AtlasNotificationMessage<TestMessage>>(){}.getType();
+ jsonList.add(AtlasType.toV1Json(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage1)));
+ jsonList.add(AtlasType.toV1Json(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage2)));
+ jsonList.add(AtlasType.toV1Json(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage3)));
+ jsonList.add(AtlasType.toV1Json(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage4)));
- NotificationConsumer<TestMessage> consumer =
- new TestNotificationConsumer<>(notificationMessageType, jsonList, logger);
+ NotificationConsumer<TestMessage> consumer = new TestNotificationConsumer(jsonList, logger);
List<AtlasKafkaMessage<TestMessage>> messageList = consumer.receive();
assertFalse(messageList.isEmpty());
- assertEquals(testMessage1, messageList.get(0).getMessage());
+ assertEquals(messageList.get(0).getMessage(), testMessage1);
- assertEquals(testMessage2, messageList.get(1).getMessage());
+ assertEquals(messageList.get(1).getMessage(), testMessage2);
- assertEquals(testMessage3, messageList.get(2).getMessage());
+ assertEquals(messageList.get(2).getMessage(), testMessage3);
- assertEquals(testMessage4, messageList.get(3).getMessage());
+ assertEquals(messageList.get(3).getMessage(), testMessage4);
}
@Test
@@ -88,20 +85,17 @@ public class AbstractNotificationConsumerTest {
List jsonList = new LinkedList<>();
- String json1 = GSON.toJson(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage1));
- String json2 = GSON.toJson(new AtlasNotificationMessage<>(new MessageVersion("0.0.5"), testMessage2));
- String json3 = GSON.toJson(new AtlasNotificationMessage<>(new MessageVersion("0.5.0"), testMessage3));
- String json4 = GSON.toJson(testMessage4);
+ String json1 = AtlasType.toV1Json(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage1));
+ String json2 = AtlasType.toV1Json(new AtlasNotificationMessage<>(new MessageVersion("0.0.5"), testMessage2));
+ String json3 = AtlasType.toV1Json(new AtlasNotificationMessage<>(new MessageVersion("0.5.0"), testMessage3));
+ String json4 = AtlasType.toV1Json(testMessage4);
jsonList.add(json1);
jsonList.add(json2);
jsonList.add(json3);
jsonList.add(json4);
- Type notificationMessageType = new TypeToken<AtlasNotificationMessage<TestMessage>>(){}.getType();
-
- NotificationConsumer<TestMessage> consumer =
- new TestNotificationConsumer<>(notificationMessageType, jsonList, logger);
+ NotificationConsumer<TestMessage> consumer = new TestNotificationConsumer(jsonList, logger);
List<AtlasKafkaMessage<TestMessage>> messageList = consumer.receive();
@@ -124,16 +118,13 @@ public class AbstractNotificationConsumerTest {
List jsonList = new LinkedList<>();
- String json1 = GSON.toJson(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage1));
- String json2 = GSON.toJson(new AtlasNotificationMessage<>(new MessageVersion("2.0.0"), testMessage2));
+ String json1 = AtlasType.toV1Json(new AtlasNotificationMessage<>(new MessageVersion("1.0.0"), testMessage1));
+ String json2 = AtlasType.toV1Json(new AtlasNotificationMessage<>(new MessageVersion("2.0.0"), testMessage2));
jsonList.add(json1);
jsonList.add(json2);
- Type notificationMessageType = new TypeToken<AtlasNotificationMessage<TestMessage>>(){}.getType();
-
- NotificationConsumer<TestMessage> consumer =
- new TestNotificationConsumer<>(notificationMessageType, jsonList, logger);
+ NotificationConsumer<TestMessage> consumer = new TestNotificationConsumer(jsonList, logger);
try {
List<AtlasKafkaMessage<TestMessage>> messageList = consumer.receive();
@@ -150,7 +141,10 @@ public class AbstractNotificationConsumerTest {
private static class TestMessage {
private String s;
- private int i;
+ private int i;
+
+ public TestMessage() {
+ }
public TestMessage(String s, int i) {
this.s = s;
@@ -165,6 +159,14 @@ public class AbstractNotificationConsumerTest {
this.s = s;
}
+ public int getI() {
+ return i;
+ }
+
+ public void setI(int i) {
+ this.i = i;
+ }
+
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -180,12 +182,14 @@ public class AbstractNotificationConsumerTest {
}
}
- private static class TestNotificationConsumer<T> extends AbstractNotificationConsumer<T> {
- private final List<T> messageList;
- private int index = 0;
+ private static class TestNotificationConsumer extends AbstractNotificationConsumer<TestMessage> {
+ private final List<TestMessage> messageList;
+ private int index = 0;
+
+
+ public TestNotificationConsumer(List<TestMessage> messages, Logger logger) {
+ super(new TestMessageDeserializer());
- public TestNotificationConsumer(Type notificationMessageType, List<T> messages, Logger logger) {
- super(new TestDeserializer<T>(notificationMessageType, logger));
this.messageList = messages;
}
@@ -205,24 +209,35 @@ public class AbstractNotificationConsumerTest {
}
@Override
- public List<AtlasKafkaMessage<T>> receive() {
+ public List<AtlasKafkaMessage<TestMessage>> receive() {
return receive(1000L);
}
@Override
- public List<AtlasKafkaMessage<T>> receive(long timeoutMilliSeconds) {
- List<AtlasKafkaMessage<T>> tempMessageList = new ArrayList();
+ public List<AtlasKafkaMessage<TestMessage>> receive(long timeoutMilliSeconds) {
+ List<AtlasKafkaMessage<TestMessage>> tempMessageList = new ArrayList();
for(Object json : messageList) {
- tempMessageList.add(new AtlasKafkaMessage(deserializer.deserialize((String)json), -1, -1));
+ tempMessageList.add(new AtlasKafkaMessage(deserializer.deserialize((String) json), -1, -1));
}
return tempMessageList;
}
}
- private static final class TestDeserializer<T> extends AtlasNotificationMessageDeserializer<T> {
+ public static class TestMessageDeserializer extends AbstractMessageDeserializer<TestMessage> {
+ /**
+ * Logger for test message deserialization.
+ */
+ private static final Logger NOTIFICATION_LOGGER = LoggerFactory.getLogger(TestMessageDeserializer.class);
+
+
+ // ----- Constructors ----------------------------------------------------
- private TestDeserializer(Type notificationMessageType, Logger logger) {
- super(notificationMessageType, AbstractNotification.CURRENT_MESSAGE_VERSION, GSON, logger);
+ /**
+ * Create a test message deserializer.
+ */
+ public TestMessageDeserializer() {
+ super(new TypeReference<TestMessage>() {}, new TypeReference<AtlasNotificationMessage<TestMessage>>() {},
+ AbstractNotification.CURRENT_MESSAGE_VERSION, NOTIFICATION_LOGGER);
}
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationTest.java b/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationTest.java
index 655252c..94cb70d 100644
--- a/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationTest.java
+++ b/notification/src/test/java/org/apache/atlas/notification/AbstractNotificationTest.java
@@ -19,12 +19,14 @@
package org.apache.atlas.notification;
import org.apache.atlas.AtlasException;
-import org.apache.atlas.notification.hook.HookNotification;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.model.notification.HookNotification.HookNotificationType;
+import org.apache.atlas.notification.NotificationInterface.NotificationType;
+import org.apache.atlas.type.AtlasType;
import org.apache.commons.configuration.Configuration;
-import org.testng.annotations.Test;
import java.util.ArrayList;
-import java.util.LinkedList;
+import java.util.Arrays;
import java.util.List;
import java.util.Map;
@@ -36,70 +38,64 @@ import static org.testng.Assert.*;
*/
public class AbstractNotificationTest {
- @Test
+ @org.testng.annotations.Test
public void testSend() throws Exception {
- Configuration configuration = mock(Configuration.class);
+ Configuration configuration = mock(Configuration.class);
+ TestNotification notification = new TestNotification(configuration);
+ Test message1 = new Test(HookNotificationType.ENTITY_CREATE, "user1");
+ Test message2 = new Test(HookNotificationType.TYPE_CREATE, "user1");
+ Test message3 = new Test(HookNotificationType.ENTITY_FULL_UPDATE, "user1");
+ List<String> messageJson = new ArrayList<>();
- TestNotification notification = new TestNotification(configuration);
-
- TestMessage message1 = new TestMessage(HookNotification.HookNotificationType.ENTITY_CREATE, "user1");
- TestMessage message2 = new TestMessage(HookNotification.HookNotificationType.TYPE_CREATE, "user1");
- TestMessage message3 = new TestMessage(HookNotification.HookNotificationType.ENTITY_FULL_UPDATE, "user1");
-
- List<String> messageJson = new ArrayList<>();
AbstractNotification.createNotificationMessages(message1, messageJson);
AbstractNotification.createNotificationMessages(message2, messageJson);
AbstractNotification.createNotificationMessages(message3, messageJson);
- notification.send(NotificationInterface.NotificationType.HOOK, message1, message2, message3);
+ notification.send(NotificationType.HOOK, message1, message2, message3);
- assertEquals(NotificationInterface.NotificationType.HOOK, notification.type);
+ assertEquals(NotificationType.HOOK, notification.type);
assertEquals(3, notification.messages.size());
+
for (int i = 0; i < notification.messages.size(); i++) {
assertEqualsMessageJson(notification.messages.get(i), messageJson.get(i));
}
}
- @Test
+ @org.testng.annotations.Test
public void testSend2() throws Exception {
- Configuration configuration = mock(Configuration.class);
-
- TestNotification notification = new TestNotification(configuration);
+ Configuration configuration = mock(Configuration.class);
+ TestNotification notification = new TestNotification(configuration);
+ Test message1 = new Test(HookNotificationType.ENTITY_CREATE, "user1");
+ Test message2 = new Test(HookNotificationType.TYPE_CREATE, "user1");
+ Test message3 = new Test(HookNotificationType.ENTITY_FULL_UPDATE, "user1");
+ List<Test> messages = Arrays.asList(message1, message2, message3);
+ List<String> messageJson = new ArrayList<>();
- TestMessage message1 = new TestMessage(HookNotification.HookNotificationType.ENTITY_CREATE, "user1");
- TestMessage message2 = new TestMessage(HookNotification.HookNotificationType.TYPE_CREATE, "user1");
- TestMessage message3 = new TestMessage(HookNotification.HookNotificationType.ENTITY_FULL_UPDATE, "user1");
-
- List<TestMessage> messages = new LinkedList<>();
- messages.add(message1);
- messages.add(message2);
- messages.add(message3);
-
- List<String> messageJson = new ArrayList<>();
AbstractNotification.createNotificationMessages(message1, messageJson);
AbstractNotification.createNotificationMessages(message2, messageJson);
AbstractNotification.createNotificationMessages(message3, messageJson);
notification.send(NotificationInterface.NotificationType.HOOK, messages);
- assertEquals(notification.type, NotificationInterface.NotificationType.HOOK);
+ assertEquals(notification.type, NotificationType.HOOK);
assertEquals(notification.messages.size(), messageJson.size());
+
for (int i = 0; i < notification.messages.size(); i++) {
assertEqualsMessageJson(notification.messages.get(i), messageJson.get(i));
}
}
- public static class TestMessage extends HookNotification.HookNotificationMessage {
+ public static class Test extends HookNotification {
- public TestMessage(HookNotification.HookNotificationType type, String user) {
+ public Test(HookNotificationType type, String user) {
super(type, user);
}
}
// ignore msgCreationTime in Json
private void assertEqualsMessageJson(String msgJsonActual, String msgJsonExpected) {
- Map<Object, Object> msgActual = AbstractNotification.GSON.fromJson(msgJsonActual, Map.class);
- Map<Object, Object> msgExpected = AbstractNotification.GSON.fromJson(msgJsonExpected, Map.class);
+ Map<Object, Object> msgActual = AtlasType.fromV1Json(msgJsonActual, Map.class);
+ Map<Object, Object> msgExpected = AtlasType.fromV1Json(msgJsonExpected, Map.class);
msgActual.remove("msgCreationTime");
msgExpected.remove("msgCreationTime");
@@ -119,7 +115,7 @@ public class AbstractNotificationTest {
protected void sendInternal(NotificationType notificationType, List<String> notificationMessages)
throws NotificationException {
- type = notificationType;
+ type = notificationType;
messages = notificationMessages;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/AtlasNotificationMessageTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/AtlasNotificationMessageTest.java b/notification/src/test/java/org/apache/atlas/notification/AtlasNotificationMessageTest.java
index 27b5034..91a195d 100644
--- a/notification/src/test/java/org/apache/atlas/notification/AtlasNotificationMessageTest.java
+++ b/notification/src/test/java/org/apache/atlas/notification/AtlasNotificationMessageTest.java
@@ -18,6 +18,8 @@
package org.apache.atlas.notification;
+import org.apache.atlas.model.notification.AtlasNotificationMessage;
+import org.apache.atlas.model.notification.MessageVersion;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/MessageVersionTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/MessageVersionTest.java b/notification/src/test/java/org/apache/atlas/notification/MessageVersionTest.java
index d1af4b0..d8b3b34 100644
--- a/notification/src/test/java/org/apache/atlas/notification/MessageVersionTest.java
+++ b/notification/src/test/java/org/apache/atlas/notification/MessageVersionTest.java
@@ -18,6 +18,7 @@
package org.apache.atlas.notification;
+import org.apache.atlas.model.notification.MessageVersion;
import org.testng.annotations.Test;
import java.util.Arrays;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/SplitMessageAggregatorTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/SplitMessageAggregatorTest.java b/notification/src/test/java/org/apache/atlas/notification/SplitMessageAggregatorTest.java
index 0807221..b79735a 100644
--- a/notification/src/test/java/org/apache/atlas/notification/SplitMessageAggregatorTest.java
+++ b/notification/src/test/java/org/apache/atlas/notification/SplitMessageAggregatorTest.java
@@ -17,7 +17,8 @@
*/
package org.apache.atlas.notification;
-import org.apache.atlas.notification.AtlasNotificationBaseMessage.CompressionKind;
+import org.apache.atlas.model.notification.AtlasNotificationBaseMessage.CompressionKind;
+import org.apache.atlas.model.notification.AtlasNotificationStringMessage;
import org.testng.Assert;
import org.testng.annotations.Test;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/entity/EntityMessageDeserializerTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/entity/EntityMessageDeserializerTest.java b/notification/src/test/java/org/apache/atlas/notification/entity/EntityMessageDeserializerTest.java
deleted file mode 100644
index 7b513da..0000000
--- a/notification/src/test/java/org/apache/atlas/notification/entity/EntityMessageDeserializerTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.notification.entity;
-
-import org.apache.atlas.notification.AbstractNotification;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
-
-import static org.testng.Assert.assertEquals;
-
-/**
- * EntityMessageDeserializer tests.
- */
-public class EntityMessageDeserializerTest {
-
- @Test
- public void testDeserialize() throws Exception {
- EntityMessageDeserializer deserializer = new EntityMessageDeserializer();
-
- Referenceable entity = EntityNotificationImplTest.getEntity("id");
- String traitName = "MyTrait";
- List<IStruct> traitInfo = new LinkedList<>();
- IStruct trait = new Struct(traitName, Collections.<String, Object>emptyMap());
- traitInfo.add(trait);
-
- EntityNotificationImpl notification =
- new EntityNotificationImpl(entity, EntityNotification.OperationType.TRAIT_ADD, traitInfo);
-
- List<String> jsonMsgList = new ArrayList<>();
-
- AbstractNotification.createNotificationMessages(notification, jsonMsgList);
-
- EntityNotification deserializedNotification = null;
-
- for (String jsonMsg : jsonMsgList) {
- deserializedNotification = deserializer.deserialize(jsonMsg);
-
- if (deserializedNotification != null) {
- break;
- }
- }
-
- assertEquals(deserializedNotification.getOperationType(), notification.getOperationType());
- assertEquals(deserializedNotification.getEntity().getId(), notification.getEntity().getId());
- assertEquals(deserializedNotification.getEntity().getTypeName(), notification.getEntity().getTypeName());
- assertEquals(deserializedNotification.getEntity().getTraits(), notification.getEntity().getTraits());
- assertEquals(deserializedNotification.getEntity().getTrait(traitName),
- notification.getEntity().getTrait(traitName));
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationDeserializerTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationDeserializerTest.java b/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationDeserializerTest.java
new file mode 100644
index 0000000..13eafb6
--- /dev/null
+++ b/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationDeserializerTest.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.notification.entity;
+
+import org.apache.atlas.model.notification.EntityNotification;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.notification.AbstractNotification;
+import org.apache.atlas.v1.model.notification.EntityNotificationV1;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+/**
+ * EntityMessageDeserializer tests.
+ */
+public class EntityNotificationDeserializerTest {
+ private EntityMessageDeserializer deserializer = new EntityMessageDeserializer();
+
+ @Test
+ public void testDeserialize() throws Exception {
+ Referenceable entity = EntityNotificationTest.getEntity("id");
+ String traitName = "MyTrait";
+ List<Struct> traits = Collections.singletonList(new Struct(traitName, Collections.<String, Object>emptyMap()));
+ EntityNotificationV1 notification = new EntityNotificationV1(entity, EntityNotificationV1.OperationType.TRAIT_ADD, traits);
+ List<String> jsonMsgList = new ArrayList<>();
+
+ AbstractNotification.createNotificationMessages(notification, jsonMsgList);
+
+ EntityNotification deserializedNotification = null;
+
+ for (String jsonMsg : jsonMsgList) {
+ deserializedNotification = deserializer.deserialize(jsonMsg);
+
+ if (deserializedNotification != null) {
+ break;
+ }
+ }
+
+ assertTrue(deserializedNotification instanceof EntityNotificationV1);
+
+ EntityNotificationV1 entityNotificationV1 = (EntityNotificationV1)deserializedNotification;
+
+ assertEquals(entityNotificationV1.getOperationType(), notification.getOperationType());
+ assertEquals(entityNotificationV1.getEntity().getId(), notification.getEntity().getId());
+ assertEquals(entityNotificationV1.getEntity().getTypeName(), notification.getEntity().getTypeName());
+ assertEquals(entityNotificationV1.getEntity().getTraits(), notification.getEntity().getTraits());
+ assertEquals(entityNotificationV1.getEntity().getTrait(traitName), notification.getEntity().getTrait(traitName));
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationImplTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationImplTest.java b/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationImplTest.java
deleted file mode 100644
index c3a2db8..0000000
--- a/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationImplTest.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.notification.entity;
-
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.testng.annotations.Test;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-
-/**
- * EntityNotificationImpl tests.
- */
-public class EntityNotificationImplTest {
-
- @Test
- public void testGetEntity() throws Exception {
- Referenceable entity = getEntity("id");
-
- EntityNotificationImpl entityNotification =
- new EntityNotificationImpl(entity, EntityNotification.OperationType.ENTITY_CREATE,
- Collections.<IStruct>emptyList());
-
- assertEquals(entity, entityNotification.getEntity());
- }
-
- @Test
- public void testGetOperationType() throws Exception {
- Referenceable entity = getEntity("id");
-
- EntityNotificationImpl entityNotification =
- new EntityNotificationImpl(entity, EntityNotification.OperationType.ENTITY_CREATE,
- Collections.<IStruct>emptyList());
-
- assertEquals(EntityNotification.OperationType.ENTITY_CREATE, entityNotification.getOperationType());
- }
-
- @Test
- public void testGetAllTraits() throws Exception {
- Referenceable entity = getEntity("id");
- String traitName = "MyTrait";
- List<IStruct> traitInfo = new LinkedList<>();
- IStruct trait = new Struct(traitName, Collections.<String, Object>emptyMap());
- traitInfo.add(trait);
-
- EntityNotificationImpl entityNotification =
- new EntityNotificationImpl(entity, EntityNotification.OperationType.TRAIT_ADD, traitInfo);
-
- assertEquals(traitInfo, entityNotification.getAllTraits());
- }
-
- @Test
- public void testGetAllTraitsSuperTraits() throws Exception {
-
- TypeSystem typeSystem = mock(TypeSystem.class);
-
- String traitName = "MyTrait";
- IStruct myTrait = new Struct(traitName);
-
- String superTraitName = "MySuperTrait";
-
- TraitType traitDef = mock(TraitType.class);
- Set<String> superTypeNames = Collections.singleton(superTraitName);
-
- TraitType superTraitDef = mock(TraitType.class);
- Set<String> superSuperTypeNames = Collections.emptySet();
-
- Referenceable entity = getEntity("id", myTrait);
-
- when(typeSystem.getDataType(TraitType.class, traitName)).thenReturn(traitDef);
- when(typeSystem.getDataType(TraitType.class, superTraitName)).thenReturn(superTraitDef);
-
- when(traitDef.getAllSuperTypeNames()).thenReturn(superTypeNames);
- when(superTraitDef.getAllSuperTypeNames()).thenReturn(superSuperTypeNames);
-
- EntityNotificationImpl entityNotification =
- new EntityNotificationImpl(entity, EntityNotification.OperationType.TRAIT_ADD, typeSystem);
-
- List<IStruct> allTraits = entityNotification.getAllTraits();
-
- assertEquals(2, allTraits.size());
-
- for (IStruct trait : allTraits) {
- String typeName = trait.getTypeName();
- assertTrue(typeName.equals(traitName) || typeName.equals(superTraitName));
- }
- }
-
- @Test
- public void testEquals() throws Exception {
- Referenceable entity = getEntity("id");
-
- EntityNotificationImpl entityNotification2 =
- new EntityNotificationImpl(entity, EntityNotification.OperationType.ENTITY_CREATE,
- Collections.<IStruct>emptyList());
-
- EntityNotificationImpl entityNotification =
- new EntityNotificationImpl(entity, EntityNotification.OperationType.ENTITY_CREATE,
- Collections.<IStruct>emptyList());
-
- assertTrue(entityNotification.equals(entityNotification2));
- assertTrue(entityNotification2.equals(entityNotification));
- }
-
- public static Referenceable getEntity(String id, IStruct... traits) {
- String typeName = "typeName";
- Map<String, Object> values = new HashMap<>();
-
- List<String> traitNames = new LinkedList<>();
- Map<String, IStruct> traitMap = new HashMap<>();
-
- for (IStruct trait : traits) {
- String traitName = trait.getTypeName();
-
- traitNames.add(traitName);
- traitMap.put(traitName, trait);
- }
- return new Referenceable(id, typeName, values, traitNames, traitMap);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationTest.java b/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationTest.java
new file mode 100644
index 0000000..232b21d
--- /dev/null
+++ b/notification/src/test/java/org/apache/atlas/notification/entity/EntityNotificationTest.java
@@ -0,0 +1,128 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.notification.entity;
+
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.type.AtlasClassificationType;
+import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.atlas.v1.model.notification.EntityNotificationV1;
+import org.apache.atlas.v1.model.notification.EntityNotificationV1.OperationType;
+import org.testng.annotations.Test;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+/**
+ * EntityNotificationV1 tests.
+ */
+public class EntityNotificationTest {
+
+ @Test
+ public void testGetEntity() throws Exception {
+ Referenceable entity = getEntity("id");
+ EntityNotificationV1 entityNotification = new EntityNotificationV1(entity, OperationType.ENTITY_CREATE, Collections.<Struct>emptyList());
+
+ assertEquals(entity, entityNotification.getEntity());
+ }
+
+ @Test
+ public void testGetOperationType() throws Exception {
+ Referenceable entity = getEntity("id");
+ EntityNotificationV1 entityNotification = new EntityNotificationV1(entity, OperationType.ENTITY_CREATE, Collections.<Struct>emptyList());
+
+ assertEquals(EntityNotificationV1.OperationType.ENTITY_CREATE, entityNotification.getOperationType());
+ }
+
+ @Test
+ public void testGetAllTraits() throws Exception {
+ Referenceable entity = getEntity("id");
+ String traitName = "MyTrait";
+ List<Struct> traitInfo = Collections.singletonList(new Struct(traitName, Collections.<String, Object>emptyMap()));
+
+ EntityNotificationV1 entityNotification = new EntityNotificationV1(entity, OperationType.TRAIT_ADD, traitInfo);
+
+ assertEquals(traitInfo, entityNotification.getAllTraits());
+ }
+
+ @Test
+ public void testGetAllTraitsSuperTraits() throws Exception {
+ AtlasTypeRegistry typeRegistry = mock(AtlasTypeRegistry.class);
+ String traitName = "MyTrait";
+ Struct myTrait = new Struct(traitName);
+ String superTraitName = "MySuperTrait";
+ AtlasClassificationType traitType = mock(AtlasClassificationType.class);
+ Set<String> superTypeNames = Collections.singleton(superTraitName);
+ AtlasClassificationType superTraitType = mock(AtlasClassificationType.class);
+ Set<String> superSuperTypeNames = Collections.emptySet();
+ Referenceable entity = getEntity("id", myTrait);
+
+ when(typeRegistry.getClassificationTypeByName(traitName)).thenReturn(traitType);
+ when(typeRegistry.getClassificationTypeByName(superTraitName)).thenReturn(superTraitType);
+
+ when(traitType.getAllSuperTypes()).thenReturn(superTypeNames);
+ when(superTraitType.getAllSuperTypes()).thenReturn(superSuperTypeNames);
+
+ EntityNotificationV1 entityNotification = new EntityNotificationV1(entity, OperationType.TRAIT_ADD, typeRegistry);
+
+ List<Struct> allTraits = entityNotification.getAllTraits();
+
+ assertEquals(2, allTraits.size());
+
+ for (Struct trait : allTraits) {
+ String typeName = trait.getTypeName();
+
+ assertTrue(typeName.equals(traitName) || typeName.equals(superTraitName));
+ }
+ }
+
+ @Test
+ public void testEquals() throws Exception {
+ Referenceable entity = getEntity("id");
+ EntityNotificationV1 entityNotification2 = new EntityNotificationV1(entity, OperationType.ENTITY_CREATE, Collections.<Struct>emptyList());
+ EntityNotificationV1 entityNotification = new EntityNotificationV1(entity, OperationType.ENTITY_CREATE, Collections.<Struct>emptyList());
+
+ assertTrue(entityNotification.equals(entityNotification2));
+ assertTrue(entityNotification2.equals(entityNotification));
+ }
+
+ public static Referenceable getEntity(String id, Struct... traits) {
+ String typeName = "typeName";
+ List<String> traitNames = new LinkedList<>();
+ Map<String, Struct> traitMap = new HashMap<>();
+
+ for (Struct trait : traits) {
+ String traitName = trait.getTypeName();
+
+ traitNames.add(traitName);
+ traitMap.put(traitName, trait);
+ }
+
+ return new Referenceable(id, typeName, new HashMap<String, Object>(), traitNames, traitMap);
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/hook/HookMessageDeserializerTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/hook/HookMessageDeserializerTest.java b/notification/src/test/java/org/apache/atlas/notification/hook/HookMessageDeserializerTest.java
deleted file mode 100644
index 49b877b..0000000
--- a/notification/src/test/java/org/apache/atlas/notification/hook/HookMessageDeserializerTest.java
+++ /dev/null
@@ -1,170 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.notification.hook;
-
-import org.apache.atlas.notification.AbstractNotification;
-import org.apache.atlas.notification.entity.EntityNotificationImplTest;
-import org.apache.atlas.notification.hook.HookNotification.EntityUpdateRequest;
-import org.apache.atlas.notification.hook.HookNotification.HookNotificationMessage;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.commons.lang3.RandomStringUtils;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertTrue;
-
-/**
- * HookMessageDeserializer tests.
- */
-public class HookMessageDeserializerTest {
- HookMessageDeserializer deserializer = new HookMessageDeserializer();
-
- @Test
- public void testDeserialize() throws Exception {
- Referenceable entity = generateEntityWithTrait();
- EntityUpdateRequest message = new EntityUpdateRequest("user1", entity);
-
- List<String> jsonMsgList = new ArrayList<>();
-
- AbstractNotification.createNotificationMessages(message, jsonMsgList);
-
- HookNotificationMessage deserializedMessage = deserialize(jsonMsgList);
-
- assertEqualMessage(deserializedMessage, message);
- }
-
- // validate deserialization of legacy message, which doesn't use MessageVersion
- @Test
- public void testDeserializeLegacyMessage() throws Exception {
- Referenceable entity = generateEntityWithTrait();
- EntityUpdateRequest message = new EntityUpdateRequest("user1", entity);
-
- String jsonMsg = AbstractNotification.GSON.toJson(message);
- HookNotificationMessage deserializedMessage = deserializer.deserialize(jsonMsg);
-
- assertEqualMessage(deserializedMessage, message);
- }
-
- @Test
- public void testDeserializeCompressedMessage() throws Exception {
- Referenceable entity = generateLargeEntityWithTrait();
- EntityUpdateRequest message = new EntityUpdateRequest("user1", entity);
-
- List<String> jsonMsgList = new ArrayList<>();
-
- AbstractNotification.createNotificationMessages(message, jsonMsgList);
-
- assertTrue(jsonMsgList.size() == 1);
-
- String compressedMsg = jsonMsgList.get(0);
- String uncompressedMsg = AbstractNotification.GSON.toJson(message);
-
- assertTrue(compressedMsg.length() < uncompressedMsg.length(), "Compressed message (" + compressedMsg.length() + ") should be shorter than uncompressed message (" + uncompressedMsg.length() + ")");
-
- HookNotificationMessage deserializedMessage = deserialize(jsonMsgList);
-
- assertEqualMessage(deserializedMessage, message);
- }
-
- @Test
- public void testDeserializeSplitMessage() throws Exception {
- Referenceable entity = generateVeryLargeEntityWithTrait();
- EntityUpdateRequest message = new EntityUpdateRequest("user1", entity);
-
- List<String> jsonMsgList = new ArrayList<>();
-
- AbstractNotification.createNotificationMessages(message, jsonMsgList);
-
- assertTrue(jsonMsgList.size() > 1);
-
- HookNotificationMessage deserializedMessage = deserialize(jsonMsgList);
-
- assertEqualMessage(deserializedMessage, message);
- }
-
- private Referenceable generateEntityWithTrait() {
- Referenceable ret = EntityNotificationImplTest.getEntity("id", new Struct("MyTrait", Collections.<String, Object>emptyMap()));
-
- return ret;
- }
-
- private HookNotificationMessage deserialize(List<String> jsonMsgList) {
- HookNotificationMessage deserializedMessage = null;
-
- for (String jsonMsg : jsonMsgList) {
- deserializedMessage = deserializer.deserialize(jsonMsg);
-
- if (deserializedMessage != null) {
- break;
- }
- }
-
- return deserializedMessage;
- }
-
- private void assertEqualMessage(HookNotificationMessage deserializedMessage, EntityUpdateRequest message) throws Exception {
- assertNotNull(deserializedMessage);
- assertEquals(deserializedMessage.getType(), message.getType());
- assertEquals(deserializedMessage.getUser(), message.getUser());
-
- assertTrue(deserializedMessage instanceof EntityUpdateRequest);
-
- EntityUpdateRequest deserializedEntityUpdateRequest = (EntityUpdateRequest) deserializedMessage;
- Referenceable deserializedEntity = deserializedEntityUpdateRequest.getEntities().get(0);
- Referenceable entity = message.getEntities().get(0);
- String traitName = entity.getTraits().get(0);
-
- assertEquals(deserializedEntity.getId(), entity.getId());
- assertEquals(deserializedEntity.getTypeName(), entity.getTypeName());
- assertEquals(deserializedEntity.getTraits(), entity.getTraits());
- assertEquals(deserializedEntity.getTrait(traitName).hashCode(), entity.getTrait(traitName).hashCode());
-
- }
-
- private Referenceable generateLargeEntityWithTrait() {
- Referenceable ret = EntityNotificationImplTest.getEntity("id", new Struct("MyTrait", Collections.<String, Object>emptyMap()));
-
- // add 100 attributes, each with value of size 10k
- // Json Size=1,027,984; GZipped Size=16,387 ==> will compress, but not split
- String attrValue = RandomStringUtils.randomAlphanumeric(10 * 1024); // use the same value for all attributes - to aid better compression
- for (int i = 0; i < 100; i++) {
- ret.set("attr_" + i, attrValue);
- }
-
- return ret;
- }
-
- private Referenceable generateVeryLargeEntityWithTrait() {
- Referenceable ret = EntityNotificationImplTest.getEntity("id", new Struct("MyTrait", Collections.<String, Object>emptyMap()));
-
- // add 300 attributes, each with value of size 10k
- // Json Size=3,082,384; GZipped Size=2,313,357 ==> will compress & split
- for (int i = 0; i < 300; i++) {
- ret.set("attr_" + i, RandomStringUtils.randomAlphanumeric(10 * 1024));
- }
-
- return ret;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/hook/HookNotificationDeserializerTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/hook/HookNotificationDeserializerTest.java b/notification/src/test/java/org/apache/atlas/notification/hook/HookNotificationDeserializerTest.java
new file mode 100644
index 0000000..d048170
--- /dev/null
+++ b/notification/src/test/java/org/apache/atlas/notification/hook/HookNotificationDeserializerTest.java
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.notification.hook;
+
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.notification.entity.EntityNotificationTest;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.notification.AbstractNotification;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityUpdateRequest;
+import org.apache.atlas.type.AtlasType;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.assertTrue;
+
+/**
+ * HookMessageDeserializer tests.
+ */
+public class HookNotificationDeserializerTest {
+ private HookMessageDeserializer deserializer = new HookMessageDeserializer();
+
+ @Test
+ public void testDeserialize() throws Exception {
+ Referenceable entity = generateEntityWithTrait();
+ EntityUpdateRequest message = new EntityUpdateRequest("user1", entity);
+ List<String> jsonMsgList = new ArrayList<>();
+
+ AbstractNotification.createNotificationMessages(message, jsonMsgList);
+
+ HookNotification deserializedMessage = deserialize(jsonMsgList);
+
+ assertEqualMessage(deserializedMessage, message);
+ }
+
+ // validate deserialization of legacy message, which doesn't use MessageVersion
+ @Test
+ public void testDeserializeLegacyMessage() throws Exception {
+ Referenceable entity = generateEntityWithTrait();
+ EntityUpdateRequest message = new EntityUpdateRequest("user1", entity);
+ String jsonMsg = AtlasType.toV1Json(message);
+ HookNotification deserializedMessage = deserialize(Collections.singletonList(jsonMsg));
+
+ assertEqualMessage(deserializedMessage, message);
+ }
+
+ @Test
+ public void testDeserializeCompressedMessage() throws Exception {
+ Referenceable entity = generateLargeEntityWithTrait();
+ EntityUpdateRequest message = new EntityUpdateRequest("user1", entity);
+ List<String> jsonMsgList = new ArrayList<>();
+
+ AbstractNotification.createNotificationMessages(message, jsonMsgList);
+
+ assertTrue(jsonMsgList.size() == 1);
+
+ String compressedMsg = jsonMsgList.get(0);
+ String uncompressedMsg = AtlasType.toV1Json(message);
+
+ assertTrue(compressedMsg.length() < uncompressedMsg.length(), "Compressed message (" + compressedMsg.length() + ") should be shorter than uncompressed message (" + uncompressedMsg.length() + ")");
+
+ HookNotification deserializedMessage = deserialize(jsonMsgList);
+
+ assertEqualMessage(deserializedMessage, message);
+ }
+
+ @Test
+ public void testDeserializeSplitMessage() throws Exception {
+ Referenceable entity = generateVeryLargeEntityWithTrait();
+ EntityUpdateRequest message = new EntityUpdateRequest("user1", entity);
+ List<String> jsonMsgList = new ArrayList<>();
+
+ AbstractNotification.createNotificationMessages(message, jsonMsgList);
+
+ assertTrue(jsonMsgList.size() > 1);
+
+ HookNotification deserializedMessage = deserialize(jsonMsgList);
+
+ assertEqualMessage(deserializedMessage, message);
+ }
+
+ private Referenceable generateEntityWithTrait() {
+ Referenceable ret = EntityNotificationTest.getEntity("id", new Struct("MyTrait", Collections.<String, Object>emptyMap()));
+
+ return ret;
+ }
+
+ private HookNotification deserialize(List<String> jsonMsgList) {
+ HookNotification deserializedMessage = null;
+
+ for (String jsonMsg : jsonMsgList) {
+ deserializedMessage = deserializer.deserialize(jsonMsg);
+
+ if (deserializedMessage != null) {
+ break;
+ }
+ }
+
+ return deserializedMessage;
+ }
+
+ private void assertEqualMessage(HookNotification deserializedMessage, EntityUpdateRequest message) throws Exception {
+ assertNotNull(deserializedMessage);
+ assertEquals(deserializedMessage.getType(), message.getType());
+ assertEquals(deserializedMessage.getUser(), message.getUser());
+
+ assertTrue(deserializedMessage instanceof EntityUpdateRequest);
+
+ EntityUpdateRequest deserializedEntityUpdateRequest = (EntityUpdateRequest) deserializedMessage;
+ Referenceable deserializedEntity = deserializedEntityUpdateRequest.getEntities().get(0);
+ Referenceable entity = message.getEntities().get(0);
+ String traitName = entity.getTraitNames().get(0);
+
+ assertEquals(deserializedEntity.getId(), entity.getId());
+ assertEquals(deserializedEntity.getTypeName(), entity.getTypeName());
+ assertEquals(deserializedEntity.getTraits(), entity.getTraits());
+ assertEquals(deserializedEntity.getTrait(traitName).hashCode(), entity.getTrait(traitName).hashCode());
+
+ }
+
+ private Referenceable generateLargeEntityWithTrait() {
+ Referenceable ret = EntityNotificationTest.getEntity("id", new Struct("MyTrait", Collections.<String, Object>emptyMap()));
+
+ // add 100 attributes, each with value of size 10k
+ // Json Size=1,027,984; GZipped Size=16,387 ==> will compress, but not split
+ String attrValue = RandomStringUtils.randomAlphanumeric(10 * 1024); // use the same value for all attributes - to aid better compression
+ for (int i = 0; i < 100; i++) {
+ ret.set("attr_" + i, attrValue);
+ }
+
+ return ret;
+ }
+
+ private Referenceable generateVeryLargeEntityWithTrait() {
+ Referenceable ret = EntityNotificationTest.getEntity("id", new Struct("MyTrait", Collections.<String, Object>emptyMap()));
+
+ // add 300 attributes, each with value of size 10k
+ // Json Size=3,082,384; GZipped Size=2,313,357 ==> will compress & split
+ for (int i = 0; i < 300; i++) {
+ ret.set("attr_" + i, RandomStringUtils.randomAlphanumeric(10 * 1024));
+ }
+
+ return ret;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/notification/src/test/java/org/apache/atlas/notification/hook/HookNotificationTest.java
----------------------------------------------------------------------
diff --git a/notification/src/test/java/org/apache/atlas/notification/hook/HookNotificationTest.java b/notification/src/test/java/org/apache/atlas/notification/hook/HookNotificationTest.java
index dd3257e..cf691af 100644
--- a/notification/src/test/java/org/apache/atlas/notification/hook/HookNotificationTest.java
+++ b/notification/src/test/java/org/apache/atlas/notification/hook/HookNotificationTest.java
@@ -17,16 +17,19 @@
*/
package org.apache.atlas.notification.hook;
-import org.apache.atlas.notification.AbstractNotification;
-import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.model.notification.HookNotification.HookNotificationType;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNull;
+import static org.testng.Assert.assertTrue;
-public class HookNotificationTest {
- public static final HookMessageDeserializer HOOK_MESSAGE_DESERIALIZER = new HookMessageDeserializer();
+public class HookNotificationTest {
+ private HookMessageDeserializer deserializer = new HookMessageDeserializer();
@Test
public void testNewMessageSerDe() throws Exception {
@@ -35,19 +38,21 @@ public class HookNotificationTest {
entity1.set("complex", new Referenceable("othertype"));
Referenceable entity2 = new Referenceable("newtype");
String user = "user";
- HookNotification.EntityCreateRequest request = new HookNotification.EntityCreateRequest(user, entity1, entity2);
- String notificationJson = AbstractNotification.GSON.toJson(request);
- HookNotification.HookNotificationMessage actualNotification =
- HOOK_MESSAGE_DESERIALIZER.deserialize(notificationJson);
+ EntityCreateRequest request = new EntityCreateRequest(user, entity1, entity2);
+ String notificationJson = AtlasType.toV1Json(request);
+ HookNotification actualNotification = deserializer.deserialize(notificationJson);
- assertEquals(actualNotification.getType(), HookNotification.HookNotificationType.ENTITY_CREATE);
+ assertEquals(actualNotification.getType(), HookNotificationType.ENTITY_CREATE);
assertEquals(actualNotification.getUser(), user);
+ assertTrue(actualNotification instanceof EntityCreateRequest);
+
+ EntityCreateRequest createRequest = (EntityCreateRequest) actualNotification;
- HookNotification.EntityCreateRequest createRequest = (HookNotification.EntityCreateRequest) actualNotification;
assertEquals(createRequest.getEntities().size(), 2);
Referenceable actualEntity1 = createRequest.getEntities().get(0);
+
assertEquals(actualEntity1.getTypeName(), "sometype");
assertEquals(((Referenceable)actualEntity1.get("complex")).getTypeName(), "othertype");
assertEquals(createRequest.getEntities().get(1).getTypeName(), "newtype");
@@ -58,9 +63,10 @@ public class HookNotificationTest {
//Code to generate the json, use it for hard-coded json used later in this test
Referenceable entity = new Referenceable("sometype");
entity.set("attr", "value");
- HookNotification.EntityCreateRequest request = new HookNotification.EntityCreateRequest(null, entity);
- String notificationJsonFromCode = AbstractNotification.GSON.toJson(request);
+ EntityCreateRequest request = new EntityCreateRequest(null, entity);
+ String notificationJsonFromCode = AtlasType.toV1Json(request);
+
System.out.println(notificationJsonFromCode);
//Json without user and assert that the string can be deserialised
@@ -87,11 +93,9 @@ public class HookNotificationTest {
+ "}";
- HookNotification.HookNotificationMessage actualNotification =
- HOOK_MESSAGE_DESERIALIZER.deserialize(notificationJson);
+ HookNotification actualNotification = deserializer.deserialize(notificationJson);
- assertEquals(actualNotification.getType(), HookNotification.HookNotificationType.ENTITY_CREATE);
- assertNull(actualNotification.user);
- assertEquals(actualNotification.getUser(), HookNotification.HookNotificationMessage.UNKNOW_USER);
+ assertEquals(actualNotification.getType(), HookNotificationType.ENTITY_CREATE);
+ assertEquals(actualNotification.getUser(), HookNotification.UNKNOW_USER);
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 3c48b86..cbf9e2a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -535,13 +535,10 @@
<hbase.version>1.1.2</hbase.version>
<solr.version>5.5.1</solr.version>
<kafka.version>0.10.0.0</kafka.version>
+ <kafka.scala.binary.version>2.11</kafka.scala.binary.version>
<curator.version>2.11.0</curator.version>
<zookeeper.version>3.4.6</zookeeper.version>
- <!-- scala versions -->
- <scala.version>2.11.8</scala.version>
- <scala.binary.version>2.11</scala.binary.version>
- <scala.macros.version>2.0.1</scala.macros.version>
<json.version>3.2.11</json.version>
<log4j.version>1.2.17</log4j.version>
<akka.version>2.3.7</akka.version>
@@ -582,7 +579,7 @@
<jetty-maven-plugin.stopWait>10</jetty-maven-plugin.stopWait>
<!-- The following graph.{storage|index}.backend definitions dictate the backends for test
- resources in typesystem and each of the graph profiles. They do not affect packaging
+ resources in intg and each of the graph profiles. They do not affect packaging
which is handled by defaults and profiles set in atlas-distro POM -->
<graph.storage.backend>berkeleyje</graph.storage.backend>
<graph.index.backend>elasticsearch</graph.index.backend>
@@ -725,7 +722,6 @@
<module>build-tools</module>
<module>intg</module>
<module>common</module>
- <module>typesystem</module>
<module>server-api</module>
<module>notification</module>
<module>client</module>
@@ -748,7 +744,7 @@
<module>addons/sqoop-bridge</module>
<module>addons/storm-bridge-shim</module>
<module>addons/storm-bridge</module>
- <module>addons/hbase-bridge-shim</module>
+ <module>addons/hbase-bridge-shim</module>
<module>addons/hbase-bridge</module>
<module>distro</module>
</modules>
@@ -804,7 +800,6 @@
<dependencyManagement>
<dependencies>
-
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
@@ -1336,20 +1331,6 @@
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <version>${project.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <version>${project.version}</version>
- <classifier>tests</classifier>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
<artifactId>atlas-graphdb-api</artifactId>
<version>${project.version}</version>
</dependency>
@@ -1531,109 +1512,6 @@
</dependency>
- <!--Scala dependencies-->
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>${scala.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <version>${scala.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>${scala.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-actors</artifactId>
- <version>${scala.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- <version>${scala.version}</version>
- </dependency>
-
- <dependency>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-native_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </dependency>
-
- <dependency>
- <groupId>com.github.nscala-time</groupId>
- <artifactId>nscala-time_${scala.binary.version}</artifactId>
- <version>1.6.0</version>
- </dependency>
-
- <dependency>
- <groupId>com.typesafe</groupId>
- <artifactId>config</artifactId>
- <version>1.2.1</version>
- </dependency>
-
- <dependency>
- <groupId>com.typesafe.akka</groupId>
- <artifactId>akka-actor_${scala.binary.version}</artifactId>
- <version>${akka.version}</version>
- </dependency>
-
- <dependency>
- <groupId>com.typesafe.akka</groupId>
- <artifactId>akka-testkit_${scala.binary.version}</artifactId>
- <version>${akka.version}</version>
- </dependency>
-
- <dependency>
- <groupId>com.typesafe.akka</groupId>
- <artifactId>akka-slf4j_${scala.binary.version}</artifactId>
- <version>${akka.version}</version>
- </dependency>
-
- <dependency>
- <groupId>io.spray</groupId>
- <artifactId>spray-routing</artifactId>
- <version>${spray.version}</version>
- </dependency>
-
- <dependency>
- <groupId>io.spray</groupId>
- <artifactId>spray-can</artifactId>
- <version>${spray.version}</version>
- </dependency>
-
- <dependency>
- <groupId>io.spray</groupId>
- <artifactId>spray-httpx</artifactId>
- <version>${spray.version}</version>
- </dependency>
-
- <dependency>
- <groupId>io.spray</groupId>
- <artifactId>spray-testkit</artifactId>
- <version>${spray.version}</version>
- </dependency>
-
- <dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>${gson.version}</version>
- </dependency>
-
- <dependency>
- <groupId>it.unimi.dsi</groupId>
- <artifactId>fastutil</artifactId>
- <version>${fastutil.version}</version>
- </dependency>
-
<!-- API documentation -->
<dependency>
<groupId>com.webcohesion.enunciate</groupId>
@@ -1704,7 +1582,7 @@
<dependency>
<groupId>org.apache.kafka</groupId>
- <artifactId>kafka_${scala.binary.version}</artifactId>
+ <artifactId>kafka_${kafka.scala.binary.version}</artifactId>
<version>${kafka.version}</version>
<exclusions>
<exclusion>
@@ -1778,10 +1656,6 @@
<directory>src/test/resources</directory>
<filtering>true</filtering>
</testResource>
- <testResource>
- <directory>typesystem/src/test/resources</directory>
- <filtering>true</filtering>
- </testResource>
</testResources>
<pluginManagement>
@@ -1796,6 +1670,10 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.7.0</version>
+ <configuration>
+ <source>1.8</source>
+ <target>1.8</target>
+ </configuration>
</plugin>
<plugin>
@@ -1927,16 +1805,6 @@
</execution>
</executions>
</plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>3.7.0</version>
- <configuration>
- <source>1.7</source>
- <target>1.7</target>
- <optimize>true</optimize>
- </configuration>
- </plugin>
<plugin>
<groupId>org.apache.felix</groupId>
@@ -2028,12 +1896,6 @@
</plugin>
<plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- <version>3.2.0</version>
- </plugin>
-
- <plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>buildnumber-maven-plugin</artifactId>
<executions>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/pom.xml
----------------------------------------------------------------------
diff --git a/repository/pom.xml b/repository/pom.xml
index f981c76..51ddb76 100755
--- a/repository/pom.xml
+++ b/repository/pom.xml
@@ -32,7 +32,6 @@
<packaging>jar</packaging>
<dependencies>
-
<dependency>
<groupId>org.apache.atlas</groupId>
<artifactId>atlas-intg</artifactId>
@@ -40,11 +39,6 @@
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
<artifactId>atlas-server-api</artifactId>
</dependency>
@@ -58,6 +52,11 @@
<artifactId>atlas-graphdb-api</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-lang3</artifactId>
+ </dependency>
+
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
@@ -69,36 +68,6 @@
</dependency>
<dependency>
- <groupId>com.googlecode.json-simple</groupId>
- <artifactId>json-simple</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-actors</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.skyscreamer</groupId>
- <artifactId>jsonassert</artifactId>
- </dependency>
-
- <dependency>
<groupId>org.testng</groupId>
<artifactId>testng</artifactId>
</dependency>
@@ -108,13 +77,6 @@
<artifactId>mockito-all</artifactId>
</dependency>
- <dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <classifier>tests</classifier>
- <scope>test</scope>
- </dependency>
-
<!-- Test dependencies -->
<dependency>
@@ -173,43 +135,6 @@
<build>
<plugins>
<plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- <version>3.2.0</version>
- <executions>
- <execution>
- <id>scala-compile-first</id>
- <phase>process-resources</phase>
- <goals>
- <goal>compile</goal>
- </goals>
- </execution>
- <execution>
- <id>scala-test-compile-first</id>
- <phase>process-test-resources</phase>
- <goals>
- <goal>testCompile</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <scalaVersion>${scala.version}</scalaVersion>
- <recompileMode>incremental</recompileMode>
- <useZincServer>true</useZincServer>
- <source>1.7</source>
- <target>1.7</target>
- <args>
- <arg>-unchecked</arg>
- <arg>-deprecation</arg>
- <arg>-feature</arg>
- </args>
- <jvmArgs>
- <jvmArg>-Xmx512m</jvmArg>
- </jvmArgs>
- </configuration>
- </plugin>
-
- <plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.4</version>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/GraphTransactionInterceptor.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/GraphTransactionInterceptor.java b/repository/src/main/java/org/apache/atlas/GraphTransactionInterceptor.java
index c6a4bbe..b3e690f 100644
--- a/repository/src/main/java/org/apache/atlas/GraphTransactionInterceptor.java
+++ b/repository/src/main/java/org/apache/atlas/GraphTransactionInterceptor.java
@@ -22,7 +22,7 @@ import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.typesystem.exception.NotFoundException;
+import org.apache.atlas.exception.NotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/discovery/AtlasLineageService.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/discovery/AtlasLineageService.java b/repository/src/main/java/org/apache/atlas/discovery/AtlasLineageService.java
index 5f49625..8dc6d3a 100644
--- a/repository/src/main/java/org/apache/atlas/discovery/AtlasLineageService.java
+++ b/repository/src/main/java/org/apache/atlas/discovery/AtlasLineageService.java
@@ -22,6 +22,7 @@ package org.apache.atlas.discovery;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.lineage.AtlasLineageInfo;
import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageDirection;
+import org.apache.atlas.v1.model.lineage.SchemaResponse.SchemaDetails;
public interface AtlasLineageService {
/**
@@ -32,4 +33,19 @@ public interface AtlasLineageService {
*/
AtlasLineageInfo getAtlasLineageInfo(String entityGuid, LineageDirection direction, int depth) throws AtlasBaseException;
+ /**
+ * Return the schema for the given datasetName.
+ *
+ * @param datasetName datasetName
+ * @return Schema as JSON
+ */
+ SchemaDetails getSchemaForHiveTableByName(String datasetName) throws AtlasBaseException;
+
+ /**
+ * Return the schema for the given entity id.
+ *
+ * @param guid tableName
+ * @return Schema as JSON
+ */
+ SchemaDetails getSchemaForHiveTableByGuid(String guid) throws AtlasBaseException;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/discovery/DataSetLineageService.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/discovery/DataSetLineageService.java b/repository/src/main/java/org/apache/atlas/discovery/DataSetLineageService.java
deleted file mode 100644
index af7f1b4..0000000
--- a/repository/src/main/java/org/apache/atlas/discovery/DataSetLineageService.java
+++ /dev/null
@@ -1,233 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.discovery;
-
-import org.apache.atlas.ApplicationProperties;
-import org.apache.atlas.AtlasClient;
-import org.apache.atlas.AtlasConfiguration;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.annotation.GraphTransaction;
-import org.apache.atlas.discovery.graph.DefaultGraphPersistenceStrategy;
-import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
-import org.apache.atlas.query.GremlinQueryResult;
-import org.apache.atlas.query.InputLineageClosureQuery;
-import org.apache.atlas.query.OutputLineageClosureQuery;
-import org.apache.atlas.query.QueryParams;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.SchemaNotFoundException;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.TypeUtils;
-import org.apache.atlas.utils.ParamChecker;
-import org.apache.commons.configuration.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Component;
-import scala.Option;
-import scala.Some;
-import scala.collection.JavaConversions;
-import scala.collection.immutable.List;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import java.util.Arrays;
-import java.util.Iterator;
-
-/**
- * Hive implementation of Lineage service interface.
- */
-@Singleton
-@Component
-public class DataSetLineageService implements LineageService {
-
- private static final Logger LOG = LoggerFactory.getLogger(DataSetLineageService.class);
-
- private static final Option<List<String>> SELECT_ATTRIBUTES =
- Some.apply(JavaConversions.asScalaBuffer(Arrays.asList(AtlasClient.NAME,
- AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME)).toList());
- public static final String SELECT_INSTANCE_GUID = "__guid";
-
- public static final String DATASET_SCHEMA_QUERY_PREFIX = "atlas.lineage.schema.query.";
-
- private static final String HIVE_PROCESS_TYPE_NAME = "Process";
- private static final String HIVE_PROCESS_INPUT_ATTRIBUTE_NAME = "inputs";
- private static final String HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME = "outputs";
-
- private static final Configuration propertiesConf;
-
- static {
- try {
- propertiesConf = ApplicationProperties.get();
- } catch (AtlasException e) {
- throw new RuntimeException(e);
- }
- }
-
-
- private final AtlasGraph graph;
- private final DefaultGraphPersistenceStrategy graphPersistenceStrategy;
- private final GraphBackedDiscoveryService discoveryService;
-
- @Inject
- DataSetLineageService(MetadataRepository metadataRepository,
- GraphBackedDiscoveryService discoveryService,
- AtlasGraph atlasGraph) throws DiscoveryException {
- this.graph = atlasGraph;
- this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
- this.discoveryService = discoveryService;
- }
-
- /**
- * Return the lineage outputs graph for the given datasetName.
- *
- * @param datasetName datasetName
- * @return Outputs Graph as JSON
- */
- @Override
- @GraphTransaction
- public String getOutputsGraph(String datasetName) throws AtlasException {
- LOG.info("Fetching lineage outputs graph for datasetName={}", datasetName);
- datasetName = ParamChecker.notEmpty(datasetName, "dataset name");
- TypeUtils.Pair<String, String> typeIdPair = validateDatasetNameExists(datasetName);
- return getOutputsGraphForId(typeIdPair.right);
- }
-
- /**
- * Return the lineage inputs graph for the given tableName.
- *
- * @param tableName tableName
- * @return Inputs Graph as JSON
- */
- @Override
- @GraphTransaction
- public String getInputsGraph(String tableName) throws AtlasException {
- LOG.info("Fetching lineage inputs graph for tableName={}", tableName);
- tableName = ParamChecker.notEmpty(tableName, "table name");
- TypeUtils.Pair<String, String> typeIdPair = validateDatasetNameExists(tableName);
- return getInputsGraphForId(typeIdPair.right);
- }
-
- @Override
- @GraphTransaction
- public String getInputsGraphForEntity(String guid) throws AtlasException {
- LOG.info("Fetching lineage inputs graph for entity={}", guid);
- guid = ParamChecker.notEmpty(guid, "Entity id");
- validateDatasetExists(guid);
- return getInputsGraphForId(guid);
- }
-
- private String getInputsGraphForId(String guid) {
- InputLineageClosureQuery
- inputsQuery = new InputLineageClosureQuery(AtlasClient.DATA_SET_SUPER_TYPE, SELECT_INSTANCE_GUID,
- guid, HIVE_PROCESS_TYPE_NAME,
- HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, Option.empty(),
- SELECT_ATTRIBUTES, true, graphPersistenceStrategy, graph);
- GremlinQueryResult result = inputsQuery.evaluate();
- return inputsQuery.graph(result).toInstanceJson();
- }
-
- @Override
- @GraphTransaction
- public String getOutputsGraphForEntity(String guid) throws AtlasException {
- LOG.info("Fetching lineage outputs graph for entity guid={}", guid);
- guid = ParamChecker.notEmpty(guid, "Entity id");
- validateDatasetExists(guid);
- return getOutputsGraphForId(guid);
- }
-
- private String getOutputsGraphForId(String guid) {
- OutputLineageClosureQuery outputsQuery =
- new OutputLineageClosureQuery(AtlasClient.DATA_SET_SUPER_TYPE, SELECT_INSTANCE_GUID, guid, HIVE_PROCESS_TYPE_NAME,
- HIVE_PROCESS_INPUT_ATTRIBUTE_NAME, HIVE_PROCESS_OUTPUT_ATTRIBUTE_NAME, Option.empty(),
- SELECT_ATTRIBUTES, true, graphPersistenceStrategy, graph);
- GremlinQueryResult result = outputsQuery.evaluate();
- return outputsQuery.graph(result).toInstanceJson();
- }
-
- /**
- * Return the schema for the given tableName.
- *
- * @param datasetName tableName
- * @return Schema as JSON
- */
- @Override
- @GraphTransaction
- public String getSchema(String datasetName) throws AtlasException {
- datasetName = ParamChecker.notEmpty(datasetName, "table name");
- LOG.info("Fetching schema for tableName={}", datasetName);
- TypeUtils.Pair<String, String> typeIdPair = validateDatasetNameExists(datasetName);
-
- return getSchemaForId(typeIdPair.left, typeIdPair.right);
- }
-
- private String getSchemaForId(String typeName, String guid) throws DiscoveryException, SchemaNotFoundException {
- String configName = DATASET_SCHEMA_QUERY_PREFIX + typeName;
- if (propertiesConf.getString(configName) != null) {
- final String schemaQuery =
- String.format(propertiesConf.getString(configName), guid);
- int limit = AtlasConfiguration.SEARCH_MAX_LIMIT.getInt();
- return discoveryService.searchByDSL(schemaQuery, new QueryParams(limit, 0));
- }
- throw new SchemaNotFoundException("Schema is not configured for type " + typeName + ". Configure " + configName);
- }
-
- @Override
- @GraphTransaction
- public String getSchemaForEntity(String guid) throws AtlasException {
- guid = ParamChecker.notEmpty(guid, "Entity id");
- LOG.info("Fetching schema for entity guid={}", guid);
- String typeName = validateDatasetExists(guid);
- return getSchemaForId(typeName, guid);
- }
-
- /**
- * Validate if indeed this is a table type and exists.
- *
- * @param datasetName table name
- */
- private TypeUtils.Pair<String, String> validateDatasetNameExists(String datasetName) throws AtlasException {
- Iterator<AtlasVertex> results = graph.query().has("Referenceable.qualifiedName", datasetName)
- .has(Constants.STATE_PROPERTY_KEY, Id.EntityState.ACTIVE.name())
- .has(Constants.SUPER_TYPES_PROPERTY_KEY, AtlasClient.DATA_SET_SUPER_TYPE)
- .vertices().iterator();
- while (results.hasNext()) {
- AtlasVertex vertex = results.next();
- return TypeUtils.Pair.of(GraphHelper.getTypeName(vertex), GraphHelper.getGuid(vertex));
- }
- throw new EntityNotFoundException("Dataset with name = " + datasetName + " does not exist");
- }
-
- /**
- * Validate if indeed this is a table type and exists.
- *
- * @param guid entity id
- */
- private String validateDatasetExists(String guid) throws AtlasException {
- for (AtlasVertex vertex : (Iterable<AtlasVertex>) graph.query().has(Constants.GUID_PROPERTY_KEY, guid)
- .has(Constants.SUPER_TYPES_PROPERTY_KEY, AtlasClient.DATA_SET_SUPER_TYPE)
- .vertices()) {
- return GraphHelper.getTypeName(vertex);
- }
- throw new EntityNotFoundException("Dataset with guid = " + guid + " does not exist");
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/discovery/DiscoveryService.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/discovery/DiscoveryService.java b/repository/src/main/java/org/apache/atlas/discovery/DiscoveryService.java
deleted file mode 100644
index e86047e..0000000
--- a/repository/src/main/java/org/apache/atlas/discovery/DiscoveryService.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.discovery;
-
-import org.apache.atlas.query.QueryParams;
-
-import java.util.List;
-import java.util.Map;
-
-/**
- * Metadata discovery service.
- */
-public interface DiscoveryService {
-
- /**
- * Searches using Full text query
- * @param query query string
- * @param queryParams Default query parameters like limit, offset
- * @return results json
- * @throws DiscoveryException
- */
- String searchByFullText(String query, QueryParams queryParams) throws DiscoveryException;
-
- /**
- * Searches using DSL query
- * @param dslQuery query string
- * @param queryParams Default query parameters like limit, offset
- * @return results json
- * @throws DiscoveryException
- */
- String searchByDSL(String dslQuery, QueryParams queryParams) throws DiscoveryException;
-
- /**
- * Assumes the User is familiar with the persistence structure of the Repository.
- * The given query is run uninterpreted against the underlying Graph Store.
- * The results are returned as a List of Rows. each row is a Map of Key,Value pairs.
- *
- * @param gremlinQuery query in gremlin dsl format
- * @return List of Maps
- * @throws org.apache.atlas.discovery.DiscoveryException
- */
- List<Map<String, String>> searchByGremlin(String gremlinQuery) throws DiscoveryException;
-}
[16/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplication of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/InverseReferenceUpdateV1Test.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/InverseReferenceUpdateV1Test.java b/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/InverseReferenceUpdateV1Test.java
index 6475639..2c21638 100644
--- a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/InverseReferenceUpdateV1Test.java
+++ b/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/InverseReferenceUpdateV1Test.java
@@ -20,7 +20,6 @@ package org.apache.atlas.repository.store.graph.v1;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.atlas.RequestContextV1;
-import org.apache.atlas.TestUtils;
import org.apache.atlas.TestUtilsV2;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
@@ -32,7 +31,6 @@ import org.apache.atlas.model.typedef.AtlasTypesDef;
import org.apache.atlas.repository.graph.AtlasGraphProvider;
import org.apache.atlas.repository.store.bootstrap.AtlasTypeDefStoreInitializer;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
-import org.apache.atlas.services.MetadataService;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasTypeRegistry;
@@ -48,11 +46,12 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import static org.apache.atlas.TestUtils.NAME;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
+import static org.apache.atlas.TestUtilsV2.NAME;
+
/**
* Test automatic inverse reference updating in V1 (V2?) code path.
*
@@ -67,9 +66,6 @@ public abstract class InverseReferenceUpdateV1Test {
@Inject
AtlasEntityStore entityStore;
- @Inject
- MetadataService metadataService;
-
private AtlasEntitiesWithExtInfo deptEntity;
protected Map<String, AtlasObjectId> nameIdMap = new HashMap<>();
@@ -79,8 +75,6 @@ public abstract class InverseReferenceUpdateV1Test {
RequestContextV1.clear();
RequestContextV1.get().setUser(TestUtilsV2.TEST_USER);
- metadataService = TestUtils.addSessionCleanupWrapper(metadataService);
-
AtlasTypesDef[] testTypesDefs = new AtlasTypesDef[] { TestUtilsV2.defineDeptEmployeeTypes(),
TestUtilsV2.defineInverseReferenceTestTypes()
};
@@ -147,14 +141,14 @@ public abstract class InverseReferenceUpdateV1Test {
public void testInverseReferenceAutoUpdate_NonCompositeManyToOne() throws Exception {
AtlasEntityType bType = typeRegistry.getEntityTypeByName("B");
AtlasEntity a1 = new AtlasEntity("A");
- a1.setAttribute(NAME, TestUtils.randomString());
+ a1.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity a2 = new AtlasEntity("A");
- a2.setAttribute(NAME, TestUtils.randomString());
+ a2.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity a3 = new AtlasEntity("A");
- a3.setAttribute(NAME, TestUtils.randomString());
+ a3.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity b = new AtlasEntity("B");
- b.setAttribute(NAME, TestUtils.randomString());
+ b.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntitiesWithExtInfo atlasEntitiesWithExtInfo = new AtlasEntitiesWithExtInfo();
atlasEntitiesWithExtInfo.addEntity(a1);
atlasEntitiesWithExtInfo.addEntity(a2);
@@ -208,11 +202,11 @@ public abstract class InverseReferenceUpdateV1Test {
public void testInverseReferenceAutoUpdate_NonComposite_OneToOne() throws Exception {
AtlasEntityType bType = typeRegistry.getEntityTypeByName("B");
AtlasEntity a1 = new AtlasEntity("A");
- a1.setAttribute(NAME, TestUtils.randomString());
+ a1.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity a2 = new AtlasEntity("A");
- a2.setAttribute(NAME, TestUtils.randomString());
+ a2.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity b = new AtlasEntity("B");
- b.setAttribute(NAME, TestUtils.randomString());
+ b.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntitiesWithExtInfo atlasEntitiesWithExtInfo = new AtlasEntitiesWithExtInfo();
atlasEntitiesWithExtInfo.addEntity(a1);
atlasEntitiesWithExtInfo.addEntity(a2);
@@ -261,15 +255,15 @@ public abstract class InverseReferenceUpdateV1Test {
public void testInverseReferenceAutoUpdate_NonComposite_ManyToMany() throws Exception {
AtlasEntityType bType = typeRegistry.getEntityTypeByName("B");
AtlasEntity a1 = new AtlasEntity("A");
- a1.setAttribute(NAME, TestUtils.randomString());
+ a1.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity a2 = new AtlasEntity("A");
- a2.setAttribute(NAME, TestUtils.randomString());
+ a2.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity a3 = new AtlasEntity("A");
- a3.setAttribute(NAME, TestUtils.randomString());
+ a3.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity b1 = new AtlasEntity("B");
- b1.setAttribute(NAME, TestUtils.randomString());
+ b1.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity b2 = new AtlasEntity("B");
- b2.setAttribute(NAME, TestUtils.randomString());
+ b2.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntitiesWithExtInfo atlasEntitiesWithExtInfo = new AtlasEntitiesWithExtInfo();
atlasEntitiesWithExtInfo.addEntity(a1);
atlasEntitiesWithExtInfo.addEntity(a2);
@@ -297,13 +291,13 @@ public abstract class InverseReferenceUpdateV1Test {
@Test
public void testInverseReferenceAutoUpdate_Map() throws Exception {
AtlasEntity a1 = new AtlasEntity("A");
- a1.setAttribute(NAME, TestUtils.randomString());
+ a1.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity b1 = new AtlasEntity("B");
- b1.setAttribute(NAME, TestUtils.randomString());
+ b1.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity b2 = new AtlasEntity("B");
- b2.setAttribute(NAME, TestUtils.randomString());
+ b2.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntity b3 = new AtlasEntity("B");
- b3.setAttribute(NAME, TestUtils.randomString());
+ b3.setAttribute(NAME, TestUtilsV2.randomString());
AtlasEntitiesWithExtInfo atlasEntitiesWithExtInfo = new AtlasEntitiesWithExtInfo();
atlasEntitiesWithExtInfo.addEntity(a1);
atlasEntitiesWithExtInfo.addEntity(b1);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/SoftDeleteHandlerV1Test.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/SoftDeleteHandlerV1Test.java b/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/SoftDeleteHandlerV1Test.java
deleted file mode 100644
index 3f1ed03..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/SoftDeleteHandlerV1Test.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.store.graph.v1;
-
-
-import org.apache.atlas.AtlasClient;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.model.instance.AtlasEntity;
-import org.apache.atlas.model.instance.AtlasEntityHeader;
-import org.apache.atlas.model.instance.AtlasObjectId;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.services.MetadataService;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.testng.Assert;
-import org.testng.annotations.Guice;
-
-import javax.inject.Inject;
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.atlas.TestUtils.COLUMNS_ATTR_NAME;
-import static org.apache.atlas.TestUtils.NAME;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-
-@Guice(modules = TestModules.SoftDeleteModule.class)
-public class SoftDeleteHandlerV1Test extends AtlasDeleteHandlerV1Test {
-
- @Inject
- MetadataService metadataService;
-
- @Override
- protected void assertDeletedColumn(final AtlasEntity.AtlasEntityWithExtInfo tableInstance) throws AtlasBaseException {
- final List<AtlasObjectId> columns = (List<AtlasObjectId>) tableInstance.getEntity().getAttribute(COLUMNS_ATTR_NAME);
- Assert.assertEquals(columns.size(), 3);
-
- final AtlasEntity.AtlasEntityWithExtInfo colDeleted = entityStore.getById(columns.get(0).getGuid());
- assertEquals(colDeleted.getEntity().getStatus(), AtlasEntity.Status.DELETED);
- }
-
- @Override
- protected void assertTestDeleteEntities(final AtlasEntity.AtlasEntityWithExtInfo tableInstance) throws Exception {
- //Assert that the deleted table can be fully constructed back
- List<IReferenceableInstance> columns = (List<IReferenceableInstance>) tableInstance.getEntity().getAttribute(COLUMNS_ATTR_NAME);
- assertEquals(columns.size(), 3);
- assertNotNull(tableInstance.getEntity().getAttribute("database"));
- }
-
- @Override
- protected void assertTableForTestDeleteReference(final String tableId) throws Exception {
-
- ITypedReferenceableInstance table = metadataService.getEntityDefinition(tableId);
- assertNotNull(table.get(NAME));
- assertNotNull(table.get("description"));
- assertNotNull(table.get("type"));
- assertNotNull(table.get("tableType"));
- assertNotNull(table.get("created"));
-
- Id dbId = (Id) table.get("database");
- assertNotNull(dbId);
-
- ITypedReferenceableInstance db = metadataService.getEntityDefinition(dbId.getId()._getId());
- assertNotNull(db);
- assertEquals(db.getId().getState(), Id.EntityState.ACTIVE);
-
- }
-
- @Override
- protected void assertColumnForTestDeleteReference(final AtlasEntity.AtlasEntityWithExtInfo tableInstance) throws AtlasBaseException {
- List<AtlasObjectId> columns = (List<AtlasObjectId>) tableInstance.getEntity().getAttribute(COLUMNS_ATTR_NAME);
- assertEquals(columns.size(), 1);
-
- final AtlasEntity.AtlasEntityWithExtInfo byId = entityStore.getById(columns.get(0).getGuid());
- assertEquals(byId.getEntity().getStatus(), AtlasEntity.Status.DELETED);
- }
-
- @Override
- protected void assertProcessForTestDeleteReference(final AtlasEntityHeader processInstance) throws Exception {
- //
- ITypedReferenceableInstance process = metadataService.getEntityDefinition(processInstance.getGuid());
- List<ITypedReferenceableInstance> outputs =
- (List<ITypedReferenceableInstance>) process.get(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS);
- List<ITypedReferenceableInstance> expectedOutputs =
- (List<ITypedReferenceableInstance>) process.get(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS);
- assertEquals(outputs.size(), expectedOutputs.size());
-
- }
-
- @Override
- protected void assertEntityDeleted(final String id) throws Exception {
- final AtlasEntity.AtlasEntityWithExtInfo byId = entityStore.getById(id);
- assertEquals(byId.getEntity().getStatus(), AtlasEntity.Status.DELETED);
- }
-
- @Override
- protected void assertTestUpdateEntity_MultiplicityOneNonCompositeReference(final String janeGuid) throws Exception {
- // Verify Jane's subordinates reference cardinality is still 2.
- ITypedReferenceableInstance jane = metadataService.getEntityDefinition(janeGuid);
- List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
- Assert.assertEquals(subordinates.size(), 2);
- }
-
- @Override
- protected void assertJohnForTestDisconnectBidirectionalReferences(final AtlasEntity.AtlasEntityWithExtInfo john, final String janeGuid) throws Exception {
- AtlasObjectId mgr = (AtlasObjectId) john.getEntity().getAttribute("manager");
- assertNotNull(mgr);
- assertEquals(mgr.getGuid(), janeGuid);
-
-
- final AtlasEntity.AtlasEntityWithExtInfo mgrEntity = entityStore.getById(mgr.getGuid());
- assertEquals(mgrEntity.getEntity().getStatus(), AtlasEntity.Status.DELETED);
- }
-
- @Override
- protected void assertMaxForTestDisconnectBidirectionalReferences(final Map<String, String> nameGuidMap) throws Exception {
-
- // Verify that the Department.employees reference to the deleted employee
- // was disconnected.
- ITypedReferenceableInstance hrDept = metadataService.getEntityDefinition(nameGuidMap.get("hr"));
- List<ITypedReferenceableInstance> employees = (List<ITypedReferenceableInstance>) hrDept.get("employees");
- Assert.assertEquals(employees.size(), 4);
- String maxGuid = nameGuidMap.get("Max");
- for (ITypedReferenceableInstance employee : employees) {
- if (employee.getId()._getId().equals(maxGuid)) {
- assertEquals(employee.getId().getState(), Id.EntityState.DELETED);
- }
- }
-
- // Verify that the Manager.subordinates still references deleted employee
- ITypedReferenceableInstance jane = metadataService.getEntityDefinition(nameGuidMap.get("Jane"));
- List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
- assertEquals(subordinates.size(), 2);
- for (ITypedReferenceableInstance subordinate : subordinates) {
- if (subordinate.getId()._getId().equals(maxGuid)) {
- assertEquals(subordinate.getId().getState(), Id.EntityState.DELETED);
- }
- }
-
- // Verify that max's Person.mentor unidirectional reference to john was disconnected.
- ITypedReferenceableInstance john = metadataService.getEntityDefinition(nameGuidMap.get("John"));
- Id mentor = (Id) john.get("mentor");
- assertEquals(mentor._getId(), maxGuid);
- assertEquals(mentor.getState(), Id.EntityState.DELETED);
-
- }
-
- @Override
- protected void assertTestDisconnectUnidirectionalArrayReferenceFromClassType(final List<AtlasObjectId> columns, final String columnGuid) throws AtlasBaseException {
- Assert.assertEquals(columns.size(), 3);
- for (AtlasObjectId column : columns) {
- AtlasEntity.AtlasEntityWithExtInfo columnEntity = entityStore.getById(column.getGuid());
- if (column.getGuid().equals(columnGuid)) {
- assertEquals(columnEntity.getEntity().getStatus(), AtlasEntity.Status.DELETED);
- } else {
- assertEquals(columnEntity.getEntity().getStatus(), AtlasEntity.Status.ACTIVE);
- }
- }
- }
-
- @Override
- protected void assertTestDisconnectMapReferenceFromClassType(final String mapOwnerGuid) throws Exception {
- AtlasEntity.AtlasEntityWithExtInfo mapOwnerInstance = entityStore.getById(mapOwnerGuid);
- Map<String, AtlasObjectId> map =
- (Map<String, AtlasObjectId>) mapOwnerInstance.getEntity().getAttribute("map");
- assertNotNull(map);
- assertEquals(map.size(), 1);
- Map<String, AtlasObjectId> biMap =
- (Map<String, AtlasObjectId>) mapOwnerInstance.getEntity().getAttribute("biMap");
- assertNotNull(biMap);
- assertEquals(biMap.size(), 1);
- }
-
- @Override
- protected void assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(final String structContainerGuid) throws Exception {
- // Verify that the unidirectional references from the struct and trait instances
- // to the deleted entities were not disconnected.
- ITypedReferenceableInstance structContainerConvertedEntity =
- metadataService.getEntityDefinition(structContainerGuid);
- ITypedStruct struct = (ITypedStruct) structContainerConvertedEntity.get("struct");
- assertNotNull(struct.get("target"));
- IStruct trait = structContainerConvertedEntity.getTrait("TestTrait");
- assertNotNull(trait);
- assertNotNull(trait.get("target"));
-
- }
-
- @Override
- protected void assertVerticesDeleted(List<AtlasVertex> vertices) {
- for (AtlasVertex vertex : vertices) {
- assertEquals(GraphHelper.getSingleValuedProperty(vertex, Constants.STATE_PROPERTY_KEY, String.class), Id.EntityState.DELETED.name());
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java b/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java
deleted file mode 100755
index 6e3dabb..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/typestore/GraphBackedTypeStoreTest.java
+++ /dev/null
@@ -1,256 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.typestore;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.*;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.util.Collections;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createStructTypeDef;
-
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class GraphBackedTypeStoreTest {
-
- private static final String DESCRIPTION = "_description";
-
- @Inject
- private ITypeStore typeStore;
-
- private TypeSystem ts;
-
- @BeforeClass
- public void setUp() throws Exception {
- ts = TypeSystem.getInstance();
- ts.reset();
- TestUtils.defineDeptEmployeeTypes(ts);
- }
-
- @AfterClass
- public void tearDown() throws Exception {
- ts.reset();
-// AtlasGraphProvider.cleanup();
- }
-
-
- @Test
- public void testStore() throws AtlasException {
- ImmutableList<String> typeNames = ts.getTypeNames();
- typeStore.store(ts, typeNames);
- dumpGraph();
- }
-
- @Test(dependsOnMethods = "testStore")
- public void testRestoreType() throws Exception {
- TypesDef typesDef = typeStore.restoreType("Manager");
- verifyRestoredClassType(typesDef, "Manager");
- }
-
- private void dumpGraph() {
- AtlasGraph<?, ?> graph = TestUtils.getGraph();
- for (AtlasVertex<?,?> v : graph.getVertices()) {
- System.out.println("****v = " + GraphHelper.vertexString(v));
- for (AtlasEdge<?,?> e : v.getEdges(AtlasEdgeDirection.OUT)) {
- System.out.println("****e = " + GraphHelper.edgeString(e));
- }
- }
- }
-
- @Test(dependsOnMethods = "testStore")
- public void testRestore() throws Exception {
- TypesDef types = typeStore.restore();
-
- //validate enum
- List<EnumTypeDefinition> enumTypes = types.enumTypesAsJavaList();
- Assert.assertEquals(1, enumTypes.size());
- EnumTypeDefinition orgLevel = enumTypes.get(0);
- Assert.assertEquals(orgLevel.name, "OrgLevel");
- Assert.assertEquals(orgLevel.description, "OrgLevel"+DESCRIPTION);
- Assert.assertEquals(orgLevel.enumValues.length, 2);
- EnumValue enumValue = orgLevel.enumValues[0];
- Assert.assertEquals(enumValue.value, "L1");
- Assert.assertEquals(enumValue.ordinal, 1);
-
- //validate class
- List<StructTypeDefinition> structTypes = types.structTypesAsJavaList();
- Assert.assertEquals(1, structTypes.size());
-
- verifyRestoredClassType(types, "Manager");
-
- //validate trait
- List<HierarchicalTypeDefinition<TraitType>> traitTypes = types.traitTypesAsJavaList();
- Assert.assertEquals(1, traitTypes.size());
- HierarchicalTypeDefinition<TraitType> trait = traitTypes.get(0);
- Assert.assertEquals("SecurityClearance", trait.typeName);
- Assert.assertEquals(trait.typeName+DESCRIPTION, trait.typeDescription);
- Assert.assertEquals(1, trait.attributeDefinitions.length);
- AttributeDefinition attribute = trait.attributeDefinitions[0];
- Assert.assertEquals("level", attribute.name);
- Assert.assertEquals(DataTypes.INT_TYPE.getName(), attribute.dataTypeName);
-
- //validate the new types
- ts.reset();
- ts.defineTypes(types);
- }
-
- @Test
- public void testTypeWithSpecialChars() throws AtlasException {
- HierarchicalTypeDefinition<ClassType> specialTypeDef1 = createClassTypeDef("SpecialTypeDef1", "Typedef with special character",
- ImmutableSet.<String>of(), createRequiredAttrDef("attribute$", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<ClassType> specialTypeDef2 = createClassTypeDef("SpecialTypeDef2", "Typedef with special character",
- ImmutableSet.<String>of(), createRequiredAttrDef("attribute%", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<ClassType> specialTypeDef3 = createClassTypeDef("SpecialTypeDef3", "Typedef with special character",
- ImmutableSet.<String>of(), createRequiredAttrDef("attribute{", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<ClassType> specialTypeDef4 = createClassTypeDef("SpecialTypeDef4", "Typedef with special character",
- ImmutableSet.<String>of(), createRequiredAttrDef("attribute}", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<ClassType> specialTypeDef5 = createClassTypeDef("SpecialTypeDef5", "Typedef with special character",
- ImmutableSet.<String>of(), createRequiredAttrDef("attribute$%{}", DataTypes.STRING_TYPE));
-
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(specialTypeDef1, specialTypeDef2, specialTypeDef3, specialTypeDef4, specialTypeDef5));
-
- Map<String, IDataType> createdTypes = ts.defineTypes(typesDef);
- typeStore.store(ts, ImmutableList.copyOf(createdTypes.keySet()));
-
- //Validate the updated types
- TypesDef types = typeStore.restore();
- ts.reset();
- ts.defineTypes(types);
- }
-
- @Test(dependsOnMethods = "testStore")
- public void testTypeUpdate() throws Exception {
- //Add enum value
- String _description = "_description_updated";
- EnumTypeDefinition orgLevelEnum = new EnumTypeDefinition("OrgLevel", "OrgLevel"+_description, new EnumValue("L1", 1),
- new EnumValue("L2", 2), new EnumValue("L3", 3));
-
- //Add attribute
- StructTypeDefinition addressDetails =
- createStructTypeDef("Address", createRequiredAttrDef("street", DataTypes.STRING_TYPE),
- createRequiredAttrDef("city", DataTypes.STRING_TYPE),
- createOptionalAttrDef("state", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department", "Department"+_description,
- ImmutableSet.<String>of(), createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.OPTIONAL,
- true, "department"),
- new AttributeDefinition("positions", String.format("map<%s,%s>", DataTypes.STRING_TYPE.getName(), "Person"), Multiplicity.OPTIONAL, false, null));
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.of(orgLevelEnum), ImmutableList.of(addressDetails),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(deptTypeDef));
-
- Map<String, IDataType> typesAdded = ts.updateTypes(typesDef);
- typeStore.store(ts, ImmutableList.copyOf(typesAdded.keySet()));
-
- verifyEdges();
-
- //Validate the updated types
- TypesDef types = typeStore.restore();
- ts.reset();
- ts.defineTypes(types);
-
- //Assert new enum value
- EnumType orgLevel = ts.getDataType(EnumType.class, orgLevelEnum.name);
- Assert.assertEquals(orgLevel.name, orgLevelEnum.name);
- Assert.assertEquals(orgLevel.description, orgLevelEnum.description);
- Assert.assertEquals(orgLevel.values().size(), orgLevelEnum.enumValues.length);
- Assert.assertEquals(orgLevel.fromValue("L3").ordinal, 3);
-
- //Assert new attribute
- StructType addressType = ts.getDataType(StructType.class, addressDetails.typeName);
- Assert.assertEquals(addressType.numFields, 3);
- Assert.assertEquals(addressType.fieldMapping.fields.get("state").dataType(), DataTypes.STRING_TYPE);
-
- //Updating the definition again shouldn't add another edge
- typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(deptTypeDef));
- typesAdded = ts.updateTypes(typesDef);
- typeStore.store(ts, ImmutableList.copyOf(typesAdded.keySet()));
- verifyEdges();
- }
-
- private void verifyEdges() throws RepositoryException {
- // ATLAS-474: verify that type update did not write duplicate edges to the type store.
- if (typeStore instanceof GraphBackedTypeStore) {
- GraphBackedTypeStore gbTypeStore = (GraphBackedTypeStore) typeStore;
- AtlasVertex typeVertex = gbTypeStore.findVertices(Collections.singletonList("Department")).get("Department");
- int edgeCount = countOutgoingEdges(typeVertex, gbTypeStore.getEdgeLabel("Department", "employees"));
- Assert.assertEquals(edgeCount, 1, "Should only be 1 edge for employees attribute on Department type AtlasVertex");
- }
- }
-
- private int countOutgoingEdges(AtlasVertex typeVertex, String edgeLabel) {
-
- Iterator<AtlasEdge> outGoingEdgesByLabel = GraphHelper.getInstance().getOutGoingEdgesByLabel(typeVertex, edgeLabel);
- int edgeCount = 0;
- for (; outGoingEdgesByLabel.hasNext();) {
- outGoingEdgesByLabel.next();
- edgeCount++;
- }
- return edgeCount;
- }
-
- private void verifyRestoredClassType(TypesDef types, String typeName) throws AtlasException {
- boolean clsTypeFound = false;
- List<HierarchicalTypeDefinition<ClassType>> classTypes = types.classTypesAsJavaList();
- for (HierarchicalTypeDefinition<ClassType> classType : classTypes) {
- if (classType.typeName.equals(typeName)) {
- ClassType expectedType = ts.getDataType(ClassType.class, classType.typeName);
- Assert.assertEquals(expectedType.immediateAttrs.size(), classType.attributeDefinitions.length);
- Assert.assertEquals(expectedType.superTypes.size(), classType.superTypes.size());
- Assert.assertEquals(classType.typeDescription, classType.typeName+DESCRIPTION);
- clsTypeFound = true;
- }
- }
- Assert.assertTrue(clsTypeFound, typeName + " type not restored");
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/typestore/StoreBackedTypeCacheTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/typestore/StoreBackedTypeCacheTest.java b/repository/src/test/java/org/apache/atlas/repository/typestore/StoreBackedTypeCacheTest.java
deleted file mode 100644
index 2ea63ff..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/typestore/StoreBackedTypeCacheTest.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.typestore;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.TypeUtils;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.util.HashMap;
-import java.util.Map;
-
-
-/**
- * Unit test for {@link StoreBackedTypeCache}
- */
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class StoreBackedTypeCacheTest {
-
- @Inject
- private ITypeStore typeStore;
-
- @Inject
- private StoreBackedTypeCache typeCache;
-
- private TypeSystem ts;
-
- private Map<String, ClassType> classTypesToTest = new HashMap<>();
-
- @Inject
- public StoreBackedTypeCacheTest() {
- }
-
- @BeforeClass
- public void setUp() throws Exception {
- //force graph to be initialized up front
- TestUtils.getGraph();
-
- ts = TypeSystem.getInstance();
- ts.reset();
- ts.setTypeCache(typeCache);
-
- // Populate the type store for testing.
- TestUtils.defineDeptEmployeeTypes(ts);
- TestUtils.createHiveTypes(ts);
- ImmutableList<String> typeNames = ts.getTypeNames();
- typeStore.store(ts, typeNames);
-
- ClassType type = ts.getDataType(ClassType.class, "Manager");
- classTypesToTest.put("Manager", type);
- type = ts.getDataType(ClassType.class, TestUtils.TABLE_TYPE);
- classTypesToTest.put(TestUtils.TABLE_TYPE, type);
- }
-
- @AfterClass
- public void tearDown() throws Exception {
- ts.reset();
-// AtlasGraphProvider.cleanup();
- }
-
- @BeforeMethod
- public void setupTestMethod() throws Exception {
- typeCache.clear();
- }
-
- @Test
- public void testGetClassType() throws Exception {
- for (Map.Entry<String, ClassType> typeEntry : classTypesToTest.entrySet()) {
- // Not cached yet
- Assert.assertFalse(typeCache.isCachedInMemory(typeEntry.getKey()));
-
- IDataType dataType = ts.getDataType(IDataType.class, typeEntry.getKey());
- // Verify the type is now cached.
- Assert.assertTrue(typeCache.isCachedInMemory(typeEntry.getKey()));
-
- Assert.assertTrue(dataType instanceof ClassType);
- ClassType cachedType = (ClassType)dataType;
- // Verify that get() also loaded and cached any dependencies of this type from the type store.
- verifyHierarchicalType(cachedType, typeEntry.getValue());
- }
- }
-
- @Test
- public void testGetTraitType() throws Exception {
- ImmutableList<String> traitNames = ts.getTypeNamesByCategory(TypeCategory.TRAIT);
- for (String traitTypeName : traitNames) {
- // Not cached yet
- Assert.assertFalse(typeCache.isCachedInMemory(traitTypeName));
-
- IDataType dataType = typeCache.get(traitTypeName);
- // Verify the type is now cached.
- Assert.assertTrue(typeCache.isCachedInMemory(traitTypeName));
-
- Assert.assertTrue(dataType instanceof TraitType);
- TraitType cachedType = (TraitType)dataType;
- // Verify that get() also loaded and cached any dependencies of this type from the type store.
- verifyHierarchicalType(cachedType, ts.getDataType(TraitType.class, traitTypeName));
- }
- }
-
- private <T extends HierarchicalType> void verifyHierarchicalType(T dataType, T expectedDataType) throws AtlasException {
- Assert.assertEquals(dataType.numFields, expectedDataType.numFields);
- Assert.assertEquals(dataType.immediateAttrs.size(), expectedDataType.immediateAttrs.size());
- Assert.assertEquals(dataType.fieldMapping().fields.size(), expectedDataType.fieldMapping().fields.size());
- ImmutableSet<String> superTypes = dataType.superTypes;
- Assert.assertEquals(superTypes.size(), expectedDataType.superTypes.size());
-
- // Verify that any attribute and super types were also cached.
- for (String superTypeName : superTypes) {
- Assert.assertTrue(typeCache.has(superTypeName));
- }
- for (AttributeInfo attrInfo : dataType.fieldMapping().fields.values()) {
- switch (attrInfo.dataType().getTypeCategory()) {
- case CLASS:
- case STRUCT:
- case ENUM:
- Assert.assertTrue(typeCache.has(attrInfo.dataType().getName()), attrInfo.dataType().getName() + " should be cached");
- break;
- case ARRAY:
- String elementTypeName = TypeUtils.parseAsArrayType(attrInfo.dataType().getName());
- if (!ts.getCoreTypes().contains(elementTypeName)) {
- Assert.assertTrue(typeCache.has(elementTypeName), elementTypeName + " should be cached");
- }
- break;
- case MAP:
- String[] mapTypeNames = TypeUtils.parseAsMapType(attrInfo.dataType().getName());
- for (String typeName : mapTypeNames) {
- if (!ts.getCoreTypes().contains(typeName)) {
- Assert.assertTrue(typeCache.has(typeName), typeName + " should be cached");
- }
- }
- break;
- default:
- break;
- }
- }
- }
-}
[39/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/notification/HookNotificationV1.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/notification/HookNotificationV1.java b/intg/src/main/java/org/apache/atlas/v1/model/notification/HookNotificationV1.java
new file mode 100644
index 0000000..c70e7d0
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/notification/HookNotificationV1.java
@@ -0,0 +1,357 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.v1.model.notification;
+
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.typedef.TypesDef;
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+/**
+ * Contains the structure of messages transferred from hooks to atlas.
+ */
+public class HookNotificationV1 {
+
+ /**
+ * Hook message for create type definitions.
+ */
+ @JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+ @JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+ @JsonIgnoreProperties(ignoreUnknown=true)
+ @XmlRootElement
+ @XmlAccessorType(XmlAccessType.PROPERTY)
+ public static class TypeRequest extends HookNotification implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private TypesDef typesDef;
+
+ public TypeRequest() {
+ }
+
+ public TypeRequest(HookNotificationType type, TypesDef typesDef, String user) {
+ super(type, user);
+ this.typesDef = typesDef;
+ }
+
+ public TypesDef getTypesDef() {
+ return typesDef;
+ }
+
+ public void setTypesDef(TypesDef typesDef) {
+ this.typesDef = typesDef;
+ }
+
+ @Override
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("TypeRequest{");
+ super.toString(sb);
+ sb.append("typesDef=");
+ if (typesDef != null) {
+ typesDef.toString(sb);
+ }
+ sb.append("}");
+
+ return sb;
+ }
+ }
+
+ /**
+ * Hook message for creating new entities.
+ */
+ @JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+ @JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+ @JsonIgnoreProperties(ignoreUnknown=true)
+ @XmlRootElement
+ @XmlAccessorType(XmlAccessType.PROPERTY)
+ public static class EntityCreateRequest extends HookNotification implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private List<Referenceable> entities;
+
+ public EntityCreateRequest() {
+ }
+
+ public EntityCreateRequest(String user, Referenceable... entities) {
+ this(HookNotificationType.ENTITY_CREATE, Arrays.asList(entities), user);
+ }
+
+ public EntityCreateRequest(String user, List<Referenceable> entities) {
+ this(HookNotificationType.ENTITY_CREATE, entities, user);
+ }
+
+ protected EntityCreateRequest(HookNotificationType type, List<Referenceable> entities, String user) {
+ super(type, user);
+
+ this.entities = entities;
+ }
+
+ public List<Referenceable> getEntities() {
+ return entities;
+ }
+
+ public void setEntities(List<Referenceable> entities) {
+ this.entities = entities;
+ }
+
+ @Override
+ public void normalize() {
+ super.normalize();
+
+ if (entities != null) {
+ for (Referenceable entity : entities) {
+ if (entity != null) {
+ entity.normailze();
+ }
+ }
+ }
+ }
+
+ @Override
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("EntityCreateRequest{");
+ super.toString(sb);
+ sb.append("entities=[");
+ AtlasBaseTypeDef.dumpObjects(getEntities(), sb);
+ sb.append("]");
+ sb.append("}");
+
+ return sb;
+ }
+ }
+
+ /**
+ * Hook message for updating entities(full update).
+ */
+ @JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+ @JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+ @JsonIgnoreProperties(ignoreUnknown=true)
+ @XmlRootElement
+ @XmlAccessorType(XmlAccessType.PROPERTY)
+ public static class EntityUpdateRequest extends EntityCreateRequest implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ public EntityUpdateRequest() {
+ }
+
+ public EntityUpdateRequest(String user, Referenceable... entities) {
+ this(user, Arrays.asList(entities));
+ }
+
+ public EntityUpdateRequest(String user, List<Referenceable> entities) {
+ super(HookNotificationType.ENTITY_FULL_UPDATE, entities, user);
+ }
+
+ @Override
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("EntityUpdateRequest{");
+ super.toString(sb);
+ sb.append("entities=[");
+ AtlasBaseTypeDef.dumpObjects(getEntities(), sb);
+ sb.append("]");
+ sb.append("}");
+
+ return sb;
+ }
+ }
+
+ /**
+ * Hook message for updating entities(partial update).
+ */
+ @JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+ @JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+ @JsonIgnoreProperties(ignoreUnknown=true)
+ @XmlRootElement
+ @XmlAccessorType(XmlAccessType.PROPERTY)
+ public static class EntityPartialUpdateRequest extends HookNotification {
+ private static final long serialVersionUID = 1L;
+
+ private String typeName;
+ private String attribute;
+ private String attributeValue;
+ private Referenceable entity;
+
+ public EntityPartialUpdateRequest() {
+ }
+
+ public EntityPartialUpdateRequest(String user, String typeName, String attribute, String attributeValue, Referenceable entity) {
+ super(HookNotificationType.ENTITY_PARTIAL_UPDATE, user);
+
+ this.typeName = typeName;
+ this.attribute = attribute;
+ this.attributeValue = attributeValue;
+ this.entity = entity;
+ }
+
+ public String getTypeName() {
+ return typeName;
+ }
+
+ public void setTypeName(String typeName) {
+ this.typeName = typeName;
+ }
+
+ public String getAttribute() {
+ return attribute;
+ }
+
+ public void setAttribute(String attribute) {
+ this.attribute = attribute;
+ }
+
+ public String getAttributeValue() {
+ return attributeValue;
+ }
+
+ public void setAttributeValue(String attributeValue) {
+ this.attributeValue = attributeValue;
+ }
+
+ public Referenceable getEntity() {
+ return entity;
+ }
+
+ public void setEntity(Referenceable entity) {
+ this.entity = entity;
+ }
+
+ @Override
+ public void normalize() {
+ super.normalize();
+
+ if (entity != null) {
+ entity.normailze();
+ }
+ }
+
+ @Override
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("EntityPartialUpdateRequest{");
+ super.toString(sb);
+ sb.append("typeName=").append(typeName);
+ sb.append("attribute=").append(attribute);
+ sb.append("attributeValue=").append(attributeValue);
+ sb.append("entity=");
+ if (entity != null) {
+ entity.toString(sb);
+ }
+ sb.append("}");
+
+ return sb;
+ }
+ }
+
+ /**
+ * Hook message for entity delete.
+ */
+ @JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+ @JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+ @JsonIgnoreProperties(ignoreUnknown=true)
+ @XmlRootElement
+ @XmlAccessorType(XmlAccessType.PROPERTY)
+ public static class EntityDeleteRequest extends HookNotification implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private String typeName;
+ private String attribute;
+ private String attributeValue;
+
+ public EntityDeleteRequest() {
+ }
+
+ public EntityDeleteRequest(String user, String typeName, String attribute, String attributeValue) {
+ this(HookNotificationType.ENTITY_DELETE, user, typeName, attribute, attributeValue);
+ }
+
+ protected EntityDeleteRequest(HookNotificationType type, String user, String typeName, String attribute, String attributeValue) {
+ super(type, user);
+
+ this.typeName = typeName;
+ this.attribute = attribute;
+ this.attributeValue = attributeValue;
+ }
+
+ public String getTypeName() {
+ return typeName;
+ }
+
+ public void setTypeName(String typeName) {
+ this.typeName = typeName;
+ }
+
+ public String getAttribute() {
+ return attribute;
+ }
+
+ public void setAttribute(String attribute) {
+ this.attribute = attribute;
+ }
+
+ public String getAttributeValue() {
+ return attributeValue;
+ }
+
+ public void setAttributeValue(String attributeValue) {
+ this.attributeValue = attributeValue;
+ }
+
+ @Override
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("EntityDeleteRequest{");
+ super.toString(sb);
+ sb.append("typeName=").append(typeName);
+ sb.append("attribute=").append(attribute);
+ sb.append("attributeValue=").append(attributeValue);
+ sb.append("}");
+
+ return sb;
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/typedef/AttributeDefinition.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/typedef/AttributeDefinition.java b/intg/src/main/java/org/apache/atlas/v1/model/typedef/AttributeDefinition.java
new file mode 100644
index 0000000..a64425c
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/typedef/AttributeDefinition.java
@@ -0,0 +1,179 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.typedef;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class AttributeDefinition implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private String name;
+ private String dataTypeName;
+ private Multiplicity multiplicity;
+ private boolean isComposite; // A composite is the one whose lifecycle is dependent on the enclosing type and is not just a reference
+ private boolean isUnique;
+ private boolean isIndexable;
+ private String reverseAttributeName; // If this is a reference attribute, then the name of the attribute on the Class that this refers to.
+ private String defaultValue;
+ private String description;
+
+
+
+ public AttributeDefinition() {
+ }
+
+ public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity) {
+ this(name, dataTypeName, multiplicity, false, false, true, null);
+ }
+
+ public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity, boolean isComposite,
+ String reverseAttributeName) {
+ this(name, dataTypeName, multiplicity, isComposite, false, false, reverseAttributeName);
+ }
+
+ public AttributeDefinition(String name, String dataTypeName, Multiplicity multiplicity, boolean isComposite, boolean isUnique, boolean isIndexable, String reverseAttributeName) {
+ this.name = name;
+ this.dataTypeName = dataTypeName;
+ this.multiplicity = multiplicity;
+ this.isComposite = isComposite;
+ this.isUnique = isUnique;
+ this.isIndexable = isIndexable;
+ this.reverseAttributeName = reverseAttributeName;
+ }
+
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getDataTypeName() {
+ return dataTypeName;
+ }
+
+ public void setDataTypeName(String dataTypeName) {
+ this.dataTypeName = dataTypeName;
+ }
+
+ public Multiplicity getMultiplicity() {
+ return multiplicity;
+ }
+
+ public void setMultiplicity(Multiplicity multiplicity) {
+ this.multiplicity = multiplicity;
+ }
+
+ public boolean getIsComposite() {
+ return isComposite;
+ }
+
+ public void setIsComposite(boolean isComposite) {
+ this.isComposite = isComposite;
+ }
+
+ public boolean getIsUnique() {
+ return isUnique;
+ }
+
+ public void setIsUnique(boolean isUnique) {
+ this.isUnique = isUnique;
+ }
+
+ public boolean getIsIndexable() {
+ return isIndexable;
+ }
+
+ public void setIsIndexable(boolean isIndexable) {
+ this.isIndexable = isIndexable;
+ }
+
+ public String getReverseAttributeName() {
+ return reverseAttributeName;
+ }
+
+ public void setReverseAttributeName(String reverseAttributeName) {
+ this.reverseAttributeName = reverseAttributeName;
+ }
+
+ public String getDefaultValue() {
+ return defaultValue;
+ }
+
+ public void setDefaultValue(final String defaultValue) {
+ this.defaultValue = defaultValue;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(final String description) {
+ this.description = description;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ AttributeDefinition that = (AttributeDefinition) o;
+
+ return isComposite == that.isComposite &&
+ isUnique == that.isUnique &&
+ isIndexable == that.isIndexable &&
+ Objects.equals(name, that.name) &&
+ Objects.equals(dataTypeName, that.dataTypeName) &&
+ Objects.equals(multiplicity, that.multiplicity) &&
+ Objects.equals(defaultValue, that.defaultValue) &&
+ Objects.equals(description, that.description) &&
+ Objects.equals(reverseAttributeName, that.reverseAttributeName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, dataTypeName, multiplicity, isComposite, isUnique, isIndexable,
+ reverseAttributeName, defaultValue, description);
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/typedef/ClassTypeDefinition.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/typedef/ClassTypeDefinition.java b/intg/src/main/java/org/apache/atlas/v1/model/typedef/ClassTypeDefinition.java
new file mode 100644
index 0000000..9e7e03c
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/typedef/ClassTypeDefinition.java
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.typedef;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.List;
+import java.util.Set;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class ClassTypeDefinition extends HierarchicalTypeDefinition implements Serializable {
+ private static final long serialVersionUID = 1L;
+ private static final String META_TYPE_NAME = "org.apache.atlas.typesystem.types.ClassType";
+
+
+ public ClassTypeDefinition() {
+ }
+
+ public ClassTypeDefinition(String typeName, String typeDescription, String typeVersion, List<AttributeDefinition> attributeDefinitions, Set<String> superTypes) {
+ super(typeName, typeDescription, typeVersion, attributeDefinitions, META_TYPE_NAME, superTypes);
+ }
+
+ @Override
+ public String getHierarchicalMetaTypeName() {
+ return META_TYPE_NAME;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/typedef/EnumTypeDefinition.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/typedef/EnumTypeDefinition.java b/intg/src/main/java/org/apache/atlas/v1/model/typedef/EnumTypeDefinition.java
new file mode 100644
index 0000000..d2fdaf8
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/typedef/EnumTypeDefinition.java
@@ -0,0 +1,174 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.typedef;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.List;
+import java.util.Objects;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class EnumTypeDefinition implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private String name;
+ private String description;
+ private String version;
+ private List<EnumValue> enumValues;
+
+
+ public EnumTypeDefinition() {
+ }
+
+ public EnumTypeDefinition(String name, String description, String version, List<EnumValue> enumValues) {
+ this.name = name;
+ this.description = description;
+ this.version = version;
+ this.enumValues = enumValues;
+ }
+
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public String getVersion() {
+ return version;
+ }
+
+ public void setVersion(String version) {
+ this.version = version;
+ }
+
+ public List<EnumValue> getEnumValues() {
+ return enumValues;
+ }
+
+ public void setEnumValues(List<EnumValue> enumValues) {
+ this.enumValues = enumValues;
+ }
+
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ EnumTypeDefinition that = (EnumTypeDefinition) o;
+
+ return Objects.equals(name, that.name) &&
+ Objects.equals(description, that.description) &&
+ Objects.equals(version, that.version) &&
+ Objects.equals(enumValues, that.enumValues);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, description, version, enumValues);
+ }
+
+
+ @JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+ @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+ @JsonIgnoreProperties(ignoreUnknown=true)
+ @XmlRootElement
+ @XmlAccessorType(XmlAccessType.PROPERTY)
+ public static class EnumValue implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private String value;
+ private int ordinal;
+
+ public EnumValue() {
+ }
+
+ public EnumValue(String value, int ordinal) {
+ this.value = value;
+ this.ordinal = ordinal;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ public void setValue(String value) {
+ this.value = value;
+ }
+
+ public int getOrdinal() {
+ return ordinal;
+ }
+
+ public void setOrdinal(int ordinal) {
+ this.ordinal = ordinal;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ EnumValue that = (EnumValue) o;
+
+ return ordinal == that.ordinal &&
+ Objects.equals(value, that.value);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(value, ordinal);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/typedef/HierarchicalTypeDefinition.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/typedef/HierarchicalTypeDefinition.java b/intg/src/main/java/org/apache/atlas/v1/model/typedef/HierarchicalTypeDefinition.java
new file mode 100644
index 0000000..65d63a7
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/typedef/HierarchicalTypeDefinition.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.typedef;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class HierarchicalTypeDefinition extends StructTypeDefinition implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+
+ private String hierarchicalMetaTypeName = "org.apache.atlas.typesystem.types.TraitType";
+ private Set<String> superTypes;
+
+
+ public HierarchicalTypeDefinition() {
+ }
+
+ public HierarchicalTypeDefinition(String typeName, String typeDescription, String typeVersion, List<AttributeDefinition> attributeDefinitions, String hierarchicalMetaTypeName, Set<String> superTypes) {
+ super(typeName, typeDescription, typeVersion, attributeDefinitions);
+
+ this.hierarchicalMetaTypeName = hierarchicalMetaTypeName;
+ this.superTypes = superTypes;
+ }
+
+ public String getHierarchicalMetaTypeName() {
+ return hierarchicalMetaTypeName;
+ }
+
+ public void setHierarchicalMetaTypeName(String hierarchicalMetaTypeName) {
+ this.hierarchicalMetaTypeName = hierarchicalMetaTypeName;
+ }
+
+ public Set<String> getSuperTypes() {
+ return superTypes;
+ }
+
+ public void setSuperTypes(Set<String> superTypes) {
+ this.superTypes = superTypes;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+
+ if (o == null || getClass() != o.getClass() || !super.equals(o)) {
+ return false;
+ }
+
+ HierarchicalTypeDefinition that = (HierarchicalTypeDefinition) o;
+
+ return Objects.equals(superTypes, that.superTypes) &&
+ Objects.equals(hierarchicalMetaTypeName, that.hierarchicalMetaTypeName);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(super.hashCode(), superTypes, hierarchicalMetaTypeName);
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/typedef/Multiplicity.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/typedef/Multiplicity.java b/intg/src/main/java/org/apache/atlas/v1/model/typedef/Multiplicity.java
new file mode 100644
index 0000000..653151b
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/typedef/Multiplicity.java
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.typedef;
+
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.DeserializationContext;
+import org.codehaus.jackson.map.JsonDeserializer;
+import org.codehaus.jackson.map.JsonSerializer;
+import org.codehaus.jackson.map.SerializerProvider;
+import org.codehaus.jackson.map.annotate.JsonDeserialize;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.Objects;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
@JsonSerialize(using = Multiplicity.MultiplicitySerializer.class, include=JsonSerialize.Inclusion.NON_NULL)
@JsonDeserialize(using = Multiplicity.MultiplicityDeserializer.class)
@JsonIgnoreProperties(ignoreUnknown=true)
@XmlRootElement
@XmlAccessorType(XmlAccessType.PROPERTY)
public class Multiplicity implements Serializable {
    private static final long serialVersionUID = 1L;

    // Well-known multiplicities; the custom (de)serializers below map these to/from the
    // string names "optional"/"required"/"collection"/"set" on the wire.
    // NOTE(review): these shared instances are mutable via the public setters — a caller
    // invoking e.g. Multiplicity.REQUIRED.setLower(0) would corrupt global state. Consider
    // treating them as read-only by convention until the API can be hardened.
    public static final Multiplicity OPTIONAL   = new Multiplicity(0, 1, false);
    public static final Multiplicity REQUIRED   = new Multiplicity(1, 1, false);
    public static final Multiplicity COLLECTION = new Multiplicity(1, Integer.MAX_VALUE, false);
    public static final Multiplicity SET        = new Multiplicity(1, Integer.MAX_VALUE, true);

    private int     lower;     // minimum number of values (0 => optional)
    private int     upper;     // maximum number of values (Integer.MAX_VALUE => unbounded)
    private boolean isUnique;  // true => values form a set (no duplicates)

    /** Defaults to REQUIRED (lower=1, upper=1, not unique). */
    public Multiplicity() {
        this(Multiplicity.REQUIRED);
    }

    /** Copy constructor; used by the deserializer to avoid handing out the shared constants. */
    public Multiplicity(Multiplicity copyFrom) {
        this(copyFrom.lower, copyFrom.upper, copyFrom.isUnique);
    }

    public Multiplicity(int lower, int upper, boolean isUnique) {
        this.lower = lower;
        this.upper = upper;
        this.isUnique = isUnique;
    }

    public int getLower() {
        return lower;
    }

    public void setLower(int lower) {
        this.lower = lower;
    }

    public int getUpper() {
        return upper;
    }

    public void setUpper(int upper) {
        this.upper = upper;
    }

    public boolean getIsUnique() {
        return isUnique;
    }

    public void setIsUnique(boolean isUnique) {
        this.isUnique = isUnique;
    }


    /** Value equality over (lower, upper, isUnique); also what the serializer uses to classify. */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }

        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        Multiplicity that = (Multiplicity) o;

        return lower == that.lower &&
               upper == that.upper &&
               isUnique == that.isUnique;
    }

    @Override
    public int hashCode() {
        return Objects.hash(lower, upper, isUnique);
    }


    /**
     * Writes a multiplicity as one of the legacy v1 string names.
     * NOTE(review): a multiplicity that matches none of the four well-known values is
     * silently written as NOTHING, which can leave the generator mid-field and produce
     * invalid JSON — confirm whether arbitrary (lower, upper) combinations can reach
     * this serializer, and if so emit a fallback representation.
     */
    static class MultiplicitySerializer extends JsonSerializer<Multiplicity> {
        @Override
        public void serialize(Multiplicity value, JsonGenerator jgen, SerializerProvider provider) throws IOException {
            if (value != null) {
                if (value.equals(Multiplicity.REQUIRED)) {
                    jgen.writeString("required");
                } else if (value.equals(Multiplicity.OPTIONAL)) {
                    jgen.writeString("optional");
                } else if (value.equals(Multiplicity.COLLECTION)) {
                    jgen.writeString("collection");
                } else if (value.equals(Multiplicity.SET)) {
                    jgen.writeString("set");
                }
            }
        }
    }

    /**
     * Reads one of the legacy v1 string names back into a fresh Multiplicity instance.
     * Unknown or null input falls back to the default constructor, i.e. REQUIRED.
     */
    static class MultiplicityDeserializer extends JsonDeserializer<Multiplicity> {
        @Override
        public Multiplicity deserialize(JsonParser parser, DeserializationContext context) throws IOException {
            Multiplicity ret = null;

            String value = parser.readValueAs(String.class);

            if (value != null) {
                if (value.equals("required")) {
                    ret = new Multiplicity(Multiplicity.REQUIRED);
                } else if (value.equals("optional")) {
                    ret = new Multiplicity(Multiplicity.OPTIONAL);
                } else if (value.equals("collection")) {
                    ret = new Multiplicity(Multiplicity.COLLECTION);
                } else if (value.equals("set")) {
                    ret = new Multiplicity(Multiplicity.SET);
                }
            }

            // Unrecognized token: default to REQUIRED rather than failing the parse.
            if (ret == null) {
                ret = new Multiplicity();
            }

            return ret;
        }
    }
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/typedef/StructTypeDefinition.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/typedef/StructTypeDefinition.java b/intg/src/main/java/org/apache/atlas/v1/model/typedef/StructTypeDefinition.java
new file mode 100644
index 0000000..842439d
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/typedef/StructTypeDefinition.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.typedef;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.List;
+import java.util.Objects;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class StructTypeDefinition implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private String typeName;
+ private String typeDescription;
+ private String typeVersion;
+ private List<AttributeDefinition> attributeDefinitions;
+
+
+ public StructTypeDefinition() {
+ }
+
+ public StructTypeDefinition(String typeName, String typeDescription, List<AttributeDefinition> attributeDefinitions) {
+ this(typeName, typeDescription, "1.0", attributeDefinitions);
+ }
+
+ public StructTypeDefinition(String typeName, String typeDescription, String typeVersion, List<AttributeDefinition> attributeDefinitions) {
+ this.typeName = typeName;
+ this.typeDescription = typeDescription;
+ this.typeVersion = typeVersion;
+ this.attributeDefinitions = attributeDefinitions;
+ }
+
+
+ public String getTypeName() {
+ return typeName;
+ }
+
+ public void setTypeName(String typeName) {
+ this.typeName = typeName;
+ }
+
+ public String getTypeDescription() {
+ return typeDescription;
+ }
+
+ public void setTypeDescription(String typeDescription) {
+ this.typeDescription = typeDescription;
+ }
+
+ public String getTypeVersion() {
+ return typeVersion;
+ }
+
+ public void setTypeVersion(String typeVersion) {
+ this.typeVersion = typeVersion;
+ }
+
+ public List<AttributeDefinition> getAttributeDefinitions() {
+ return attributeDefinitions;
+ }
+
+ public void setAttributeDefinitions(List<AttributeDefinition> attributeDefinitions) {
+ this.attributeDefinitions = attributeDefinitions;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ StructTypeDefinition that = (StructTypeDefinition) o;
+
+ return Objects.equals(typeName, that.typeName) &&
+ Objects.equals(typeDescription, that.typeDescription) &&
+ Objects.equals(typeVersion, that.typeVersion) &&
+ Objects.equals(attributeDefinitions, that.attributeDefinitions);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(typeName, typeDescription, typeVersion, attributeDefinitions);
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/typedef/TraitTypeDefinition.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/typedef/TraitTypeDefinition.java b/intg/src/main/java/org/apache/atlas/v1/model/typedef/TraitTypeDefinition.java
new file mode 100644
index 0000000..9caf62a
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/typedef/TraitTypeDefinition.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.typedef;
+
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.List;
+import java.util.Set;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class TraitTypeDefinition extends HierarchicalTypeDefinition implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+
+ public TraitTypeDefinition() {
+ }
+
+ public TraitTypeDefinition(String typeName, String typeDescription, String typeVersion, List<AttributeDefinition> attributeDefinitions, Set<String> superTypes) {
+ super(typeName, typeDescription, typeVersion, attributeDefinitions, "org.apache.atlas.typesystem.types.TraitType", superTypes);
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/model/typedef/TypesDef.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/model/typedef/TypesDef.java b/intg/src/main/java/org/apache/atlas/v1/model/typedef/TypesDef.java
new file mode 100644
index 0000000..6a8bcb4
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/model/typedef/TypesDef.java
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.model.typedef;
+
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.io.Serializable;
+import java.util.List;
+
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
+import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.PUBLIC_ONLY;
+
+
+@JsonAutoDetect(getterVisibility=PUBLIC_ONLY, setterVisibility=PUBLIC_ONLY, fieldVisibility=NONE)
+@JsonSerialize(include=JsonSerialize.Inclusion.ALWAYS)
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.PROPERTY)
+public class TypesDef implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private List<EnumTypeDefinition> enumTypes;
+ private List<StructTypeDefinition> structTypes;
+ private List<TraitTypeDefinition> traitTypes;
+ private List<ClassTypeDefinition> classTypes;
+
+
+ public TypesDef() {
+ }
+
+ public TypesDef(List<EnumTypeDefinition> enumTypes, List<StructTypeDefinition> structTypes, List<TraitTypeDefinition> traitTypes, List<ClassTypeDefinition> classTypes) {
+ this.enumTypes = enumTypes;
+ this.structTypes = structTypes;
+ this.traitTypes = traitTypes;
+ this.classTypes = classTypes;
+ }
+
+
+ public List<EnumTypeDefinition> getEnumTypes() {
+ return enumTypes;
+ }
+
+ public void setEnumTypes(List<EnumTypeDefinition> enumTypes) {
+ this.enumTypes = enumTypes;
+ }
+
+ public List<StructTypeDefinition> getStructTypes() {
+ return structTypes;
+ }
+
+ public void setStructTypes(List<StructTypeDefinition> structTypes) {
+ this.structTypes = structTypes;
+ }
+
+ public List<TraitTypeDefinition> getTraitTypes() {
+ return traitTypes;
+ }
+
+ public void setTraitTypes(List<TraitTypeDefinition> traitTypes) {
+ this.traitTypes = traitTypes;
+ }
+
+ public List<ClassTypeDefinition> getClassTypes() {
+ return classTypes;
+ }
+
+ public void setClassTypes(List<ClassTypeDefinition> classTypes) {
+ this.classTypes = classTypes;
+ }
+
+
+ @Override
+ public String toString() {
+ return toString(new StringBuilder()).toString();
+ }
+
+ public StringBuilder toString(StringBuilder sb) {
+ if (sb == null) {
+ sb = new StringBuilder();
+ }
+
+ sb.append("TypesDef{");
+ sb.append("enumTypes=[");
+ AtlasBaseTypeDef.dumpObjects(enumTypes, sb);
+ sb.append("], structTypes=[");
+ AtlasBaseTypeDef.dumpObjects(structTypes, sb);
+ sb.append("], traitTypes=[");
+ AtlasBaseTypeDef.dumpObjects(traitTypes, sb);
+ sb.append("], classTypes=[");
+ AtlasBaseTypeDef.dumpObjects(classTypes, sb);
+ sb.append("]");
+ sb.append("}");
+
+ return sb;
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/main/java/org/apache/atlas/v1/typesystem/types/utils/TypesUtil.java
----------------------------------------------------------------------
diff --git a/intg/src/main/java/org/apache/atlas/v1/typesystem/types/utils/TypesUtil.java b/intg/src/main/java/org/apache/atlas/v1/typesystem/types/utils/TypesUtil.java
new file mode 100644
index 0000000..864623a
--- /dev/null
+++ b/intg/src/main/java/org/apache/atlas/v1/typesystem/types/utils/TypesUtil.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.v1.typesystem.types.utils;
+
+
+import org.apache.atlas.v1.model.typedef.AttributeDefinition;
+import org.apache.atlas.v1.model.typedef.ClassTypeDefinition;
+import org.apache.atlas.v1.model.typedef.Multiplicity;
+import org.apache.atlas.v1.model.typedef.TraitTypeDefinition;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+
+public class TypesUtil {
+ public static ClassTypeDefinition createClassTypeDef(String name, String description, Set<String> superTypes, AttributeDefinition... attributes) {
+ ClassTypeDefinition ret = new ClassTypeDefinition(name, description, "1.0", Arrays.asList(attributes), superTypes);
+
+ return ret;
+ }
+
+ public static ClassTypeDefinition createClassTypeDef(String name, String description, String typeVersion, Set<String> superTypes, AttributeDefinition... attributes) {
+ ClassTypeDefinition ret = new ClassTypeDefinition(name, description, typeVersion, Arrays.asList(attributes), superTypes);
+
+ return ret;
+ }
+
+ public static TraitTypeDefinition createTraitTypeDef(String name, String description, Set<String> superTypes, AttributeDefinition... attributes) {
+ return createTraitTypeDef(name, description, superTypes, Arrays.asList(attributes));
+ }
+
+ public static TraitTypeDefinition createTraitTypeDef(String name, String description, String typeVersion, Set<String> superTypes, AttributeDefinition... attributes) {
+ return createTraitTypeDef(name, description, typeVersion, superTypes, Arrays.asList(attributes));
+ }
+
+ public static TraitTypeDefinition createTraitTypeDef(String name, String description, Set<String> superTypes, List<AttributeDefinition> attributes) {
+ TraitTypeDefinition ret = new TraitTypeDefinition(name, description, "1.0", attributes, superTypes);
+
+ return ret;
+ }
+
+ public static TraitTypeDefinition createTraitTypeDef(String name, String description, String typeVersion, Set<String> superTypes, List<AttributeDefinition> attributes) {
+ TraitTypeDefinition ret = new TraitTypeDefinition(name, description, typeVersion, attributes, superTypes);
+
+ return ret;
+ }
+
+ public static AttributeDefinition createUniqueRequiredAttrDef(String name, String dataTypeName) {
+ AttributeDefinition ret = new AttributeDefinition(name, dataTypeName, Multiplicity.REQUIRED, false, true, true, null);
+
+ return ret;
+ }
+
+ public static AttributeDefinition createRequiredAttrDef(String name, String dataTypeName) {
+ AttributeDefinition ret = new AttributeDefinition(name, dataTypeName, Multiplicity.REQUIRED, false, false, true, null);
+
+ return ret;
+ }
+
+ public static AttributeDefinition createOptionalAttrDef(String name, String dataTypeName) {
+ AttributeDefinition ret = new AttributeDefinition(name, dataTypeName, Multiplicity.OPTIONAL, false, false, true, null);
+
+ return ret;
+ }
+
+ public static class Pair<L, R> {
+ public L left;
+ public R right;
+
+ public Pair(L left, R right) {
+ this.left = left;
+ this.right = right;
+ }
+
+ public static <L, R> Pair<L, R> of(L left, R right) {
+ return new Pair<>(left, right);
+ }
+
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ Pair p = (Pair)o;
+
+ return Objects.equals(left, p.left) && Objects.equals(right, p.right);
+ }
+
+ public int hashCode() { return Objects.hash(left, right); }
+ }
+}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/intg/src/test/java/org/apache/atlas/TestRelationshipUtilsV2.java
----------------------------------------------------------------------
diff --git a/intg/src/test/java/org/apache/atlas/TestRelationshipUtilsV2.java b/intg/src/test/java/org/apache/atlas/TestRelationshipUtilsV2.java
index d0effd6..02613b5 100755
--- a/intg/src/test/java/org/apache/atlas/TestRelationshipUtilsV2.java
+++ b/intg/src/test/java/org/apache/atlas/TestRelationshipUtilsV2.java
@@ -18,8 +18,6 @@
package org.apache.atlas;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
@@ -38,10 +36,7 @@ import org.apache.commons.lang.StringUtils;
import java.math.BigDecimal;
import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Date;
-import java.util.List;
+import java.util.*;
import static org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags.BOTH;
import static org.apache.atlas.model.typedef.AtlasRelationshipDef.PropagateTags.ONE_TO_TWO;
@@ -146,11 +141,11 @@ public final class TestRelationshipUtilsV2 {
new AtlasRelationshipEndDef(PERSON_TYPE, "sibling", SINGLE),
new AtlasRelationshipEndDef(PERSON_TYPE, "sibling", SINGLE));
- return new AtlasTypesDef(ImmutableList.of(orgLevelType),
- ImmutableList.of(addressType),
- ImmutableList.of(securityClearanceType),
- ImmutableList.of(personType, employeeType, departmentType, managerType),
- ImmutableList.of(employeeDepartmentType, employeeManagerType, employeeMentorsType, employeeFriendsType, personSiblingType));
+ return new AtlasTypesDef(Collections.singletonList(orgLevelType),
+ Collections.singletonList(addressType),
+ Collections.singletonList(securityClearanceType),
+ Arrays.asList(personType, employeeType, departmentType, managerType),
+ Arrays.asList(employeeDepartmentType, employeeManagerType, employeeMentorsType, employeeFriendsType, personSiblingType));
}
public static AtlasEntitiesWithExtInfo getDepartmentEmployeeInstances() {
@@ -282,9 +277,8 @@ public final class TestRelationshipUtilsV2 {
new AtlasRelationshipEndDef(TYPE_B, "mappedFromA", SINGLE),
new AtlasRelationshipEndDef(TYPE_A, "mapToB", SET));
- return new AtlasTypesDef(ImmutableList.<AtlasEnumDef>of(), ImmutableList.<AtlasStructDef>of(),
- ImmutableList.<AtlasClassificationDef>of(), ImmutableList.of(aType, bType),
- ImmutableList.of(relationshipType1, relationshipType2, relationshipType3, relationshipType4));
+ return new AtlasTypesDef(Collections.<AtlasEnumDef>emptyList(), Collections.<AtlasStructDef>emptyList(), Collections.<AtlasClassificationDef>emptyList(), Arrays.asList(aType, bType),
+ Arrays.asList(relationshipType1, relationshipType2, relationshipType3, relationshipType4));
}
private static List<AtlasEnumElementDef> getOrgLevelElements() {
@@ -299,8 +293,8 @@ public final class TestRelationshipUtilsV2 {
return typeName + " description";
}
- private static ImmutableSet<String> superType(String superTypeName) {
- return StringUtils.isNotEmpty(superTypeName) ? ImmutableSet.of(superTypeName) : ImmutableSet.<String>of();
+ private static Set<String> superType(String superTypeName) {
+ return StringUtils.isNotEmpty(superTypeName) ? Collections.singleton(superTypeName) : Collections.<String>emptySet();
}
private static List<AtlasObjectId> getAtlasObjectIds(AtlasEntity... entities) {
[17/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasDeleteHandlerV1Test.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasDeleteHandlerV1Test.java b/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasDeleteHandlerV1Test.java
deleted file mode 100644
index 718538a..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasDeleteHandlerV1Test.java
+++ /dev/null
@@ -1,1141 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.store.graph.v1;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.RequestContextV1;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.TestUtilsV2;
-import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.model.instance.AtlasEntity;
-import org.apache.atlas.model.instance.AtlasEntityHeader;
-import org.apache.atlas.model.instance.AtlasObjectId;
-import org.apache.atlas.model.instance.AtlasStruct;
-import org.apache.atlas.model.instance.EntityMutationResponse;
-import org.apache.atlas.model.instance.EntityMutations;
-import org.apache.atlas.model.typedef.AtlasClassificationDef;
-import org.apache.atlas.model.typedef.AtlasEntityDef;
-import org.apache.atlas.model.typedef.AtlasEnumDef;
-import org.apache.atlas.model.typedef.AtlasStructDef;
-import org.apache.atlas.model.typedef.AtlasTypesDef;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.graph.AtlasEdgeLabel;
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
-import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.repository.store.bootstrap.AtlasTypeDefStoreInitializer;
-import org.apache.atlas.repository.store.graph.AtlasEntityStore;
-import org.apache.atlas.services.MetadataService;
-import org.apache.atlas.store.AtlasTypeDefStore;
-import org.apache.atlas.type.AtlasEntityType;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.type.AtlasTypeUtil;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.atlas.TestUtils.*;
-import static org.mockito.Mockito.mock;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotEquals;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-
-public abstract class AtlasDeleteHandlerV1Test {
-
- @Inject
- AtlasTypeRegistry typeRegistry;
-
- @Inject
- AtlasTypeDefStore typeDefStore;
-
- @Inject
- AtlasEntityStore entityStore;
-
- @Inject
- MetadataService metadataService;
-
- private AtlasEntityType compositeMapOwnerType;
-
- private AtlasEntityType compositeMapValueType;
-
- private TypeSystem typeSystem = TypeSystem.getInstance();
-
- @BeforeClass
- public void setUp() throws Exception {
- RequestContextV1.clear();
- RequestContextV1.get().setUser(TestUtilsV2.TEST_USER);
-
- metadataService = TestUtils.addSessionCleanupWrapper(metadataService);
- final AtlasTypesDef deptTypesDef = TestUtilsV2.defineDeptEmployeeTypes();
- typeDefStore.createTypesDef(deptTypesDef);
-
- final AtlasTypesDef hiveTypesDef = TestUtilsV2.defineHiveTypes();
- typeDefStore.createTypesDef(hiveTypesDef);
-
- // Define type for map value.
- AtlasEntityDef mapValueDef = AtlasTypeUtil.createClassTypeDef("CompositeMapValue", "CompositeMapValue" + "_description", "1.0",
- ImmutableSet.<String>of(),
- AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string")
- );
-
- // Define type with map where the value is a composite class reference to MapValue.
- AtlasEntityDef mapOwnerDef = AtlasTypeUtil.createClassTypeDef("CompositeMapOwner", "CompositeMapOwner_description",
- ImmutableSet.<String>of(),
- AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
- new AtlasStructDef.AtlasAttributeDef("map", "map<string,CompositeMapValue>", true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1, false, false,
- new ArrayList<AtlasStructDef.AtlasConstraintDef>() {{
- add(new AtlasStructDef.AtlasConstraintDef(AtlasStructDef.AtlasConstraintDef.CONSTRAINT_TYPE_OWNED_REF));
- }})
- );
-
- final AtlasTypesDef typesDef = AtlasTypeUtil.getTypesDef(ImmutableList.<AtlasEnumDef>of(),
- ImmutableList.<AtlasStructDef>of(),
- ImmutableList.<AtlasClassificationDef>of(),
- ImmutableList.of(mapValueDef, mapOwnerDef));
-
- AtlasTypesDef typesToCreate = AtlasTypeDefStoreInitializer.getTypesToCreate(typesDef, typeRegistry);
-
- if (!typesToCreate.isEmpty()) {
- typeDefStore.createTypesDef(typesToCreate);
- }
-
- compositeMapOwnerType = typeRegistry.getEntityTypeByName("CompositeMapOwner");
- compositeMapValueType = typeRegistry.getEntityTypeByName("CompositeMapValue");
- }
-
- @BeforeTest
- public void init() throws Exception {
- RequestContextV1.clear();
- RequestContextV1.get().setUser(TestUtilsV2.TEST_USER);
- }
-
- @AfterClass
- public void clear() {
- AtlasGraphProvider.cleanup();
- }
-
- @Test
- public void testDeleteAndCreate() throws Exception {
- init();
- final AtlasEntity dbEntity = TestUtilsV2.createDBEntity();
- EntityMutationResponse response = entityStore.createOrUpdate(new AtlasEntityStream(dbEntity), false);
-
- init();
- //delete entity should mark it as deleted
- EntityMutationResponse deleteResponse = entityStore.deleteById(response.getFirstEntityCreated().getGuid());
- AtlasEntityHeader dbEntityCreated = response.getFirstEntityCreated();
- assertEquals(deleteResponse.getEntitiesByOperation(EntityMutations.EntityOperation.DELETE).get(0).getGuid(), dbEntityCreated.getGuid());
-
- //get entity by unique attribute should throw EntityNotFoundException
- try {
- metadataService.getEntityDefinition(TestUtils.DATABASE_TYPE, "name", (String) response.getFirstEntityCreated().getAttribute("name"));
- fail("Expected EntityNotFoundException");
- } catch(EntityNotFoundException e) {
- //expected
- }
-
- init();
- //Create the same entity again, should create new entity
- AtlasEntity newDBEntity = TestUtilsV2.createDBEntity((String) dbEntity.getAttribute(NAME));
- EntityMutationResponse newCreationResponse = entityStore.createOrUpdate(new AtlasEntityStream(newDBEntity), false);
- assertNotEquals(newCreationResponse.getFirstEntityCreated().getGuid(), response.getFirstEntityCreated().getGuid());
-
- //get by unique attribute should return the new entity
- ITypedReferenceableInstance instance = metadataService.getEntityDefinitionReference(TestUtils.DATABASE_TYPE, "name", (String) dbEntity.getAttribute("name"));
- assertEquals(instance.getId()._getId(), newCreationResponse.getFirstEntityCreated().getGuid());
- }
-
- @Test
- public void testDeleteReference() throws Exception {
- //Deleting column should update table
- final AtlasEntity dbEntity = TestUtilsV2.createDBEntity();
-
- init();
- EntityMutationResponse dbCreationResponse = entityStore.createOrUpdate(new AtlasEntityStream(dbEntity), false);
-
- final AtlasEntity tableEntity = TestUtilsV2.createTableEntity(dbEntity);
- final AtlasEntity columnEntity = TestUtilsV2.createColumnEntity(tableEntity);
- tableEntity.setAttribute(COLUMNS_ATTR_NAME, Arrays.asList(AtlasTypeUtil.getAtlasObjectId(columnEntity)));
-
- AtlasEntity.AtlasEntityWithExtInfo input = new AtlasEntity.AtlasEntityWithExtInfo(tableEntity);
- input.addReferredEntity(columnEntity);
-
- init();
- EntityMutationResponse tblCreationResponse = entityStore.createOrUpdate(new AtlasEntityStream(input), false);
- final AtlasEntityHeader columnCreated = tblCreationResponse.getFirstCreatedEntityByTypeName(COLUMN_TYPE);
- final AtlasEntityHeader tableCreated = tblCreationResponse.getFirstCreatedEntityByTypeName(TABLE_TYPE);
-
- init();
- EntityMutationResponse deletionResponse = entityStore.deleteById(columnCreated.getGuid());
- assertEquals(deletionResponse.getDeletedEntities().size(), 1);
- assertEquals(deletionResponse.getDeletedEntities().get(0).getGuid(), columnCreated.getGuid());
- assertEquals(deletionResponse.getUpdatedEntities().size(), 1);
- assertEquals(deletionResponse.getUpdatedEntities().get(0).getGuid(), tableCreated.getGuid());
-
- assertEntityDeleted(columnCreated.getGuid());
-
- assertColumnForTestDeleteReference(entityStore.getById(tableCreated.getGuid()));
-
- //Deleting table should update process
- AtlasEntity process = TestUtilsV2.createProcessEntity(null, Arrays.asList(AtlasTypeUtil.getAtlasObjectId(tableCreated)));
- init();
- final EntityMutationResponse processCreationResponse = entityStore.createOrUpdate(new AtlasEntityStream(process), false);
-
- init();
- entityStore.deleteById(tableCreated.getGuid());
- assertEntityDeleted(tableCreated.getGuid());
-
- assertTableForTestDeleteReference(tableCreated.getGuid());
- assertProcessForTestDeleteReference(processCreationResponse.getFirstEntityCreated());
- }
-
- @Test
- public void testDeleteEntities() throws Exception {
- // Create a table entity, with 3 composite column entities
- init();
- final AtlasEntity dbEntity = TestUtilsV2.createDBEntity();
- EntityMutationResponse dbCreationResponse = entityStore.createOrUpdate(new AtlasEntityStream(dbEntity), false);
-
- final AtlasEntity tableEntity = TestUtilsV2.createTableEntity(dbEntity);
- AtlasEntity.AtlasEntitiesWithExtInfo entitiesInfo = new AtlasEntity.AtlasEntitiesWithExtInfo(tableEntity);
-
- final AtlasEntity columnEntity1 = TestUtilsV2.createColumnEntity(tableEntity);
- entitiesInfo.addReferredEntity(columnEntity1);
- final AtlasEntity columnEntity2 = TestUtilsV2.createColumnEntity(tableEntity);
- entitiesInfo.addReferredEntity(columnEntity2);
- final AtlasEntity columnEntity3 = TestUtilsV2.createColumnEntity(tableEntity);
- entitiesInfo.addReferredEntity(columnEntity3);
-
- tableEntity.setAttribute(COLUMNS_ATTR_NAME, Arrays.asList(AtlasTypeUtil.getAtlasObjectId(columnEntity1),
- AtlasTypeUtil.getAtlasObjectId(columnEntity2),
- AtlasTypeUtil.getAtlasObjectId(columnEntity3)));
-
- init();
-
- final EntityMutationResponse tblCreationResponse = entityStore.createOrUpdate(new AtlasEntityStream(entitiesInfo), false);
-
- final AtlasEntityHeader column1Created = tblCreationResponse.getCreatedEntityByTypeNameAndAttribute(COLUMN_TYPE, NAME, (String) columnEntity1.getAttribute(NAME));
- final AtlasEntityHeader column2Created = tblCreationResponse.getCreatedEntityByTypeNameAndAttribute(COLUMN_TYPE, NAME, (String) columnEntity2.getAttribute(NAME));
- final AtlasEntityHeader column3Created = tblCreationResponse.getCreatedEntityByTypeNameAndAttribute(COLUMN_TYPE, NAME, (String) columnEntity3.getAttribute(NAME));
-
- // Retrieve the table entities from the Repository, to get their guids and the composite column guids.
- ITypedReferenceableInstance tableInstance = metadataService.getEntityDefinitionReference(TestUtils.TABLE_TYPE, NAME, (String) tableEntity.getAttribute(NAME));
- List<IReferenceableInstance> columns = (List<IReferenceableInstance>) tableInstance.get(COLUMNS_ATTR_NAME);
-
- //Delete column
- String colId = columns.get(0).getId()._getId();
- String tableId = tableInstance.getId()._getId();
-
- init();
-
- EntityMutationResponse deletionResponse = entityStore.deleteById(colId);
- assertEquals(deletionResponse.getDeletedEntities().size(), 1);
- assertEquals(deletionResponse.getDeletedEntities().get(0).getGuid(), colId);
- assertEquals(deletionResponse.getUpdatedEntities().size(), 1);
- assertEquals(deletionResponse.getUpdatedEntities().get(0).getGuid(), tableId);
- assertEntityDeleted(colId);
-
- final AtlasEntity.AtlasEntityWithExtInfo tableEntityCreated = entityStore.getById(tableId);
- assertDeletedColumn(tableEntityCreated);
-
- assertTestDisconnectUnidirectionalArrayReferenceFromClassType(
- (List<AtlasObjectId>) tableEntityCreated.getEntity().getAttribute(COLUMNS_ATTR_NAME), colId);
-
- //update by removing a column - col1
- final AtlasEntity tableEntity1 = TestUtilsV2.createTableEntity(dbEntity, (String) tableEntity.getAttribute(NAME));
-
- AtlasEntity.AtlasEntitiesWithExtInfo entitiesInfo1 = new AtlasEntity.AtlasEntitiesWithExtInfo(tableEntity1);
- final AtlasEntity columnEntity3New = TestUtilsV2.createColumnEntity(tableEntity1, (String) column3Created.getAttribute(NAME));
- tableEntity1.setAttribute(COLUMNS_ATTR_NAME, Arrays.asList(AtlasTypeUtil.getAtlasObjectId(columnEntity3New)));
- entitiesInfo1.addReferredEntity(columnEntity3New);
-
- init();
- deletionResponse = entityStore.createOrUpdate(new AtlasEntityStream(entitiesInfo1), false);
-
- assertEquals(deletionResponse.getDeletedEntities().size(), 1);
- assertEquals(deletionResponse.getDeletedEntities().get(0).getGuid(), column2Created.getGuid());
- assertEntityDeleted(colId);
-
- // Delete the table entities. The deletion should cascade to their composite columns.
- tableInstance = metadataService.getEntityDefinitionReference(TestUtils.TABLE_TYPE, NAME, (String) tableEntity.getAttribute(NAME));
-
- init();
- EntityMutationResponse tblDeletionResponse = entityStore.deleteById(tableInstance.getId()._getId());
- assertEquals(tblDeletionResponse.getDeletedEntities().size(), 2);
-
- final AtlasEntityHeader tableDeleted = tblDeletionResponse.getFirstDeletedEntityByTypeName(TABLE_TYPE);
- final AtlasEntityHeader colDeleted = tblDeletionResponse.getFirstDeletedEntityByTypeName(COLUMN_TYPE);
-
- // Verify that deleteEntities() response has guids for tables and their composite columns.
- Assert.assertTrue(tableDeleted.getGuid().equals(tableInstance.getId()._getId()));
- Assert.assertTrue(colDeleted.getGuid().equals(column3Created.getGuid()));
-
- // Verify that tables and their composite columns have been deleted from the graph Repository.
- assertEntityDeleted(tableDeleted.getGuid());
- assertEntityDeleted(colDeleted.getGuid());
-
- }
-
- protected abstract void assertDeletedColumn(AtlasEntity.AtlasEntityWithExtInfo tableInstance) throws AtlasException, AtlasBaseException;
-
- protected abstract void assertTestDeleteEntities(AtlasEntity.AtlasEntityWithExtInfo tableInstance) throws Exception;
-
- protected abstract void assertTableForTestDeleteReference(String tableId) throws Exception;
-
- protected abstract void assertColumnForTestDeleteReference(AtlasEntity.AtlasEntityWithExtInfo tableInstance)
- throws AtlasBaseException;
-
- protected abstract void assertProcessForTestDeleteReference(AtlasEntityHeader processInstance) throws Exception;
-
- protected abstract void assertEntityDeleted(String id) throws Exception;
-
- String getFirstGuid(Map<String, AtlasEntity> entityMap) {
- return entityMap.keySet().iterator().next();
- }
-
- @Test
- public void testUpdateEntity_MultiplicityOneNonCompositeReference() throws Exception {
- AtlasEntity.AtlasEntitiesWithExtInfo hrDept = TestUtilsV2.createDeptEg2();
- init();
-
- final EntityMutationResponse hrDeptCreationResponse = entityStore.createOrUpdate(new AtlasEntityStream(hrDept), false);
- final AtlasEntityHeader deptCreated = hrDeptCreationResponse.getFirstUpdatedEntityByTypeName(DEPARTMENT_TYPE);
- final AtlasEntityHeader maxEmployeeCreated = hrDeptCreationResponse.getCreatedEntityByTypeNameAndAttribute(TestUtilsV2.EMPLOYEE_TYPE, NAME, "Max");
- final AtlasEntityHeader johnEmployeeCreated = hrDeptCreationResponse.getUpdatedEntityByTypeNameAndAttribute(TestUtilsV2.EMPLOYEE_TYPE, NAME, "John");
- final AtlasEntityHeader janeEmployeeCreated = hrDeptCreationResponse.getCreatedEntityByTypeNameAndAttribute(TestUtilsV2.MANAGER_TYPE, NAME, "Jane");
- final AtlasEntityHeader juliusEmployeeCreated = hrDeptCreationResponse.getUpdatedEntityByTypeNameAndAttribute(TestUtilsV2.MANAGER_TYPE, NAME, "Julius");
-
- ITypedReferenceableInstance max = metadataService.getEntityDefinition(maxEmployeeCreated.getGuid());
- String maxGuid = max.getId()._getId();
- AtlasVertex vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
- Long creationTimestamp = GraphHelper.getSingleValuedProperty(vertex, Constants.TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(creationTimestamp);
-
- Long modificationTimestampPreUpdate = GraphHelper.getSingleValuedProperty(vertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(modificationTimestampPreUpdate);
-
- AtlasEntity maxEmployee = getEmployeeByName(hrDept, "Max");
- maxEmployee.setAttribute("mentor", AtlasTypeUtil.getAtlasObjectId(johnEmployeeCreated));
- maxEmployee.setAttribute("department", AtlasTypeUtil.getAtlasObjectId(deptCreated));
- maxEmployee.setAttribute("manager", AtlasTypeUtil.getAtlasObjectId(janeEmployeeCreated));
-
- init();
- EntityMutationResponse entityResult = entityStore.createOrUpdate(new AtlasEntityStream(maxEmployee), false);
-
- assertEquals(entityResult.getUpdatedEntities().size(), 1);
- assertTrue(extractGuids(entityResult.getUpdatedEntities()).contains(maxGuid));
-
- // Verify the update was applied correctly - john should now be max's mentor.
- max = metadataService.getEntityDefinition(maxGuid);
- ITypedReferenceableInstance refTarget = (ITypedReferenceableInstance) max.get("mentor");
- Assert.assertEquals(refTarget.getId()._getId(), johnEmployeeCreated.getGuid());
-
- // Verify modification timestamp was updated.
- vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
- Long modificationTimestampPostUpdate = GraphHelper.getSingleValuedProperty(vertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(modificationTimestampPostUpdate);
- Assert.assertTrue(creationTimestamp < modificationTimestampPostUpdate);
-
- // Update max's mentor reference to jane.
- maxEmployee.setAttribute("mentor", AtlasTypeUtil.getAtlasObjectId(janeEmployeeCreated));
- init();
- entityResult = entityStore.createOrUpdate(new AtlasEntityStream(maxEmployee), false);
- assertEquals(entityResult.getUpdatedEntities().size(), 1);
- assertTrue(extractGuids(entityResult.getUpdatedEntities()).contains(maxGuid));
-
- // Verify the update was applied correctly - jane should now be max's mentor.
- max = metadataService.getEntityDefinition(maxGuid);
- refTarget = (ITypedReferenceableInstance) max.get("mentor");
- Assert.assertEquals(refTarget.getId()._getId(), janeEmployeeCreated.getGuid());
-
- // Verify modification timestamp was updated.
- vertex = GraphHelper.getInstance().getVertexForGUID(maxGuid);
- Long modificationTimestampPost2ndUpdate = GraphHelper.getSingleValuedProperty(vertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY, Long.class);
- Assert.assertNotNull(modificationTimestampPost2ndUpdate);
- Assert.assertTrue(modificationTimestampPostUpdate < modificationTimestampPost2ndUpdate);
-
- ITypedReferenceableInstance julius = metadataService.getEntityDefinition(juliusEmployeeCreated.getGuid());
- Id juliusId = julius.getId();
-
- init();
- maxEmployee.setAttribute("manager", AtlasTypeUtil.getAtlasObjectId(juliusEmployeeCreated));
- entityResult = entityStore.createOrUpdate(new AtlasEntityStream(maxEmployee), false);
- assertEquals(entityResult.getUpdatedEntities().size(), 3);
- List<String> updatedGuids = extractGuids(entityResult.getUpdatedEntities());
- assertTrue(updatedGuids.contains(maxGuid));
- assertTrue(updatedGuids.contains(janeEmployeeCreated.getGuid()));
- // Should have updated julius to add max in subordinates list.
- assertTrue(updatedGuids.contains(juliusEmployeeCreated.getGuid()));
-
- // Verify the update was applied correctly - julius should now be max's manager and max should be julius' subordinate.
- max = metadataService.getEntityDefinition(maxGuid);
- refTarget = (ITypedReferenceableInstance) max.get("manager");
- Assert.assertEquals(refTarget.getId()._getId(), juliusId._getId());
- julius = metadataService.getEntityDefinition(juliusId._getId());
- Object value = julius.get("subordinates");
- Assert.assertTrue(value instanceof List);
- List<ITypedReferenceableInstance> refList = (List<ITypedReferenceableInstance>) value;
- Assert.assertEquals(refList.size(), 1);
- Assert.assertEquals(refList.get(0).getId()._getId(), maxGuid);
-
- assertTestUpdateEntity_MultiplicityOneNonCompositeReference(janeEmployeeCreated.getGuid());
- }
-
- private Map<String, String> getEmployeeNameGuidMap(final ITypedReferenceableInstance hrDept) throws AtlasException {
- Object refValue = hrDept.get("employees");
- Assert.assertTrue(refValue instanceof List);
- List<Object> employees = (List<Object>)refValue;
- Assert.assertEquals(employees.size(), 4);
- Map<String, String> nameGuidMap = new HashMap<String, String>() {{
- put("hr", hrDept.getId()._getId());
- }};
-
- for (Object listValue : employees) {
- Assert.assertTrue(listValue instanceof ITypedReferenceableInstance);
- ITypedReferenceableInstance employee = (ITypedReferenceableInstance) listValue;
- nameGuidMap.put((String)employee.get("name"), employee.getId()._getId());
- }
- return nameGuidMap;
- }
-
-
- private AtlasEntity getEmployeeByName(AtlasEntity.AtlasEntitiesWithExtInfo hrDept, String name) {
- for (AtlasEntity entity : hrDept.getEntities()) {
- if ( name.equals(entity.getAttribute(NAME))) {
- return entity;
- }
- }
- return null;
- }
-
- protected abstract void assertTestUpdateEntity_MultiplicityOneNonCompositeReference(String janeGuid) throws Exception;
-
- /**
- * Verify deleting an entity which is contained by another
- * entity through a bi-directional composite reference.
- *
- * @throws Exception
- */
- @Test
- public void testDisconnectBidirectionalReferences() throws Exception {
- AtlasEntity.AtlasEntitiesWithExtInfo hrDept = TestUtilsV2.createDeptEg2();
- init();
- final EntityMutationResponse hrDeptCreationResponse = entityStore.createOrUpdate(new AtlasEntityStream(hrDept), false);
-
- final AtlasEntityHeader deptCreated = hrDeptCreationResponse.getFirstCreatedEntityByTypeName(DEPARTMENT_TYPE);
- final AtlasEntityHeader maxEmployee = hrDeptCreationResponse.getCreatedEntityByTypeNameAndAttribute(TestUtilsV2.EMPLOYEE_TYPE, NAME, "Max");
- final AtlasEntityHeader johnEmployee = hrDeptCreationResponse.getCreatedEntityByTypeNameAndAttribute(TestUtilsV2.EMPLOYEE_TYPE, NAME, "John");
- final AtlasEntityHeader janeEmployee = hrDeptCreationResponse.getCreatedEntityByTypeNameAndAttribute(TestUtilsV2.MANAGER_TYPE, NAME, "Jane");
- final AtlasEntityHeader juliusEmployee = hrDeptCreationResponse.getCreatedEntityByTypeNameAndAttribute(TestUtilsV2.MANAGER_TYPE, NAME, "Julius");
-
- ITypedReferenceableInstance hrDeptInstance = metadataService.getEntityDefinition(deptCreated.getGuid());
- Map<String, String> nameGuidMap = getEmployeeNameGuidMap(hrDeptInstance);
-
- // Verify that Max is one of Jane's subordinates.
- ITypedReferenceableInstance jane = metadataService.getEntityDefinition(janeEmployee.getGuid());
- Object refValue = jane.get("subordinates");
- Assert.assertTrue(refValue instanceof List);
- List<Object> subordinates = (List<Object>)refValue;
- Assert.assertEquals(subordinates.size(), 2);
- List<String> subordinateIds = new ArrayList<>(2);
- for (Object listValue : subordinates) {
- Assert.assertTrue(listValue instanceof ITypedReferenceableInstance);
- ITypedReferenceableInstance employee = (ITypedReferenceableInstance) listValue;
- subordinateIds.add(employee.getId()._getId());
- }
- Assert.assertTrue(subordinateIds.contains(maxEmployee.getGuid()));
-
- init();
- EntityMutationResponse entityResult = entityStore.deleteById(maxEmployee.getGuid());
- ITypedReferenceableInstance john = metadataService.getEntityDefinitionReference(TestUtilsV2.EMPLOYEE_TYPE, NAME, "John");
-
- assertEquals(entityResult.getDeletedEntities().size(), 1);
- assertEquals(entityResult.getDeletedEntities().get(0).getGuid(), maxEmployee.getGuid());
- assertEquals(entityResult.getUpdatedEntities().size(), 3);
-
- assertEquals(extractGuids(entityResult.getUpdatedEntities()), Arrays.asList(janeEmployee.getGuid(), deptCreated.getGuid(), johnEmployee.getGuid()));
- assertEntityDeleted(maxEmployee.getGuid());
-
- assertMaxForTestDisconnectBidirectionalReferences(nameGuidMap);
-
- // Now delete jane - this should disconnect the manager reference from her
- // subordinate.
- init();
- entityResult = entityStore.deleteById(janeEmployee.getGuid());
- assertEquals(entityResult.getDeletedEntities().size(), 1);
- assertEquals(entityResult.getDeletedEntities().get(0).getGuid(), janeEmployee.getGuid());
- assertEquals(entityResult.getUpdatedEntities().size(), 2);
- assertEquals(extractGuids(entityResult.getUpdatedEntities()), Arrays.asList(deptCreated.getGuid(), johnEmployee.getGuid()));
-
- assertEntityDeleted(janeEmployee.getGuid());
-
- final AtlasEntity.AtlasEntityWithExtInfo johnUpdated = entityStore.getById(johnEmployee.getGuid());
- assertJohnForTestDisconnectBidirectionalReferences(johnUpdated, janeEmployee.getGuid());
- }
-
- protected List<String> extractGuids(final List<AtlasEntityHeader> updatedEntities) {
- List<String> guids = new ArrayList<>();
- for (AtlasEntityHeader header : updatedEntities ) {
- guids.add(header.getGuid());
- }
- return guids;
- }
-
- protected abstract void assertJohnForTestDisconnectBidirectionalReferences(AtlasEntity.AtlasEntityWithExtInfo john,
- String janeGuid) throws Exception;
-
- protected abstract void assertMaxForTestDisconnectBidirectionalReferences(Map<String, String> nameGuidMap)
- throws Exception;
-
- protected abstract void assertTestDisconnectUnidirectionalArrayReferenceFromClassType(
- List<AtlasObjectId> columns, String columnGuid) throws AtlasBaseException;
-
- /**
- * Verify deleting entities that are the target of a unidirectional class array reference
- * from a struct or trait instance.
- */
- @Test
- public void testDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes() throws Exception {
- // Define class types.
- AtlasStructDef.AtlasAttributeDef[] structTargetAttributes = new AtlasStructDef.AtlasAttributeDef[]{
- new AtlasStructDef.AtlasAttributeDef("attr1", "string",
- true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
- false, false,
- Collections.<AtlasStructDef.AtlasConstraintDef>emptyList())};
-
- AtlasEntityDef structTargetDef =
- new AtlasEntityDef("StructTarget", "StructTarget_description", "1.0",
- Arrays.asList(structTargetAttributes), Collections.<String>emptySet());
-
-
- AtlasStructDef.AtlasAttributeDef[] traitTargetAttributes = new AtlasStructDef.AtlasAttributeDef[]{
- new AtlasStructDef.AtlasAttributeDef("attr1", "string",
- true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
- false, false,
- Collections.<AtlasStructDef.AtlasConstraintDef>emptyList())};
-
- AtlasEntityDef traitTargetDef =
- new AtlasEntityDef("TraitTarget", "TraitTarget_description", "1.0",
- Arrays.asList(traitTargetAttributes), Collections.<String>emptySet());
-
- AtlasStructDef.AtlasAttributeDef[] structContainerAttributes = new AtlasStructDef.AtlasAttributeDef[]{
- new AtlasStructDef.AtlasAttributeDef("struct", "TestStruct",
- true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
- false, false,
- Collections.<AtlasStructDef.AtlasConstraintDef>emptyList())};
-
- AtlasEntityDef structContainerDef =
- new AtlasEntityDef("StructContainer", "StructContainer_description", "1.0",
- Arrays.asList(structContainerAttributes), Collections.<String>emptySet());
-
- // Define struct and trait types which have a unidirectional array reference
- // to a class type.
- AtlasStructDef.AtlasAttributeDef[] structDefAttributes = new AtlasStructDef.AtlasAttributeDef[] {
- new AtlasStructDef.AtlasAttributeDef("target", "array<StructTarget>",
- true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
- false, false,
- Collections.<AtlasStructDef.AtlasConstraintDef>emptyList()),
-
- new AtlasStructDef.AtlasAttributeDef("nestedStructs", "array<NestedStruct>",
- true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
- false, false,
- Collections.<AtlasStructDef.AtlasConstraintDef>emptyList()) };
-
- AtlasStructDef structDef = new AtlasStructDef("TestStruct", "TestStruct_desc", "1.0", Arrays.asList(structDefAttributes));
-
-
- // Define struct and trait types which have a unidirectional array reference
- // to a class type.
- AtlasStructDef.AtlasAttributeDef[] nestedStructDefAttributes = new AtlasStructDef.AtlasAttributeDef[] {
- new AtlasStructDef.AtlasAttributeDef("attr1", "string",
- true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
- false, false,
- Collections.<AtlasStructDef.AtlasConstraintDef>emptyList()),
-
- new AtlasStructDef.AtlasAttributeDef("target", "array<TraitTarget>",
- true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
- false, false,
- Collections.<AtlasStructDef.AtlasConstraintDef>emptyList()) };
-
- AtlasStructDef nestedStructDef = new AtlasStructDef("NestedStruct", "NestedStruct_desc", "1.0", Arrays.asList(nestedStructDefAttributes));
-
- AtlasStructDef.AtlasAttributeDef[] traitDefAttributes = new AtlasStructDef.AtlasAttributeDef[] {
- new AtlasStructDef.AtlasAttributeDef("target", "array<TraitTarget>",
- true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
- false, false,
- Collections.<AtlasStructDef.AtlasConstraintDef>emptyList())
- };
-
- AtlasClassificationDef traitDef = new AtlasClassificationDef("TestTrait", "TestTrait_desc", "1.0", Arrays.asList(traitDefAttributes));
-
- AtlasTypesDef typesDef = AtlasTypeUtil.getTypesDef(ImmutableList.<AtlasEnumDef>of(),
- ImmutableList.<AtlasStructDef>of(structDef, nestedStructDef),
- ImmutableList.<AtlasClassificationDef>of(traitDef),
- ImmutableList.<AtlasEntityDef>of(structTargetDef, traitTargetDef, structContainerDef));
-
- typeDefStore.createTypesDef(typesDef);
-
- // Create instances of class, struct, and trait types.
- final AtlasEntity structTargetEntity = new AtlasEntity("StructTarget");
- final AtlasEntity traitTargetEntity = new AtlasEntity("TraitTarget");
- final AtlasEntity structContainerEntity = new AtlasEntity("StructContainer");
- AtlasStruct structInstance = new AtlasStruct("TestStruct");
- AtlasStruct nestedStructInstance = new AtlasStruct("NestedStruct");
- Struct traitInstance = new Struct("TestTrait");
- structContainerEntity.setAttribute("struct", structInstance);
- structInstance.setAttribute("target", ImmutableList.of(AtlasTypeUtil.getAtlasObjectId(structTargetEntity)));
- structInstance.setAttribute("nestedStructs", ImmutableList.of(nestedStructInstance));
-
- AtlasEntity.AtlasEntitiesWithExtInfo structCreationObj = new AtlasEntity.AtlasEntitiesWithExtInfo();
- structCreationObj.addEntity(structContainerEntity);
- structCreationObj.addEntity(traitTargetEntity);
- structCreationObj.addReferredEntity(structTargetEntity);
-
- init();
-
- AtlasEntityStream entityStream = new AtlasEntityStream(structCreationObj);
-
- EntityMutationResponse response = entityStore.createOrUpdate(entityStream, false);
- Assert.assertEquals(response.getCreatedEntities().size(), 3);
-
- final List<String> structTarget = metadataService.getEntityList("StructTarget");
- Assert.assertEquals(structTarget.size(), 1);
- final String structTargetGuid = structTarget.get(0);
-
- final List<String> traitTarget = metadataService.getEntityList("TraitTarget");
- Assert.assertEquals(traitTarget.size(), 1);
- final String traitTargetGuid = traitTarget.get(0);
-
- final List<String> structContainerTarget = metadataService.getEntityList("StructContainer");
- Assert.assertEquals(structContainerTarget.size(), 1);
- String structContainerGuid = structContainerTarget.get(0);
-
- // Add TestTrait to StructContainer instance
- traitInstance.set("target", ImmutableList.of(new Id(traitTargetGuid, 0, "TraitTarget")));
- TraitType traitType = typeSystem.getDataType(TraitType.class, "TestTrait");
- ITypedStruct convertedTrait = traitType.convert(traitInstance, Multiplicity.REQUIRED);
- metadataService.addTrait(structContainerGuid, convertedTrait);
-
- // Verify that the unidirectional references from the struct and trait instances
- // are pointing at the target entities.
- final ITypedReferenceableInstance structContainerConvertedEntity = metadataService.getEntityDefinition(structContainerGuid);
- Object object = structContainerConvertedEntity.get("struct");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof ITypedStruct);
- ITypedStruct struct = (ITypedStruct) object;
- object = struct.get("target");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof List);
- List<ITypedReferenceableInstance> refList = (List<ITypedReferenceableInstance>)object;
- Assert.assertEquals(refList.size(), 1);
- Assert.assertEquals(refList.get(0).getId()._getId(), structTargetGuid);
-
- IStruct trait = structContainerConvertedEntity.getTrait("TestTrait");
- Assert.assertNotNull(trait);
- object = trait.get("target");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof List);
- refList = (List<ITypedReferenceableInstance>)object;
- Assert.assertEquals(refList.size(), 1);
- Assert.assertEquals(refList.get(0).getId()._getId(), traitTargetGuid);
-
- init();
- // Delete the entities that are targets of the struct and trait instances.
- EntityMutationResponse entityResult = entityStore.deleteByIds(new ArrayList<String>() {{
- add(structTargetGuid);
- add(traitTargetGuid);
- }});
- Assert.assertEquals(entityResult.getDeletedEntities().size(), 2);
- Assert.assertTrue(extractGuids(entityResult.getDeletedEntities()).containsAll(Arrays.asList(structTargetGuid, traitTargetGuid)));
- assertEntityDeleted(structTargetGuid);
- assertEntityDeleted(traitTargetGuid);
-
- assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(structContainerGuid);
-
- init();
- // Delete the entity which contains nested structs and has the TestTrait trait.
- entityResult = entityStore.deleteById(structContainerGuid);
- Assert.assertEquals(entityResult.getDeletedEntities().size(), 1);
- Assert.assertTrue(extractGuids(entityResult.getDeletedEntities()).contains(structContainerGuid));
- assertEntityDeleted(structContainerGuid);
-
- // Verify all TestStruct struct vertices were removed.
- assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "TestStruct"));
-
- // Verify all NestedStruct struct vertices were removed.
- assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "NestedStruct"));
-
- // Verify all TestTrait trait vertices were removed.
- assertVerticesDeleted(getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, "TestTrait"));
- }
-
-
- /**
- * Verify deleting entities that are the target of class map references.
- */
- @Test
- public void testDisconnectMapReferenceFromClassType() throws Exception {
- // Define type for map value.
- AtlasStructDef.AtlasAttributeDef[] mapValueAttributes = new AtlasStructDef.AtlasAttributeDef[]{
- new AtlasStructDef.AtlasAttributeDef("biMapOwner", "MapOwner",
- true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
- false, false,
- new ArrayList<AtlasStructDef.AtlasConstraintDef>() {{
- add(new AtlasStructDef.AtlasConstraintDef(
- AtlasStructDef.AtlasConstraintDef.CONSTRAINT_TYPE_INVERSE_REF, new HashMap<String, Object>() {{
- put(AtlasStructDef.AtlasConstraintDef.CONSTRAINT_PARAM_ATTRIBUTE, "biMap");
- }}));
- }})};
-
- AtlasEntityDef mapValueContainerDef =
- new AtlasEntityDef("MapValue", "MapValue_desc", "1.0",
- Arrays.asList(mapValueAttributes), Collections.<String>emptySet());
-
- // Define type with unidirectional and bidirectional map references,
- // where the map value is a class reference to MapValue.
-
- AtlasStructDef.AtlasAttributeDef[] mapOwnerAttributes = new AtlasStructDef.AtlasAttributeDef[]{
- new AtlasStructDef.AtlasAttributeDef("map", "map<string,MapValue>",
- true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
- false, false,
- Collections.<AtlasStructDef.AtlasConstraintDef>emptyList()),
- new AtlasStructDef.AtlasAttributeDef("biMap", "map<string,MapValue>",
- true,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 0, 1,
- false, false,
- new ArrayList<AtlasStructDef.AtlasConstraintDef>() {{
- add(new AtlasStructDef.AtlasConstraintDef(
- AtlasStructDef.AtlasConstraintDef.CONSTRAINT_TYPE_INVERSE_REF, new HashMap<String, Object>() {{
- put(AtlasStructDef.AtlasConstraintDef.CONSTRAINT_PARAM_ATTRIBUTE, "biMapOwner");
- }}));
- }})};
-
- AtlasEntityDef mapOwnerContainerDef =
- new AtlasEntityDef("MapOwner", "MapOwner_desc", "1.0",
- Arrays.asList(mapOwnerAttributes), Collections.<String>emptySet());
-
- AtlasTypesDef typesDef = AtlasTypeUtil.getTypesDef(ImmutableList.<AtlasEnumDef>of(),
- ImmutableList.<AtlasStructDef>of(),
- ImmutableList.<AtlasClassificationDef>of(),
- ImmutableList.<AtlasEntityDef>of(mapValueContainerDef, mapOwnerContainerDef));
-
- typeDefStore.createTypesDef(typesDef);
-
- // Create instances of MapOwner and MapValue.
- // Set MapOwner.map and MapOwner.biMap with one entry that references MapValue instance.
- AtlasEntity mapOwnerInstance = new AtlasEntity("MapOwner");
- AtlasEntity mapValueInstance = new AtlasEntity("MapValue");
-
- mapOwnerInstance.setAttribute("map", Collections.singletonMap("value1", AtlasTypeUtil.getAtlasObjectId(mapValueInstance)));
- mapOwnerInstance.setAttribute("biMap", Collections.singletonMap("value1", AtlasTypeUtil.getAtlasObjectId(mapValueInstance)));
- // Set biMapOwner reverse reference on MapValue.
- mapValueInstance.setAttribute("biMapOwner", AtlasTypeUtil.getAtlasObjectId(mapOwnerInstance));
-
- AtlasEntity.AtlasEntitiesWithExtInfo entities = new AtlasEntity.AtlasEntitiesWithExtInfo();
- entities.addReferredEntity(mapValueInstance);
- entities.addEntity(mapOwnerInstance);
-
- final EntityMutationResponse response = entityStore.createOrUpdate(new AtlasEntityStream(entities), false);
- Assert.assertEquals(response.getCreatedEntities().size(), 2);
- final List<AtlasEntityHeader> mapOwnerCreated = response.getCreatedEntitiesByTypeName("MapOwner");
- AtlasEntity.AtlasEntityWithExtInfo mapOwnerEntity = entityStore.getById(mapOwnerCreated.get(0).getGuid());
-
- String edgeLabel = AtlasGraphUtilsV1.getAttributeEdgeLabel(typeRegistry.getEntityTypeByName("MapOwner"), "map");
- String mapEntryLabel = edgeLabel + "." + "value1";
- AtlasEdgeLabel atlasEdgeLabel = new AtlasEdgeLabel(mapEntryLabel);
-
- // Verify MapOwner.map attribute has expected value.
- String mapValueGuid = null;
- AtlasVertex mapOwnerVertex = null;
- for (String mapAttrName : Arrays.asList("map", "biMap")) {
- Object object = mapOwnerEntity.getEntity().getAttribute(mapAttrName);
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof Map);
- Map<String, AtlasObjectId> map = (Map<String, AtlasObjectId>)object;
- Assert.assertEquals(map.size(), 1);
- AtlasObjectId value1Id = map.get("value1");
- Assert.assertNotNull(value1Id);
- mapValueGuid = value1Id.getGuid();
- mapOwnerVertex = GraphHelper.getInstance().getVertexForGUID(mapOwnerEntity.getEntity().getGuid());
- object = mapOwnerVertex.getProperty(atlasEdgeLabel.getQualifiedMapKey(), Object.class);
- Assert.assertNotNull(object);
- }
-
- // Delete the map value instance.
- // This should disconnect the references from the map owner instance.
- entityStore.deleteById(mapValueGuid);
- assertEntityDeleted(mapValueGuid);
- assertTestDisconnectMapReferenceFromClassType(mapOwnerEntity.getEntity().getGuid());
- }
-
- protected abstract void assertTestDisconnectMapReferenceFromClassType(String mapOwnerGuid) throws Exception;
-
- @Test
- public void testDeleteByUniqueAttribute() throws Exception {
- // Create a table entity, with 3 composite column entities
- init();
- final AtlasEntity dbEntity = TestUtilsV2.createDBEntity();
- EntityMutationResponse dbCreationResponse = entityStore.createOrUpdate(new AtlasEntityStream(dbEntity), false);
-
- final AtlasEntity tableEntity = TestUtilsV2.createTableEntity(dbEntity);
- AtlasEntity.AtlasEntitiesWithExtInfo entitiesInfo = new AtlasEntity.AtlasEntitiesWithExtInfo(tableEntity);
-
- final AtlasEntity columnEntity1 = TestUtilsV2.createColumnEntity(tableEntity);
- entitiesInfo.addReferredEntity(columnEntity1);
- final AtlasEntity columnEntity2 = TestUtilsV2.createColumnEntity(tableEntity);
- entitiesInfo.addReferredEntity(columnEntity2);
- final AtlasEntity columnEntity3 = TestUtilsV2.createColumnEntity(tableEntity);
- entitiesInfo.addReferredEntity(columnEntity3);
-
- tableEntity.setAttribute(COLUMNS_ATTR_NAME, Arrays.asList(AtlasTypeUtil.getAtlasObjectId(columnEntity1),
- AtlasTypeUtil.getAtlasObjectId(columnEntity2),
- AtlasTypeUtil.getAtlasObjectId(columnEntity3)));
-
- init();
-
- final EntityMutationResponse tblCreationResponse = entityStore.createOrUpdate(new AtlasEntityStream(entitiesInfo), false);
-
- final AtlasEntityHeader column1Created = tblCreationResponse.getCreatedEntityByTypeNameAndAttribute(COLUMN_TYPE, NAME, (String) columnEntity1.getAttribute(NAME));
- final AtlasEntityHeader column2Created = tblCreationResponse.getCreatedEntityByTypeNameAndAttribute(COLUMN_TYPE, NAME, (String) columnEntity2.getAttribute(NAME));
- final AtlasEntityHeader column3Created = tblCreationResponse.getCreatedEntityByTypeNameAndAttribute(COLUMN_TYPE, NAME, (String) columnEntity3.getAttribute(NAME));
-
- // Retrieve the table entities from the Repository, to get their guids and the composite column guids.
- ITypedReferenceableInstance tableInstance = metadataService.getEntityDefinitionReference(TestUtils.TABLE_TYPE, NAME, (String) tableEntity.getAttribute(NAME));
- List<IReferenceableInstance> columns = (List<IReferenceableInstance>) tableInstance.get(COLUMNS_ATTR_NAME);
-
- //Delete column
- String colId = columns.get(0).getId()._getId();
- String tableId = tableInstance.getId()._getId();
-
- init();
-
- Map<String, Object> uniqueAttrs = new HashMap<>();
- uniqueAttrs.put(NAME, column1Created.getAttribute(NAME));
-
- AtlasEntityType columnType = typeRegistry.getEntityTypeByName(COLUMN_TYPE);
- EntityMutationResponse deletionResponse = entityStore.deleteByUniqueAttributes(columnType, uniqueAttrs);
- assertEquals(deletionResponse.getDeletedEntities().size(), 1);
- assertEquals(deletionResponse.getDeletedEntities().get(0).getGuid(), colId);
- assertEquals(deletionResponse.getUpdatedEntities().size(), 1);
- assertEquals(deletionResponse.getUpdatedEntities().get(0).getGuid(), tableId);
- assertEntityDeleted(colId);
- }
-
- @Test
- public void testDeleteEntitiesWithCompositeMapReference() throws Exception {
- // Create instances of MapOwner and MapValue.
- // Set MapOwner.map with one entry that references MapValue instance.
- AtlasEntity.AtlasEntityWithExtInfo entityDefinition = createMapOwnerAndValueEntities();
- String mapOwnerGuid = entityDefinition.getEntity().getGuid();
-
- // Verify MapOwner.map attribute has expected value.
- AtlasEntity.AtlasEntityWithExtInfo mapOwnerInstance = entityStore.getById(mapOwnerGuid);
- Object object = mapOwnerInstance.getEntity().getAttribute("map");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof Map);
- Map<String, AtlasObjectId> map = (Map<String, AtlasObjectId>)object;
- Assert.assertEquals(map.size(), 1);
- AtlasObjectId mapValueInstance = map.get("value1");
- Assert.assertNotNull(mapValueInstance);
- String mapValueGuid = mapValueInstance.getGuid();
- String edgeLabel = AtlasGraphUtilsV1.getAttributeEdgeLabel(compositeMapOwnerType, "map");
- String mapEntryLabel = edgeLabel + "." + "value1";
- AtlasEdgeLabel atlasEdgeLabel = new AtlasEdgeLabel(mapEntryLabel);
- AtlasVertex mapOwnerVertex = GraphHelper.getInstance().getVertexForGUID(mapOwnerGuid);
- object = mapOwnerVertex.getProperty(atlasEdgeLabel.getQualifiedMapKey(), Object.class);
- Assert.assertNotNull(object);
-
- init();
- List<AtlasEntityHeader> deletedEntities = entityStore.deleteById(mapOwnerGuid).getDeletedEntities();
- Assert.assertEquals(deletedEntities.size(), 2);
- Assert.assertTrue(extractGuids(deletedEntities).contains(mapOwnerGuid));
- Assert.assertTrue(extractGuids(deletedEntities).contains(mapValueGuid));
-
- assertEntityDeleted(mapOwnerGuid);
- assertEntityDeleted(mapValueGuid);
- }
-
- @Test
- public void testDeleteTargetOfRequiredMapReference() throws Exception {
- // Define type for map value.
- AtlasEntityDef mapValueDef =
- new AtlasEntityDef("RequiredMapValue", "RequiredMapValue_description", "1.0",
- Collections.<AtlasStructDef.AtlasAttributeDef>emptyList(), Collections.<String>emptySet());
-
- AtlasStructDef.AtlasAttributeDef[] mapOwnerAttributes = new AtlasStructDef.AtlasAttributeDef[]{
- new AtlasStructDef.AtlasAttributeDef("map", "map<string,RequiredMapValue>",
- false,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 1, 1,
- false, false,
- Collections.<AtlasStructDef.AtlasConstraintDef>emptyList())
- };
-
- AtlasEntityDef mapOwnerDef =
- new AtlasEntityDef("RequiredMapOwner", "RequiredMapOwner_description", "1.0",
- Arrays.asList(mapOwnerAttributes), Collections.<String>emptySet());
-
- AtlasTypesDef typesDef = AtlasTypeUtil.getTypesDef(ImmutableList.<AtlasEnumDef>of(),
- ImmutableList.<AtlasStructDef>of(),
- ImmutableList.<AtlasClassificationDef>of(),
- ImmutableList.<AtlasEntityDef>of(mapValueDef, mapOwnerDef));
-
- TestUtilsV2.populateSystemAttributes(typesDef);
-
- typeDefStore.createTypesDef(typesDef);
-
- AtlasEntityType mapOwnerType = typeRegistry.getEntityTypeByName("RequiredMapOwner");
- AtlasEntityType mapValueType = typeRegistry.getEntityTypeByName("RequiredMapValue");
-
- // Create instances of RequiredMapOwner and RequiredMapValue.
- // Set RequiredMapOwner.map with one entry that references RequiredMapValue instance.
- AtlasEntity mapOwnerInstance = new AtlasEntity(mapOwnerType.getTypeName());
- AtlasEntity mapValueInstance = new AtlasEntity(mapValueType.getTypeName());
- mapOwnerInstance.setAttribute("map", Collections.singletonMap("value1", AtlasTypeUtil.getAtlasObjectId(mapValueInstance)));
-
- AtlasEntity.AtlasEntitiesWithExtInfo entities = new AtlasEntity.AtlasEntitiesWithExtInfo();
- entities.addReferredEntity(mapValueInstance);
- entities.addEntity(mapOwnerInstance);
-
- List<AtlasEntityHeader> createEntitiesResult = entityStore.createOrUpdate(new AtlasEntityStream(entities), false).getCreatedEntities();
- Assert.assertEquals(createEntitiesResult.size(), 2);
- List<String> guids = metadataService.getEntityList("RequiredMapOwner");
- Assert.assertEquals(guids.size(), 1);
- String mapOwnerGuid = guids.get(0);
- guids = metadataService.getEntityList("RequiredMapValue");
- Assert.assertEquals(guids.size(), 1);
- String mapValueGuid = guids.get(0);
-
- // Verify MapOwner.map attribute has expected value.
- final AtlasEntity.AtlasEntityWithExtInfo mapOwnerInstance1 = entityStore.getById(mapOwnerGuid);
- Object object = mapOwnerInstance1.getEntity().getAttribute("map");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof Map);
- Map<String, AtlasObjectId> map = (Map<String, AtlasObjectId>)object;
- Assert.assertEquals(map.size(), 1);
- AtlasObjectId mapValueInstance1 = map.get("value1");
- Assert.assertNotNull(mapValueInstance1);
- Assert.assertEquals(mapValueInstance1.getGuid(), mapValueGuid);
- String edgeLabel = AtlasGraphUtilsV1.getAttributeEdgeLabel(mapOwnerType, "map");
- String mapEntryLabel = edgeLabel + "." + "value1";
- AtlasEdgeLabel atlasEdgeLabel = new AtlasEdgeLabel(mapEntryLabel);
- AtlasVertex mapOwnerVertex = GraphHelper.getInstance().getVertexForGUID(mapOwnerGuid);
- object = mapOwnerVertex.getProperty(atlasEdgeLabel.getQualifiedMapKey(), Object.class);
- Assert.assertNotNull(object);
-
- // Verify deleting the target of required map attribute throws a AtlasBaseException.
- try {
- entityStore.deleteById(mapValueGuid);
- Assert.fail(AtlasBaseException.class.getSimpleName() + " was expected but none thrown.");
- }
- catch (Exception e) {
- verifyExceptionThrown(e, AtlasBaseException.class);
- }
- }
-
- @Test
- public void testLowerBoundsIgnoredWhenDeletingCompositeEntitesOwnedByMap() throws Exception {
- // Define MapValueReferencer type with required reference to CompositeMapValue.
- AtlasStructDef.AtlasAttributeDef[] mapValueAttributes = new AtlasStructDef.AtlasAttributeDef[]{
- new AtlasStructDef.AtlasAttributeDef("refToMapValue", "CompositeMapValue",
- false,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 1, 1,
- false, false,
- Collections.<AtlasStructDef.AtlasConstraintDef>emptyList())
- };
-
- AtlasEntityDef mapValueDef =
- new AtlasEntityDef("MapValueReferencer", "RequiredMapValue_description", "1.0",
- Arrays.asList(mapValueAttributes), Collections.<String>emptySet());
-
-
- AtlasStructDef.AtlasAttributeDef[] mapContainerAttributes = new AtlasStructDef.AtlasAttributeDef[]{
- new AtlasStructDef.AtlasAttributeDef("requiredMap", "map<string,MapValueReferencer>",
- false,
- AtlasStructDef.AtlasAttributeDef.Cardinality.SINGLE, 1, 1,
- false, false,
- new ArrayList<AtlasStructDef.AtlasConstraintDef>() {{
- add(new AtlasStructDef.AtlasConstraintDef(AtlasStructDef.AtlasConstraintDef.CONSTRAINT_TYPE_OWNED_REF));
- }})
- };
-
- AtlasEntityDef mapContainerDef =
- new AtlasEntityDef("MapValueReferencerContainer", "MapValueReferencerContainer_description", "1.0",
- Arrays.asList(mapContainerAttributes), Collections.<String>emptySet());
-
-
- AtlasTypesDef typesDef = AtlasTypeUtil.getTypesDef(ImmutableList.<AtlasEnumDef>of(),
- ImmutableList.<AtlasStructDef>of(),
- ImmutableList.<AtlasClassificationDef>of(),
- ImmutableList.<AtlasEntityDef>of(mapValueDef, mapContainerDef));
-
- typeDefStore.createTypesDef(typesDef);
-
- // Create instances of CompositeMapOwner and CompositeMapValue.
- // Set MapOwner.map with one entry that references MapValue instance.
- AtlasEntity.AtlasEntityWithExtInfo entityDefinition = createMapOwnerAndValueEntities();
- String mapOwnerGuid = entityDefinition.getEntity().getGuid();
-
- // Verify MapOwner.map attribute has expected value.
- ITypedReferenceableInstance mapOwnerInstance = metadataService.getEntityDefinition(mapOwnerGuid);
- Object object = mapOwnerInstance.get("map");
- Assert.assertNotNull(object);
- Assert.assertTrue(object instanceof Map);
- Map<String, ITypedReferenceableInstance> map = (Map<String, ITypedReferenceableInstance>)object;
- Assert.assertEquals(map.size(), 1);
- ITypedReferenceableInstance mapValueInstance = map.get("value1");
- Assert.assertNotNull(mapValueInstance);
- String mapValueGuid = mapValueInstance.getId()._getId();
-
- // Create instance of MapValueReferencerContainer
- init();
- AtlasEntity mapValueReferencer = new AtlasEntity(mapValueDef.getName());
- mapValueReferencer.setAttribute("refToMapValue", new AtlasObjectId(mapValueInstance.getId()._getId(), mapValueInstance.getTypeName()));
- AtlasEntity.AtlasEntitiesWithExtInfo entities = new AtlasEntity.AtlasEntitiesWithExtInfo();
- entities.addEntity(mapValueReferencer);
-
- List<AtlasEntityHeader> createEntitiesResult = entityStore.createOrUpdate(new AtlasEntityStream(entities), false).getCreatedEntities();
- Assert.assertEquals(createEntitiesResult.size(), 1);
-
- // Create instance of MapValueReferencer, and update mapValueReferencerContainer
- // to reference it.
- AtlasEntity mapValueReferenceContainer = new AtlasEntity(mapContainerDef.getName());
- entities = new AtlasEntity.AtlasEntitiesWithExtInfo();
- entities.addEntity(mapValueReferenceContainer);
- entities.addReferredEntity(mapValueReferencer);
- mapValueReferenceContainer.setAttribute("requiredMap", Collections.singletonMap("value1", AtlasTypeUtil.getAtlasObjectId(mapValueReferencer)));
-
-
- init();
- EntityMutationResponse updateEntitiesResult = entityStore.createOrUpdate(new AtlasEntityStream(entities), false);
-
- String mapValueReferencerContainerGuid = updateEntitiesResult.getCreatedEntitiesByTypeName("MapValueReferencerContainer").get(0).getGuid();
- String mapValueReferencerGuid = updateEntitiesResult.getUpdatedEntitiesByTypeName("MapValueReferencer").get(0).getGuid();
-
- Assert.assertEquals(updateEntitiesResult.getCreatedEntities().size(), 1);
- Assert.assertEquals(updateEntitiesResult.getUpdatedEntities().size(), 1);
- Assert.assertEquals(updateEntitiesResult.getUpdatedEntities().get(0).getGuid(), mapValueReferencerGuid);
-
-
- // Delete map owner and map referencer container. A total of 4 entities should be deleted,
- // including the composite entities. The lower bound constraint on MapValueReferencer.refToMapValue
- // should not be enforced on the composite MapValueReferencer since it is being deleted.
- EntityMutationResponse deleteEntitiesResult = entityStore.deleteByIds(Arrays.asList(mapOwnerGuid, mapValueReferencerContainerGuid));
- Assert.assertEquals(deleteEntitiesResult.getDeletedEntities().size(), 4);
- Assert.assertTrue(extractGuids(deleteEntitiesResult.getDeletedEntities()).containsAll(
- Arrays.asList(mapOwnerGuid, mapValueGuid, mapValueReferencerContainerGuid, mapValueReferencerGuid)));
- }
-
- private AtlasEntity.AtlasEntityWithExtInfo createMapOwnerAndValueEntities()
- throws AtlasException, AtlasBaseException {
-
- final AtlasEntity mapOwnerInstance = new AtlasEntity(compositeMapOwnerType.getTypeName());
- mapOwnerInstance.setAttribute(NAME, TestUtils.randomString());
- AtlasEntity mapValueInstance = new AtlasEntity(compositeMapValueType.getTypeName());
- mapValueInstance.setAttribute(NAME, TestUtils.randomString());
- mapOwnerInstance.setAttribute("map", Collections.singletonMap("value1", AtlasTypeUtil.getAtlasObjectId(mapValueInstance)));
-
- AtlasEntity.AtlasEntitiesWithExtInfo entities = new AtlasEntity.AtlasEntitiesWithExtInfo();
- entities.addReferredEntity(mapValueInstance);
- entities.addEntity(mapOwnerInstance);
-
- List<AtlasEntityHeader> createEntitiesResult = entityStore.createOrUpdate(new AtlasEntityStream(entities), false).getCreatedEntities();
- Assert.assertEquals(createEntitiesResult.size(), 2);
- AtlasEntity.AtlasEntityWithExtInfo entityDefinition = entityStore.getByUniqueAttributes(compositeMapOwnerType,
- new HashMap<String, Object>() {{
- put(NAME, mapOwnerInstance.getAttribute(NAME));
- }});
- return entityDefinition;
- }
-
-
- protected abstract void assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(
- String structContainerGuid) throws Exception;
-
- protected abstract void assertVerticesDeleted(List<AtlasVertex> vertices);
-
- protected List<AtlasVertex> getVertices(String propertyName, Object value) {
- AtlasGraph graph = TestUtils.getGraph();
- Iterable<AtlasVertex> vertices = graph.getVertices(propertyName, value);
- List<AtlasVertex> list = new ArrayList<>();
- for (AtlasVertex vertex : vertices) {
- list.add(vertex);
- }
- return list;
- }
-
- /**
- * Search exception cause chain for specified exception.
- *
- * @param thrown root of thrown exception chain
- * @param expected class of expected exception
- */
- private void verifyExceptionThrown(Exception thrown, Class expected) {
-
- boolean exceptionFound = false;
- Throwable cause = thrown;
- while (cause != null) {
- if (expected.isInstance(cause)) {
- // good
- exceptionFound = true;
- break;
- }
- else {
- cause = cause.getCause();
- }
- }
- if (!exceptionFound) {
- Assert.fail(expected.getSimpleName() + " was expected but not thrown", thrown);
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityDefStoreV1Test.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityDefStoreV1Test.java b/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityDefStoreV1Test.java
index 95b5761..1198283 100644
--- a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityDefStoreV1Test.java
+++ b/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityDefStoreV1Test.java
@@ -17,7 +17,6 @@
*/
package org.apache.atlas.repository.store.graph.v1;
-import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasErrorCode;
@@ -33,6 +32,8 @@ import org.testng.annotations.DataProvider;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
+import java.util.Collections;
+
/**
* Tests for AtlasEntityStoreV1
*/
@@ -46,7 +47,7 @@ public class AtlasEntityDefStoreV1Test {
@DataProvider
public Object[][] invalidAttributeNameWithReservedKeywords(){
AtlasEntityDef invalidAttrNameType =
- AtlasTypeUtil.createClassTypeDef("Invalid_Attribute_Type", "description", ImmutableSet.<String>of(),
+ AtlasTypeUtil.createClassTypeDef("Invalid_Attribute_Type", "description", Collections.emptySet(),
AtlasTypeUtil.createRequiredAttrDef("order", "string"),
AtlasTypeUtil.createRequiredAttrDef("limit", "string"));
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1Test.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1Test.java b/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1Test.java
index ef8bd2f..fd1b6db 100644
--- a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1Test.java
+++ b/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasEntityStoreV1Test.java
@@ -22,7 +22,6 @@ import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
import org.apache.atlas.TestModules;
import org.apache.atlas.RequestContextV1;
-import org.apache.atlas.TestUtils;
import org.apache.atlas.TestUtilsV2;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.instance.AtlasClassification;
@@ -45,7 +44,6 @@ import org.apache.atlas.repository.graph.AtlasGraphProvider;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.store.bootstrap.AtlasTypeDefStoreInitializer;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
-import org.apache.atlas.services.MetadataService;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasArrayType;
import org.apache.atlas.type.AtlasMapType;
@@ -72,10 +70,10 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import static org.apache.atlas.TestUtils.COLUMNS_ATTR_NAME;
-import static org.apache.atlas.TestUtils.COLUMN_TYPE;
-import static org.apache.atlas.TestUtils.NAME;
-import static org.apache.atlas.TestUtils.randomString;
+import static org.apache.atlas.TestUtilsV2.COLUMNS_ATTR_NAME;
+import static org.apache.atlas.TestUtilsV2.COLUMN_TYPE;
+import static org.apache.atlas.TestUtilsV2.NAME;
+import static org.apache.atlas.TestUtilsV2.randomString;
import static org.apache.atlas.TestUtilsV2.STORAGE_DESC_TYPE;
import static org.apache.atlas.TestUtilsV2.TABLE_TYPE;
import static org.mockito.Mockito.mock;
@@ -96,9 +94,6 @@ public class AtlasEntityStoreV1Test {
AtlasEntityStore entityStore;
@Inject
- MetadataService metadataService;
-
- @Inject
DeleteHandlerV1 deleteHandler;
private AtlasEntitiesWithExtInfo deptEntity;
@@ -116,7 +111,6 @@ public class AtlasEntityStoreV1Test {
RequestContextV1.clear();
RequestContextV1.get().setUser(TestUtilsV2.TEST_USER);
- metadataService = TestUtils.addSessionCleanupWrapper(metadataService);
new GraphBackedSearchIndexer(typeRegistry);
AtlasTypesDef[] testTypesDefs = new AtlasTypesDef[] { TestUtilsV2.defineDeptEmployeeTypes(),
@@ -312,7 +306,7 @@ public class AtlasEntityStoreV1Test {
AtlasEntity tableEntity = new AtlasEntity(tblEntity.getEntity());
AtlasEntitiesWithExtInfo entitiesInfo = new AtlasEntitiesWithExtInfo(tableEntity);
Map<String, AtlasStruct> partsMap = new HashMap<>();
- partsMap.put("part0", new AtlasStruct(TestUtils.PARTITION_STRUCT_TYPE, TestUtilsV2.NAME, "test"));
+ partsMap.put("part0", new AtlasStruct(TestUtilsV2.PARTITION_STRUCT_TYPE, TestUtilsV2.NAME, "test"));
tableEntity.setAttribute("partitionsMap", partsMap);
@@ -326,7 +320,7 @@ public class AtlasEntityStoreV1Test {
Assert.assertTrue(partsMap.get("part0").equals(((Map<String, AtlasStruct>) updatedTableDef1.getAttribute("partitionsMap")).get("part0")));
//update map - add a map key
- partsMap.put("part1", new AtlasStruct(TestUtils.PARTITION_STRUCT_TYPE, TestUtilsV2.NAME, "test1"));
+ partsMap.put("part1", new AtlasStruct(TestUtilsV2.PARTITION_STRUCT_TYPE, TestUtilsV2.NAME, "test1"));
tableEntity.setAttribute("partitionsMap", partsMap);
init();
@@ -341,7 +335,7 @@ public class AtlasEntityStoreV1Test {
//update map - remove a key and add another key
partsMap.remove("part0");
- partsMap.put("part2", new AtlasStruct(TestUtils.PARTITION_STRUCT_TYPE, TestUtilsV2.NAME, "test2"));
+ partsMap.put("part2", new AtlasStruct(TestUtilsV2.PARTITION_STRUCT_TYPE, TestUtilsV2.NAME, "test2"));
tableEntity.setAttribute("partitionsMap", partsMap);
init();
@@ -383,7 +377,7 @@ public class AtlasEntityStoreV1Test {
init();
entityStore.createOrUpdate(new AtlasEntityStream(col0WithExtendedInfo), false);
- AtlasEntity col1 = new AtlasEntity(TestUtils.COLUMN_TYPE, TestUtilsV2.NAME, "test2");
+ AtlasEntity col1 = new AtlasEntity(TestUtilsV2.COLUMN_TYPE, TestUtilsV2.NAME, "test2");
col1.setAttribute("type", "string");
col1.setAttribute("table", AtlasTypeUtil.getAtlasObjectId(tableEntity));
@@ -398,7 +392,7 @@ public class AtlasEntityStoreV1Test {
columnsMap.put("col0", AtlasTypeUtil.getAtlasObjectId(col0));
columnsMap.put("col1", AtlasTypeUtil.getAtlasObjectId(col1));
- tableEntity.setAttribute(TestUtils.COLUMNS_MAP, columnsMap);
+ tableEntity.setAttribute(TestUtilsV2.COLUMNS_MAP, columnsMap);
entitiesInfo.addReferredEntity(col0);
entitiesInfo.addReferredEntity(col1);
@@ -412,7 +406,7 @@ public class AtlasEntityStoreV1Test {
columnsMap.put("col0", AtlasTypeUtil.getAtlasObjectId(col1));
columnsMap.put("col1", AtlasTypeUtil.getAtlasObjectId(col0));
- tableEntity.setAttribute(TestUtils.COLUMNS_MAP, columnsMap);
+ tableEntity.setAttribute(TestUtilsV2.COLUMNS_MAP, columnsMap);
init();
response = entityStore.createOrUpdate(new AtlasEntityStream(entitiesInfo), false);
AtlasEntityHeader tableDefinition6 = response.getFirstUpdatedEntityByTypeName(TABLE_TYPE);
@@ -431,7 +425,7 @@ public class AtlasEntityStoreV1Test {
validateEntity(entitiesInfo, getEntityFromStore(tableDefinition7));
//Clear state
- tableEntity.setAttribute(TestUtils.COLUMNS_MAP, null);
+ tableEntity.setAttribute(TestUtilsV2.COLUMNS_MAP, null);
init();
response = entityStore.createOrUpdate(new AtlasEntityStream(entitiesInfo), false);
AtlasEntityHeader tableDefinition8 = response.getFirstUpdatedEntityByTypeName(TABLE_TYPE);
@@ -481,7 +475,7 @@ public class AtlasEntityStoreV1Test {
validateEntity(entitiesInfo, getEntityFromStore(updatedTable));
//add a new element to array of struct
- partitions.add(new AtlasStruct(TestUtils.PARTITION_STRUCT_TYPE, TestUtilsV2.NAME, "part3"));
+ partitions.add(new AtlasStruct(TestUtilsV2.PARTITION_STRUCT_TYPE, TestUtilsV2.NAME, "part3"));
init();
response = entityStore.createOrUpdate(new AtlasEntityStream(entitiesInfo), false);
updatedTable = response.getFirstUpdatedEntityByTypeName(TABLE_TYPE);
@@ -503,7 +497,7 @@ public class AtlasEntityStoreV1Test {
//add a repeated element to array of struct
- partitions.add(new AtlasStruct(TestUtils.PARTITION_STRUCT_TYPE, TestUtilsV2.NAME, "part4"));
+ partitions.add(new AtlasStruct(TestUtilsV2.PARTITION_STRUCT_TYPE, TestUtilsV2.NAME, "part4"));
init();
response = entityStore.createOrUpdate(new AtlasEntityStream(entitiesInfo), false);
updatedTable = response.getFirstUpdatedEntityByTypeName(TABLE_TYPE);
@@ -524,7 +518,7 @@ public class AtlasEntityStoreV1Test {
AtlasEntity tableEntity = new AtlasEntity(tblEntity.getEntity());
AtlasEntitiesWithExtInfo entitiesInfo = new AtlasEntitiesWithExtInfo(tableEntity);
- AtlasStruct serdeInstance = new AtlasStruct(TestUtils.SERDE_TYPE, TestUtilsV2.NAME, "serde1Name");
+ AtlasStruct serdeInstance = new AtlasStruct(TestUtilsV2.SERDE_TYPE, TestUtilsV2.NAME, "serde1Name");
serdeInstance.setAttribute("serde", "test");
serdeInstance.setAttribute("description", "testDesc");
tableEntity.setAttribute("serde1", serdeInstance);
@@ -564,7 +558,7 @@ public class AtlasEntityStoreV1Test {
init();
Map<String, AtlasEntity> tableCloneMap = new HashMap<>();
AtlasEntity tableClone = new AtlasEntity(tblEntity.getEntity());
- tableClone.setAttribute("database", new AtlasObjectId(dbCreated.getGuid(), TestUtils.DATABASE_TYPE));
+ tableClone.setAttribute("database", new AtlasObjectId(dbCreated.getGuid(), TestUtilsV2.DATABASE_TYPE));
tableCloneMap.put(dbCreated.getGuid(), databaseInstance);
tableCloneMap.put(tableClone.getGuid(), tableClone);
@@ -628,7 +622,7 @@ public class AtlasEntityStoreV1Test {
//TODO : Failing in typedef creation
public void testSpecialCharacters() throws Exception {
//Verify that type can be created with reserved characters in typename, attribute name
- final String typeName = TestUtils.randomString(10);
+ final String typeName = TestUtilsV2.randomString(10);
String strAttrName = randomStrWithReservedChars();
String arrayAttrName = randomStrWithReservedChars();
String mapAttrName = randomStrWithReservedChars();
@@ -670,7 +664,7 @@ public class AtlasEntityStoreV1Test {
//Update required attribute
Map<String, AtlasEntity> tableCloneMap = new HashMap<>();
AtlasEntity tableEntity = new AtlasEntity(TABLE_TYPE);
- tableEntity.setAttribute(TestUtilsV2.NAME, "table_" + TestUtils.randomString());
+ tableEntity.setAttribute(TestUtilsV2.NAME, "table_" + TestUtilsV2.randomString());
tableCloneMap.put(tableEntity.getGuid(), tableEntity);
entityStore.createOrUpdate(new InMemoryMapEntityStream(tableCloneMap), false);
@@ -684,7 +678,7 @@ public class AtlasEntityStoreV1Test {
init();
AtlasEntity dbEntity = new AtlasEntity(TestUtilsV2.DATABASE_TYPE);
- dbEntity.setAttribute("name", TestUtils.randomString(10));
+ dbEntity.setAttribute("name", TestUtilsV2.randomString(10));
dbEntity.setAttribute("description", "us db");
dbEntity.setAttribute("isReplicated", false);
dbEntity.setAttribute("created", "09081988");
@@ -730,7 +724,7 @@ public class AtlasEntityStoreV1Test {
// create a new table type
AtlasEntity tblEntity = new AtlasEntity(TABLE_TYPE);
- tblEntity.setAttribute("name", TestUtils.randomString(10));
+ tblEntity.setAttribute("name", TestUtilsV2.randomString(10));
tblEntity.setAttribute("type", "type");
tblEntity.setAttribute("tableType", "MANAGED");
tblEntity.setAttribute("database", AtlasTypeUtil.getAtlasObjectId(updatedDbEntity));
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasRelationshipStoreV1Test.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasRelationshipStoreV1Test.java b/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasRelationshipStoreV1Test.java
index 8c58a92..d207a69 100644
--- a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasRelationshipStoreV1Test.java
+++ b/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/AtlasRelationshipStoreV1Test.java
@@ -56,7 +56,7 @@ import static org.apache.atlas.TestRelationshipUtilsV2.EMPLOYEE_TYPE;
import static org.apache.atlas.TestRelationshipUtilsV2.getDepartmentEmployeeInstances;
import static org.apache.atlas.TestRelationshipUtilsV2.getDepartmentEmployeeTypes;
import static org.apache.atlas.TestRelationshipUtilsV2.getInverseReferenceTestTypes;
-import static org.apache.atlas.TestUtils.NAME;
+import static org.apache.atlas.TestUtilsV2.NAME;
import static org.apache.atlas.type.AtlasTypeUtil.getAtlasObjectId;
import static org.mockito.Mockito.mock;
import static org.testng.Assert.assertEquals;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/HardDeleteHandlerV1Test.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/HardDeleteHandlerV1Test.java b/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/HardDeleteHandlerV1Test.java
deleted file mode 100644
index 407ebff..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/store/graph/v1/HardDeleteHandlerV1Test.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.store.graph.v1;
-
-
-import org.apache.atlas.AtlasClient;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.model.instance.AtlasEntity;
-import org.apache.atlas.model.instance.AtlasEntityHeader;
-import org.apache.atlas.model.instance.AtlasObjectId;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.testng.Assert;
-import org.testng.annotations.Guice;
-
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.atlas.TestUtils.COLUMNS_ATTR_NAME;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertFalse;
-import static org.testng.Assert.assertNull;
-import static org.testng.Assert.fail;
-import static org.testng.AssertJUnit.assertNotNull;
-
-@Guice(modules = TestModules.HardDeleteModule.class)
-public class HardDeleteHandlerV1Test extends AtlasDeleteHandlerV1Test {
-
-
- @Override
- protected void assertTableForTestDeleteReference(String tableId) {
- //entity is deleted. So, no assertions
- }
-
- @Override
- protected void assertColumnForTestDeleteReference(final AtlasEntity.AtlasEntityWithExtInfo tableInstance) throws AtlasBaseException {
- List<AtlasObjectId> columns = (List<AtlasObjectId>) tableInstance.getEntity().getAttribute(COLUMNS_ATTR_NAME);
- assertNull(columns);
- }
-
- @Override
- protected void assertProcessForTestDeleteReference(final AtlasEntityHeader processInstance) throws Exception {
- //assert that outputs is empty
- ITypedReferenceableInstance newProcess =
- metadataService.getEntityDefinition(processInstance.getGuid());
- assertNull(newProcess.get(AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS));
- }
-
- @Override
- protected void assertEntityDeleted(String id) throws Exception {
- try {
- entityStore.getById(id);
- fail("Expected EntityNotFoundException");
- } catch (AtlasBaseException e) {
- // expected
- }
- }
-
- @Override
- protected void assertDeletedColumn(final AtlasEntity.AtlasEntityWithExtInfo tableInstance) throws AtlasException {
- final List<AtlasObjectId> columns = (List<AtlasObjectId>) tableInstance.getEntity().getAttribute(COLUMNS_ATTR_NAME);
- Assert.assertEquals(columns.size(), 2);
- }
-
- @Override
- protected void assertTestDeleteEntities(AtlasEntity.AtlasEntityWithExtInfo tableInstance) {
- int vertexCount = getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, TestUtils.TABLE_TYPE).size();
- assertEquals(vertexCount, 0);
-
- vertexCount = getVertices(Constants.ENTITY_TYPE_PROPERTY_KEY, TestUtils.COLUMN_TYPE).size();
- assertEquals(vertexCount, 0);
- }
-
- @Override
- protected void assertVerticesDeleted(List<AtlasVertex> vertices) {
- assertEquals(vertices.size(), 0);
- }
-
- @Override
- protected void assertTestUpdateEntity_MultiplicityOneNonCompositeReference(String janeGuid) throws Exception {
- // Verify that max is no longer a subordinate of jane.
- ITypedReferenceableInstance jane = metadataService.getEntityDefinition(janeGuid);
- List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
- Assert.assertEquals(subordinates.size(), 1);
- }
-
- @Override
- protected void assertJohnForTestDisconnectBidirectionalReferences(final AtlasEntity.AtlasEntityWithExtInfo john, final String janeGuid) throws Exception {
- assertNull(john.getEntity().getAttribute("manager"));
- }
-
- @Override
- protected void assertMaxForTestDisconnectBidirectionalReferences(Map<String, String> nameGuidMap)
- throws Exception {
- // Verify that the Department.employees reference to the deleted employee
- // was disconnected.
- ITypedReferenceableInstance hrDept = metadataService.getEntityDefinition(nameGuidMap.get("hr"));
- List<ITypedReferenceableInstance> employees = (List<ITypedReferenceableInstance>) hrDept.get("employees");
- Assert.assertEquals(employees.size(), 3);
- String maxGuid = nameGuidMap.get("Max");
- for (ITypedReferenceableInstance employee : employees) {
- Assert.assertNotEquals(employee.getId()._getId(), maxGuid);
- }
-
- // Verify that the Manager.subordinates reference to the deleted employee
- // Max was disconnected.
- ITypedReferenceableInstance jane = metadataService.getEntityDefinition(nameGuidMap.get("Jane"));
- List<ITypedReferenceableInstance> subordinates = (List<ITypedReferenceableInstance>) jane.get("subordinates");
- assertEquals(subordinates.size(), 1);
-
- // Verify that max's Person.mentor unidirectional reference to john was disconnected.
- ITypedReferenceableInstance john = metadataService.getEntityDefinition(nameGuidMap.get("John"));
- assertNull(john.get("mentor"));
- }
-
- @Override
- protected void assertTestDisconnectUnidirectionalArrayReferenceFromClassType(
- List<AtlasObjectId> columns, String columnGuid) {
- assertEquals(columns.size(), 2);
- for (AtlasObjectId column : columns) {
- assertFalse(column.getGuid().equals(columnGuid));
- }
- }
-
- protected void assertTestDisconnectMapReferenceFromClassType(final String mapOwnerGuid) throws Exception {
- // Verify map references from mapOwner were disconnected.
- AtlasEntity.AtlasEntityWithExtInfo mapOwnerInstance = entityStore.getById(mapOwnerGuid);
- Map<String, AtlasObjectId> map =
- (Map<String, AtlasObjectId>) mapOwnerInstance.getEntity().getAttribute("map");
- Assert.assertNull(map);
- Map<String, AtlasObjectId> biMap =
- (Map<String, AtlasObjectId>) mapOwnerInstance.getEntity().getAttribute("biMap");
- Assert.assertNull(biMap);
-
- AtlasVertex mapOwnerVertex = GraphHelper.getInstance().getVertexForGUID(mapOwnerGuid);
- Object object = mapOwnerVertex.getProperty("MapOwner.map.value1", String.class);
- assertNull(object);
- object = mapOwnerVertex.getProperty("MapOwner.biMap.value1", String.class);
- assertNull(object);
- }
-
- @Override
- protected void assertTestDisconnectUnidirectionalArrayReferenceFromStructAndTraitTypes(String structContainerGuid)
- throws Exception {
- // Verify that the unidirectional references from the struct and trait instances
- // to the deleted entities were disconnected.
- ITypedReferenceableInstance structContainerConvertedEntity =
- metadataService.getEntityDefinition(structContainerGuid);
- ITypedStruct struct = (ITypedStruct) structContainerConvertedEntity.get("struct");
- assertNull(struct.get("target"));
- IStruct trait = structContainerConvertedEntity.getTrait("TestTrait");
- assertNotNull(trait);
- assertNull(trait.get("target"));
- }
-}
[27/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java b/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java
deleted file mode 100755
index 9eb695c..0000000
--- a/repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java
+++ /dev/null
@@ -1,821 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.services;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import org.apache.atlas.AtlasClient;
-import org.apache.atlas.AtlasErrorCode;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.CreateUpdateEntitiesResult;
-import org.apache.atlas.EntityAuditEvent;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.ha.HAConfiguration;
-import org.apache.atlas.listener.ActiveStateChangeHandler;
-import org.apache.atlas.listener.ChangedTypeDefs;
-import org.apache.atlas.listener.EntityChangeListener;
-import org.apache.atlas.listener.TypeDefChangeListener;
-import org.apache.atlas.listener.TypesChangeListener;
-import org.apache.atlas.model.legacy.EntityResult;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.audit.EntityAuditRepository;
-import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.repository.typestore.ITypeStore;
-import org.apache.atlas.type.AtlasTypeUtil;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.TypeNotFoundException;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
-import org.apache.atlas.typesystem.types.*;
-import org.apache.atlas.typesystem.types.cache.TypeCache;
-import org.apache.atlas.utils.ParamChecker;
-import org.apache.commons.configuration.Configuration;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Component;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-
-
-/**
- * Simple wrapper over TypeSystem and MetadataRepository services with hooks
- * for listening to changes to the repository.
- */
-@Singleton
-@Component
-@Deprecated
-public class DefaultMetadataService implements MetadataService, ActiveStateChangeHandler, TypeDefChangeListener {
- private enum OperationType {
- CREATE, UPDATE, DELETE
- };
-
- private static final Logger LOG = LoggerFactory.getLogger(DefaultMetadataService.class);
- private final short maxAuditResults;
- private static final String CONFIG_MAX_AUDIT_RESULTS = "atlas.audit.maxResults";
- private static final short DEFAULT_MAX_AUDIT_RESULTS = 1000;
-
- private final TypeSystem typeSystem;
- private final MetadataRepository repository;
- private final ITypeStore typeStore;
-
- private final Collection<TypesChangeListener> typeChangeListeners = new LinkedHashSet<>();
- private final Collection<EntityChangeListener> entityChangeListeners = new LinkedHashSet<>();
-
- private EntityAuditRepository auditRepository;
-
- @Inject
- public DefaultMetadataService(final MetadataRepository repository, final ITypeStore typeStore,
- final Set<TypesChangeListener> typesChangeListeners,
- final Set<EntityChangeListener> entityChangeListeners,
- final TypeSystem typeSystem,
- final Configuration configuration,
- TypeCache typeCache,
- EntityAuditRepository auditRepository) throws AtlasException {
- this.typeStore = typeStore;
- this.typeSystem = typeSystem;
- /**
- * Ideally a TypeCache implementation should have been injected in the TypeSystemProvider,
- * but a singleton of TypeSystem is constructed privately within the class so that
- * clients of TypeSystem would never instantiate a TypeSystem object directly in
- * their code. As soon as a client makes a call to TypeSystem.getInstance(), they
- * should have the singleton ready for consumption. Manually inject TypeSystem with
- * the Guice-instantiated type cache here, before types are restored.
- * This allows cache implementations to participate in Guice dependency injection.
- */
- this.typeSystem.setTypeCache(typeCache);
-
- this.repository = repository;
-
- this.typeChangeListeners.addAll(typesChangeListeners);
-
- this.entityChangeListeners.addAll(entityChangeListeners);
-
- if (!HAConfiguration.isHAEnabled(configuration)) {
- restoreTypeSystem();
- }
-
- maxAuditResults = configuration.getShort(CONFIG_MAX_AUDIT_RESULTS, DEFAULT_MAX_AUDIT_RESULTS);
-
- this.auditRepository = auditRepository;
- }
-
- private void restoreTypeSystem() throws AtlasException {
- LOG.info("Restoring type system from the store");
-
- TypesDef typesDef = typeStore.restore();
-
- refreshCache(typesDef);
-
- LOG.info("Restored type system from the store");
- }
-
- private void refreshCache(TypesDef typesDef) throws AtlasException {
- if (typesDef != null && !typesDef.isEmpty()) {
- TypeSystem.TransientTypeSystem transientTypeSystem = typeSystem.createTransientTypeSystem(typesDef, true);
- Map<String, IDataType> typesAdded = transientTypeSystem.getTypesAdded();
- LOG.info("Number of types got from transient type system: {}", typesAdded.size());
- typeSystem.commitTypes(typesAdded);
- }
- }
-
- /**
- * Creates a new type based on the type system to enable adding
- * entities (instances for types).
- *
- * @param typeDefinition definition as json
- * @return a unique id for this type
- */
- @Override
- public JSONObject createType(String typeDefinition) throws AtlasException {
- return createOrUpdateTypes(OperationType.CREATE, typeDefinition, false);
- }
-
- private JSONObject createOrUpdateTypes(OperationType opType, String typeDefinition, boolean isUpdate) throws AtlasException {
- typeDefinition = ParamChecker.notEmpty(typeDefinition, "type definition");
- TypesDef typesDef = validateTypeDefinition(opType, typeDefinition);
-
-
- try {
- final TypeSystem.TransientTypeSystem transientTypeSystem = typeSystem.createTransientTypeSystem(typesDef, isUpdate);
- final Map<String, IDataType> typesAdded = transientTypeSystem.getTypesAdded();
- try {
- /* Create indexes first so that if index creation fails then we rollback
- the typesystem and also do not persist the graph
- */
- if (isUpdate) {
- onTypesUpdated(typesAdded);
- } else {
- onTypesAdded(typesAdded);
- }
- typeStore.store(transientTypeSystem, ImmutableList.copyOf(typesAdded.keySet()));
- typeSystem.commitTypes(typesAdded);
- } catch (Throwable t) {
- throw new AtlasException("Unable to persist types ", t);
- }
-
- return new JSONObject() {{
- put(AtlasClient.TYPES, typesAdded.keySet());
- }};
- } catch (JSONException e) {
- LOG.error("Unable to create response for types={}", typeDefinition, e);
- throw new AtlasException("Unable to create response ", e);
- }
- }
-
- @Override
- public JSONObject updateType(String typeDefinition) throws AtlasException {
- return createOrUpdateTypes(OperationType.UPDATE, typeDefinition, true);
- }
-
- private TypesDef validateTypeDefinition(OperationType opType, String typeDefinition) throws AtlasException {
- final String exceptionErrorMessageFormat = "%s for '%s' failed: %s";
-
- try {
- TypesDef typesDef = TypesSerialization.fromJson(typeDefinition);
- if (typesDef.isEmpty()) {
- throw new IllegalArgumentException("Invalid type definition");
- }
-
- for (HierarchicalTypeDefinition<ClassType> t : typesDef.classTypesAsJavaList()) {
- if (!AtlasTypeUtil.isValidTypeName(t.typeName))
- throw new AtlasException(
- String.format(exceptionErrorMessageFormat, opType.toString(), t.typeName, AtlasTypeUtil.getInvalidTypeNameErrorMessage()));
- }
-
- for (StructTypeDefinition t : typesDef.structTypesAsJavaList()) {
- if (!AtlasTypeUtil.isValidTypeName(t.typeName))
- throw new AtlasException(
- String.format(exceptionErrorMessageFormat, opType.toString(), t.typeName, AtlasTypeUtil.getInvalidTypeNameErrorMessage()));
- }
-
- for (EnumTypeDefinition t : typesDef.enumTypesAsJavaList()) {
- if (!AtlasTypeUtil.isValidTypeName(t.name))
- throw new AtlasException(
- String.format(exceptionErrorMessageFormat, opType.toString(), t.name, AtlasTypeUtil.getInvalidTypeNameErrorMessage()));
- }
-
- for (HierarchicalTypeDefinition<TraitType> t : typesDef.traitTypesAsJavaList()) {
- if (!AtlasTypeUtil.isValidTraitTypeName(t.typeName))
- throw new AtlasException(
- String.format(exceptionErrorMessageFormat, opType.toString(), t.typeName, AtlasTypeUtil.getInvalidTraitTypeNameErrorMessage()));
- }
-
- return typesDef;
- }
- catch (Exception e) {
- LOG.error("Unable to deserialize json={}", typeDefinition, e);
- throw new IllegalArgumentException("Unable to deserialize json " + typeDefinition, e);
- }
- }
-
- /**
- * Return the definition for the given type.
- *
- * @param typeName name for this type, must be unique
- * @return type definition as JSON
- */
- @Override
- public String getTypeDefinition(String typeName) throws AtlasException {
- final IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
- return TypesSerialization.toJson(typeSystem, dataType.getName());
- }
-
- /**
- * Return the list of type names in the type system which match the specified filter.
- *
- * @return list of type names
- * @param filterMap - Map of filter for type names. Valid keys are CATEGORY, SUPERTYPE, NOT_SUPERTYPE
- * For example, CATEGORY = TRAIT && SUPERTYPE contains 'X' && SUPERTYPE !contains 'Y'
- * If there is no filter, all the types are returned
- */
- @Override
- public List<String> getTypeNames(Map<TypeCache.TYPE_FILTER, String> filterMap) throws AtlasException {
- return typeSystem.getTypeNames(filterMap);
- }
-
- /**
- * Creates an entity, instance of the type.
- *
- * @param entityInstanceDefinition json array of entity definitions
- * @return guids - list of guids
- */
- @Override
- public CreateUpdateEntitiesResult createEntities(String entityInstanceDefinition) throws AtlasException {
- entityInstanceDefinition = ParamChecker.notEmpty(entityInstanceDefinition, "Entity instance definition");
-
- ITypedReferenceableInstance[] typedInstances = deserializeClassInstances(entityInstanceDefinition);
-
- return createEntities(typedInstances);
- }
-
- public CreateUpdateEntitiesResult createEntities(ITypedReferenceableInstance[] typedInstances) throws AtlasException {
- final CreateUpdateEntitiesResult result = repository.createEntities(typedInstances);
- onEntitiesAdded(result.getCreatedEntities());
- return result;
- }
-
- @Override
- public ITypedReferenceableInstance[] deserializeClassInstances(String entityInstanceDefinition) throws AtlasException {
- return GraphHelper.deserializeClassInstances(typeSystem, entityInstanceDefinition);
- }
-
- @Override
- public ITypedReferenceableInstance getTypedReferenceableInstance(Referenceable entityInstance) throws AtlasException {
- return GraphHelper.getTypedReferenceableInstance(typeSystem, entityInstance);
- }
-
- /**
- * Return the definition for the given guid.
- *
- * @param guid guid
- * @return entity definition as JSON
- */
- @Override
- public String getEntityDefinitionJson(String guid) throws AtlasException {
- guid = ParamChecker.notEmpty(guid, "entity id");
-
- final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid);
- return InstanceSerialization.toJson(instance, true);
- }
-
- /**
- * Return the definition for the given guid.
- *
- * @param guid guid
- * @return entity definition as JSON
- */
- @Override
- public ITypedReferenceableInstance getEntityDefinition(String guid) throws AtlasException {
- guid = ParamChecker.notEmpty(guid, "entity id");
-
- final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid);
- return instance;
- }
-
- @Override
- public ITypedReferenceableInstance getEntityDefinitionReference(String entityType, String attribute, String value)
- throws AtlasException {
- validateTypeExists(entityType);
- validateUniqueAttribute(entityType, attribute);
-
- return repository.getEntityDefinition(entityType, attribute, value);
- }
-
- @Override
- public String getEntityDefinition(String entityType, String attribute, String value) throws AtlasException {
- final ITypedReferenceableInstance instance = getEntityDefinitionReference(entityType, attribute, value);
- return InstanceSerialization.toJson(instance, true);
- }
-
- /**
- * Validate that attribute is unique attribute
- * @param entityType the entity type
- * @param attributeName the name of the attribute
- */
- private void validateUniqueAttribute(String entityType, String attributeName) throws AtlasException {
- ClassType type = typeSystem.getDataType(ClassType.class, entityType);
- AttributeInfo attribute = type.fieldMapping().fields.get(attributeName);
- if(attribute == null) {
- throw new IllegalArgumentException(
- String.format("%s is not an attribute in %s", attributeName, entityType));
- }
- if (!attribute.isUnique) {
- throw new IllegalArgumentException(
- String.format("%s.%s is not a unique attribute", entityType, attributeName));
- }
- }
-
- /**
- * Return the list of entity guids for the given type in the repository.
- *
- * @param entityType type
- * @return list of entity guids for the given type in the repository
- */
- @Override
- public List<String> getEntityList(String entityType) throws AtlasException {
- validateTypeExists(entityType);
-
- return repository.getEntityList(entityType);
- }
-
- /**
- * Updates an entity, instance of the type based on the guid set.
- *
- * @param entityInstanceDefinition json array of entity definitions
- * @return guids - json array of guids
- */
- @Override
- public CreateUpdateEntitiesResult updateEntities(String entityInstanceDefinition) throws AtlasException {
- entityInstanceDefinition = ParamChecker.notEmpty(entityInstanceDefinition, "Entity instance definition");
- ITypedReferenceableInstance[] typedInstances = deserializeClassInstances(entityInstanceDefinition);
-
- CreateUpdateEntitiesResult result = repository.updateEntities(typedInstances);
- onEntitiesAddedUpdated(result.getEntityResult());
- return result;
- }
-
- /**
- * Updates an entity, instance of the type based on the guid set.
- *
- * @param entityInstanceDefinitions
- * @return guids - json array of guids
- */
- @Override
- public CreateUpdateEntitiesResult updateEntities(ITypedReferenceableInstance[] entityInstanceDefinitions) throws AtlasException {
- CreateUpdateEntitiesResult result = repository.updateEntities(entityInstanceDefinitions);
- onEntitiesAddedUpdated(result.getEntityResult());
- return result;
- }
-
- private void onEntitiesAddedUpdated(EntityResult entityResult) throws AtlasException {
- onEntitiesAdded(entityResult.getCreatedEntities());
- onEntitiesUpdated(entityResult.getUpdateEntities());
- //Note: doesn't access deletedEntities from entityResult
- onEntitiesDeleted(RequestContext.get().getDeletedEntities());
- }
-
- @Override
- public CreateUpdateEntitiesResult updateEntityAttributeByGuid(String guid, String attributeName,
- String value) throws AtlasException {
- guid = ParamChecker.notEmpty(guid, "entity id");
- attributeName = ParamChecker.notEmpty(attributeName, "attribute name");
- value = ParamChecker.notEmpty(value, "attribute value");
-
- ITypedReferenceableInstance existInstance = validateEntityExists(guid);
- ClassType type = typeSystem.getDataType(ClassType.class, existInstance.getTypeName());
- ITypedReferenceableInstance newInstance = type.createInstance();
-
- AttributeInfo attributeInfo = type.fieldMapping.fields.get(attributeName);
- if (attributeInfo == null) {
- throw new AtlasException("Invalid property " + attributeName + " for entity " + existInstance.getTypeName());
- }
-
- DataTypes.TypeCategory attrTypeCategory = attributeInfo.dataType().getTypeCategory();
-
- switch(attrTypeCategory) {
- case PRIMITIVE:
- newInstance.set(attributeName, value);
- break;
- case CLASS:
- Id id = new Id(value, 0, attributeInfo.dataType().getName());
- newInstance.set(attributeName, id);
- break;
- default:
- throw new AtlasException("Update of " + attrTypeCategory + " is not supported");
- }
-
- ((ReferenceableInstance)newInstance).replaceWithNewId(new Id(guid, 0, newInstance.getTypeName()));
- CreateUpdateEntitiesResult result = repository.updatePartial(newInstance);
- onEntitiesAddedUpdated(result.getEntityResult());
- return result;
- }
-
- private ITypedReferenceableInstance validateEntityExists(String guid)
- throws EntityNotFoundException, RepositoryException {
- final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid);
- if (instance == null) {
- throw new EntityNotFoundException(String.format("Entity with guid %s not found ", guid));
- }
- return instance;
- }
-
- @Override
- public CreateUpdateEntitiesResult updateEntityPartialByGuid(String guid, Referenceable newEntity)
- throws AtlasException {
- guid = ParamChecker.notEmpty(guid, "guid cannot be null");
- newEntity = ParamChecker.notNull(newEntity, "updatedEntity cannot be null");
- ITypedReferenceableInstance existInstance = validateEntityExists(guid);
-
- ITypedReferenceableInstance newInstance = validateAndConvertToTypedInstance(newEntity, existInstance.getTypeName());
- ((ReferenceableInstance)newInstance).replaceWithNewId(new Id(guid, 0, newInstance.getTypeName()));
-
- CreateUpdateEntitiesResult result = repository.updatePartial(newInstance);
- onEntitiesAddedUpdated(result.getEntityResult());
- return result;
- }
-
- @Override
- public ITypedReferenceableInstance validateAndConvertToTypedInstance(IReferenceableInstance updatedEntity, String typeName)
- throws AtlasException {
- ClassType type = typeSystem.getDataType(ClassType.class, typeName);
- ITypedReferenceableInstance newInstance = type.createInstance(updatedEntity.getId());
-
- for (String attributeName : updatedEntity.getValuesMap().keySet()) {
- AttributeInfo attributeInfo = type.fieldMapping.fields.get(attributeName);
- if (attributeInfo == null) {
- throw new AtlasException("Invalid property " + attributeName + " for entity " + updatedEntity);
- }
-
- DataTypes.TypeCategory attrTypeCategory = attributeInfo.dataType().getTypeCategory();
- Object value = updatedEntity.get(attributeName);
- switch (attrTypeCategory) {
- case CLASS:
- if (value != null) {
- if (value instanceof Referenceable) {
- newInstance.set(attributeName, value);
- } else {
- Id id = new Id((String) value, 0, attributeInfo.dataType().getName());
- newInstance.set(attributeName, id);
- }
- }
- break;
-
- case ENUM:
- case PRIMITIVE:
- case ARRAY:
- case STRUCT:
- case MAP:
- newInstance.set(attributeName, value);
- break;
-
- case TRAIT:
- //TODO - handle trait updates as well?
- default:
- throw new AtlasException("Update of " + attrTypeCategory + " is not supported");
- }
- }
-
- return newInstance;
- }
-
- @Override
- public CreateUpdateEntitiesResult updateEntityByUniqueAttribute(String typeName, String uniqueAttributeName,
- String attrValue,
- Referenceable updatedEntity) throws AtlasException {
- typeName = ParamChecker.notEmpty(typeName, "typeName");
- uniqueAttributeName = ParamChecker.notEmpty(uniqueAttributeName, "uniqueAttributeName");
- attrValue = ParamChecker.notNull(attrValue, "unique attribute value");
- updatedEntity = ParamChecker.notNull(updatedEntity, "updatedEntity");
-
- ITypedReferenceableInstance oldInstance = getEntityDefinitionReference(typeName, uniqueAttributeName, attrValue);
-
- final ITypedReferenceableInstance newInstance = validateAndConvertToTypedInstance(updatedEntity, typeName);
- ((ReferenceableInstance)newInstance).replaceWithNewId(oldInstance.getId());
-
- CreateUpdateEntitiesResult result = repository.updatePartial(newInstance);
- onEntitiesAddedUpdated(result.getEntityResult());
- return result;
- }
-
- private void validateTypeExists(String entityType) throws AtlasException {
- entityType = ParamChecker.notEmpty(entityType, "entity type");
-
- IDataType type = typeSystem.getDataType(IDataType.class, entityType);
- if (type.getTypeCategory() != DataTypes.TypeCategory.CLASS) {
- throw new IllegalArgumentException("type " + entityType + " not a CLASS type");
- }
- }
-
- /**
- * Gets the list of trait names for a given entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @return a list of trait names for the given entity guid
- * @throws AtlasException
- */
- @Override
- public List<String> getTraitNames(String guid) throws AtlasException {
- guid = ParamChecker.notEmpty(guid, "entity id");
- return repository.getTraitNames(guid);
- }
-
- /**
- * Adds a new trait to the list of existing entities represented by their respective guids
- * @param entityGuids list of guids of entities
- * @param traitInstance trait instance json that needs to be added to entities
- * @throws AtlasException
- */
- @Override
- public void addTrait(List<String> entityGuids, ITypedStruct traitInstance) throws AtlasException {
- Preconditions.checkNotNull(entityGuids, "entityGuids list cannot be null");
- Preconditions.checkNotNull(traitInstance, "Trait instance cannot be null");
-
- final String traitName = traitInstance.getTypeName();
-
- // ensure trait type is already registered with the TypeSystem
- if (!typeSystem.isRegistered(traitName)) {
- String msg = String.format("trait=%s should be defined in type system before it can be added", traitName);
- LOG.error(msg);
- throw new TypeNotFoundException(msg);
- }
-
- //ensure trait is not already registered with any of the given entities
- for (String entityGuid : entityGuids) {
- Preconditions.checkArgument(!getTraitNames(entityGuid).contains(traitName),
- "trait=%s is already defined for entity=%s", traitName, entityGuid);
- }
-
- repository.addTrait(entityGuids, traitInstance);
-
- for (String entityGuid : entityGuids) {
- onTraitAddedToEntity(repository.getEntityDefinition(entityGuid), traitInstance);
- }
- }
-
- /**
- * Adds a new trait to an existing entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @param traitInstanceDefinition trait instance json that needs to be added to entity
- * @throws AtlasException
- */
- @Override
- public void addTrait(String guid, String traitInstanceDefinition) throws AtlasException {
- guid = ParamChecker.notEmpty(guid, "entity id");
- traitInstanceDefinition = ParamChecker.notEmpty(traitInstanceDefinition, "trait instance definition");
-
- ITypedStruct traitInstance = deserializeTraitInstance(traitInstanceDefinition);
- addTrait(guid, traitInstance);
- }
-
- public void addTrait(String guid, ITypedStruct traitInstance) throws AtlasException {
- final String traitName = traitInstance.getTypeName();
-
- // ensure trait type is already registered with the TS
- if (!typeSystem.isRegistered(traitName)) {
- String msg = String.format("trait=%s should be defined in type system before it can be added", traitName);
- LOG.error(msg);
- throw new TypeNotFoundException(msg);
- }
-
- // ensure trait is not already defined
- Preconditions
- .checkArgument(!getTraitNames(guid).contains(traitName), "trait=%s is already defined for entity=%s",
- traitName, guid);
-
- repository.addTrait(guid, traitInstance);
-
- onTraitAddedToEntity(repository.getEntityDefinition(guid), traitInstance);
- }
-
- private ITypedStruct deserializeTraitInstance(String traitInstanceDefinition)
- throws AtlasException {
- return createTraitInstance(InstanceSerialization.fromJsonStruct(traitInstanceDefinition, true));
- }
-
- @Override
- public ITypedStruct createTraitInstance(Struct traitInstance) throws AtlasException {
- try {
- final String entityTypeName = ParamChecker.notEmpty(traitInstance.getTypeName(), "entity type");
-
- TraitType traitType = typeSystem.getDataType(TraitType.class, entityTypeName);
- return traitType.convert(traitInstance, Multiplicity.REQUIRED);
- } catch (TypeNotFoundException e) {
- throw e;
- } catch (Exception e) {
- throw new AtlasException("Error deserializing trait instance", e);
- }
- }
-
- @Override
- public IStruct getTraitDefinition(String guid, final String traitName) throws AtlasException {
- guid = ParamChecker.notEmpty(guid, "entity id");
-
- final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid);
- return instance.getTrait(traitName);
- }
-
- /**
- * Deletes a given trait from an existing entity represented by a guid.
- *
- * @param guid globally unique identifier for the entity
- * @param traitNameToBeDeleted name of the trait
- * @throws AtlasException
- */
- @Override
- public void deleteTrait(String guid, String traitNameToBeDeleted) throws AtlasException {
- guid = ParamChecker.notEmpty(guid, "entity id");
- traitNameToBeDeleted = ParamChecker.notEmpty(traitNameToBeDeleted, "trait name");
-
- // ensure trait type is already registered with the TS
- if (!typeSystem.isRegistered(traitNameToBeDeleted)) {
- final String msg = String.format("trait=%s should be defined in type system before it can be deleted",
- traitNameToBeDeleted);
- LOG.error(msg);
- throw new TypeNotFoundException(msg);
- }
-
- repository.deleteTrait(guid, traitNameToBeDeleted);
-
- onTraitDeletedFromEntity(repository.getEntityDefinition(guid), traitNameToBeDeleted);
- }
-
- private void onTypesAdded(Map<String, IDataType> typesAdded) throws AtlasException {
- for (TypesChangeListener listener : typeChangeListeners) {
- listener.onAdd(typesAdded.values());
- }
- }
-
- private void onEntitiesAdded(List<String> guids) throws AtlasException {
- List<ITypedReferenceableInstance> entities = loadEntities(guids);
- for (EntityChangeListener listener : entityChangeListeners) {
- listener.onEntitiesAdded(entities, false);
- }
- }
-
- private List<ITypedReferenceableInstance> loadEntities(List<String> guids) throws RepositoryException, EntityNotFoundException {
- return repository.getEntityDefinitions(guids.toArray(new String[guids.size()]));
- }
-
- private void onTypesUpdated(Map<String, IDataType> typesUpdated) throws AtlasException {
- for (TypesChangeListener listener : typeChangeListeners) {
- listener.onChange(typesUpdated.values());
- }
- }
-
- private void onEntitiesUpdated(List<String> guids) throws AtlasException {
- List<ITypedReferenceableInstance> entities = loadEntities(guids);
- for (EntityChangeListener listener : entityChangeListeners) {
- listener.onEntitiesUpdated(entities, false);
- }
- }
-
- private void onTraitAddedToEntity(ITypedReferenceableInstance entity, IStruct trait) throws AtlasException {
- Collection<IStruct> traits = Collections.singletonList(trait);
-
- for (EntityChangeListener listener : entityChangeListeners) {
- listener.onTraitsAdded(entity, traits);
- }
- }
-
- private void onTraitDeletedFromEntity(ITypedReferenceableInstance entity, String traitName) throws AtlasException {
- Collection<String> traitNames = Collections.singletonList(traitName);
-
- for (EntityChangeListener listener : entityChangeListeners) {
- listener.onTraitsDeleted(entity, traitNames);
- }
- }
-
- public void registerListener(EntityChangeListener listener) {
- entityChangeListeners.add(listener);
- }
-
- public void unregisterListener(EntityChangeListener listener) {
- entityChangeListeners.remove(listener);
- }
-
- @Override
- public List<EntityAuditEvent> getAuditEvents(String guid, String startKey, short count) throws AtlasException {
- guid = ParamChecker.notEmpty(guid, "entity id");
- startKey = ParamChecker.notEmptyIfNotNull(startKey, "start key");
- ParamChecker.lessThan(count, maxAuditResults, "count");
-
- return auditRepository.listEvents(guid, startKey, count);
- }
-
- /* (non-Javadoc)
- * @see org.apache.atlas.services.MetadataService#deleteEntities(java.lang.String)
- */
- @Override
- public EntityResult deleteEntities(List<String> deleteCandidateGuids) throws AtlasException {
- ParamChecker.notEmpty(deleteCandidateGuids, "delete candidate guids");
- return deleteGuids(deleteCandidateGuids);
- }
-
- @Override
- public EntityResult deleteEntityByUniqueAttribute(String typeName, String uniqueAttributeName,
- String attrValue) throws AtlasException {
- typeName = ParamChecker.notEmpty(typeName, "delete candidate typeName");
- uniqueAttributeName = ParamChecker.notEmpty(uniqueAttributeName, "delete candidate unique attribute name");
- attrValue = ParamChecker.notEmpty(attrValue, "delete candidate unique attribute value");
-
- //Throws EntityNotFoundException if the entity could not be found by its unique attribute
- ITypedReferenceableInstance instance = getEntityDefinitionReference(typeName, uniqueAttributeName, attrValue);
- final Id instanceId = instance.getId();
- List<String> deleteCandidateGuids = new ArrayList<String>() {{ add(instanceId._getId());}};
-
- return deleteGuids(deleteCandidateGuids);
- }
-
- private EntityResult deleteGuids(List<String> deleteCandidateGuids) throws AtlasException {
- EntityResult entityResult = repository.deleteEntities(deleteCandidateGuids);
- onEntitiesAddedUpdated(entityResult);
- return entityResult;
- }
-
- private void onEntitiesDeleted(List<ITypedReferenceableInstance> entities) throws AtlasException {
- for (EntityChangeListener listener : entityChangeListeners) {
- listener.onEntitiesDeleted(entities, false);
- }
- }
-
- /**
- * Create or restore the {@link TypeSystem} cache on server activation.
- *
- * When an instance is passive, types could be created outside of its cache by the active instance.
- * Hence, when this instance becomes active, it needs to restore the cache from the backend store.
- * The first time initialization happens, the indices for these types also needs to be created.
- * This must happen only from the active instance, as it updates shared backend state.
- */
- @Override
- public void instanceIsActive() throws AtlasException {
- LOG.info("Reacting to active state: restoring type system");
- restoreTypeSystem();
- }
-
- @Override
- public void instanceIsPassive() {
- LOG.info("Reacting to passive state: no action right now");
- }
-
- @Override
- public int getHandlerOrder() {
- return HandlerOrder.DEFAULT_METADATA_SERVICE.getOrder();
- }
-
- @Override
- public void onChange(ChangedTypeDefs changedTypeDefs) throws AtlasBaseException {
- // All we need here is a restore of the type-system
- LOG.info("TypeSystem reset invoked by TypeRegistry changes");
- try {
- TypesDef typesDef = typeStore.restore();
- typeSystem.reset();
- TypeSystem.TransientTypeSystem transientTypeSystem
- = typeSystem.createTransientTypeSystem(typesDef, false);
- Map<String, IDataType> typesAdded = transientTypeSystem.getTypesAdded();
- LOG.info("Number of types got from transient type system: {}", typesAdded.size());
- typeSystem.commitTypes(typesAdded);
- } catch (AtlasException e) {
- LOG.error("Failed to restore type-system after TypeRegistry changes", e);
- throw new AtlasBaseException(AtlasErrorCode.INTERNAL_ERROR, e);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/util/AtlasRepositoryConfiguration.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/util/AtlasRepositoryConfiguration.java b/repository/src/main/java/org/apache/atlas/util/AtlasRepositoryConfiguration.java
index 0370c8e..0e1e5b6 100644
--- a/repository/src/main/java/org/apache/atlas/util/AtlasRepositoryConfiguration.java
+++ b/repository/src/main/java/org/apache/atlas/util/AtlasRepositoryConfiguration.java
@@ -21,13 +21,9 @@ import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasException;
import org.apache.atlas.repository.audit.EntityAuditRepository;
import org.apache.atlas.repository.audit.HBaseBasedAuditRepository;
-import org.apache.atlas.repository.graph.DeleteHandler;
-import org.apache.atlas.repository.graph.SoftDeleteHandler;
import org.apache.atlas.repository.graphdb.GraphDatabase;
import org.apache.atlas.repository.store.graph.v1.DeleteHandlerV1;
import org.apache.atlas.repository.store.graph.v1.SoftDeleteHandlerV1;
-import org.apache.atlas.typesystem.types.cache.DefaultTypeCache;
-import org.apache.atlas.typesystem.types.cache.TypeCache;
import org.apache.commons.configuration.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -66,18 +62,6 @@ public class AtlasRepositoryConfiguration {
return ApplicationProperties.get().getBoolean(ENABLE_FULLTEXT_SEARCH_PROPERTY, true);
}
- @SuppressWarnings("unchecked")
- public static Class<? extends TypeCache> getTypeCache() {
- // Get the type cache implementation class from Atlas configuration.
- try {
- Configuration config = ApplicationProperties.get();
- return ApplicationProperties.getClass(config, TYPE_CACHE_IMPLEMENTATION_PROPERTY,
- DefaultTypeCache.class.getName(), TypeCache.class);
- } catch (AtlasException e) {
- LOG.error("Error loading typecache ", e);
- return DefaultTypeCache.class;
- }
- }
private static final String AUDIT_REPOSITORY_IMPLEMENTATION_PROPERTY = "atlas.EntityAuditRepository.impl";
@SuppressWarnings("unchecked")
@@ -91,20 +75,8 @@ public class AtlasRepositoryConfiguration {
}
}
- private static final String DELETE_HANDLER_IMPLEMENTATION_PROPERTY = "atlas.DeleteHandler.impl";
private static final String DELETE_HANDLER_V1_IMPLEMENTATION_PROPERTY = "atlas.DeleteHandlerV1.impl";
- @SuppressWarnings("unchecked")
- public static Class<? extends DeleteHandler> getDeleteHandlerImpl() {
- try {
- Configuration config = ApplicationProperties.get();
- return ApplicationProperties.getClass(config,
- DELETE_HANDLER_IMPLEMENTATION_PROPERTY, SoftDeleteHandler.class.getName(), DeleteHandler.class);
- } catch (AtlasException e) {
- throw new RuntimeException(e);
- }
- }
-
public static Class<? extends DeleteHandlerV1> getDeleteHandlerV1Impl() {
try {
Configuration config = ApplicationProperties.get();
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/util/AttributeValueMap.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/util/AttributeValueMap.java b/repository/src/main/java/org/apache/atlas/util/AttributeValueMap.java
index 6e22604..080eefb 100644
--- a/repository/src/main/java/org/apache/atlas/util/AttributeValueMap.java
+++ b/repository/src/main/java/org/apache/atlas/util/AttributeValueMap.java
@@ -23,8 +23,8 @@ import java.util.HashSet;
import java.util.Map;
import java.util.Set;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.typesystem.IReferenceableInstance;
/**
* Map of attribute values to a collection of IndexedInstances with that attribute value.
@@ -37,7 +37,7 @@ public class AttributeValueMap {
//need collection in case they are adding the same entity twice?
private Map<Object,Collection<IndexedInstance>> valueMap_ = new HashMap<>();
- public void put(Object value, IReferenceableInstance instance, int index) {
+ public void put(Object value, Referenceable instance, int index) {
IndexedInstance wrapper = new IndexedInstance(instance, index);
Collection<IndexedInstance> existingValues = valueMap_.get(value);
if(existingValues == null) {
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/util/CompiledQueryCacheKey.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/util/CompiledQueryCacheKey.java b/repository/src/main/java/org/apache/atlas/util/CompiledQueryCacheKey.java
deleted file mode 100644
index 56a5a2a..0000000
--- a/repository/src/main/java/org/apache/atlas/util/CompiledQueryCacheKey.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.util;
-
-import org.apache.atlas.query.QueryParams;
-
-/**
- * Represents a key for an entry in the compiled query cache.
- *
- */
-public class CompiledQueryCacheKey {
-
- private final String dslQuery;
- private final QueryParams queryParams;
-
- public CompiledQueryCacheKey(String dslQuery, QueryParams queryParams) {
- super();
- this.dslQuery = dslQuery;
- this.queryParams = queryParams;
- }
-
- public CompiledQueryCacheKey(String dslQuery) {
- super();
- this.dslQuery = dslQuery;
- this.queryParams = null;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((dslQuery == null) ? 0 : dslQuery.hashCode());
- result = prime * result + ((queryParams == null) ? 0 : queryParams.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
-
- if (this == obj) {
- return true;
- }
-
- if (obj == null) {
- return false;
- }
- if (!(obj instanceof CompiledQueryCacheKey)) {
- return false;
- }
-
- CompiledQueryCacheKey other = (CompiledQueryCacheKey) obj;
- if (! equals(dslQuery, other.dslQuery)) {
- return false;
- }
-
- if (! equals(queryParams, other.queryParams)) {
- return false;
- }
-
- return true;
- }
-
- private static boolean equals(Object o1, Object o2) {
- if(o1 == o2) {
- return true;
- }
- if(o1 == null) {
- return o2 == null;
- }
- return o1.equals(o2);
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/util/IndexedInstance.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/util/IndexedInstance.java b/repository/src/main/java/org/apache/atlas/util/IndexedInstance.java
index 60ec8cc..17ff511 100644
--- a/repository/src/main/java/org/apache/atlas/util/IndexedInstance.java
+++ b/repository/src/main/java/org/apache/atlas/util/IndexedInstance.java
@@ -17,8 +17,8 @@
*/
package org.apache.atlas.util;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.typesystem.IReferenceableInstance;
/**
* Data structure that stores an IReferenceableInstance and its location within
@@ -28,16 +28,16 @@ import org.apache.atlas.typesystem.IReferenceableInstance;
*/
public class IndexedInstance {
- private final IReferenceableInstance instance_;
+ private final Referenceable instance_;
private final int index_;
- public IndexedInstance(IReferenceableInstance instance, int index) {
+ public IndexedInstance(Referenceable instance, int index) {
super();
this.instance_ = instance;
this.index_ = index;
}
- public IReferenceableInstance getInstance() {
+ public Referenceable getInstance() {
return instance_;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/util/NoopGremlinQuery.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/util/NoopGremlinQuery.java b/repository/src/main/java/org/apache/atlas/util/NoopGremlinQuery.java
deleted file mode 100644
index 280570e..0000000
--- a/repository/src/main/java/org/apache/atlas/util/NoopGremlinQuery.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.util;
-
-import org.apache.atlas.query.GremlinQuery;
-import org.apache.atlas.typesystem.types.IDataType;
-
-/**
- * Represents a query that we know will have no results.
- *
- */
-public class NoopGremlinQuery extends GremlinQuery {
-
- private final IDataType dataType;
-
- public NoopGremlinQuery(IDataType dataType) {
- super(null, null, null);
- this.dataType = dataType;
- }
-
- public IDataType getDataType() {
- return dataType;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/util/TypeDefSorter.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/util/TypeDefSorter.java b/repository/src/main/java/org/apache/atlas/util/TypeDefSorter.java
deleted file mode 100644
index 733aefd..0000000
--- a/repository/src/main/java/org/apache/atlas/util/TypeDefSorter.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.util;
-
-import org.apache.atlas.model.typedef.AtlasClassificationDef;
-import org.apache.atlas.model.typedef.AtlasEntityDef;
-import org.apache.atlas.model.typedef.AtlasStructDef;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-public class TypeDefSorter {
- private static final Logger LOG = LoggerFactory.getLogger(TypeDefSorter.class);
-
- public static <T extends AtlasStructDef> List<T> sortTypes(List<T> types) {
- Map<String, T> typesByName = new HashMap<>();
- for (T type : types) {
- typesByName.put(type.getName(), type);
- }
- List<T> result = new ArrayList<>(types.size());
- Set<T> processed = new HashSet<>();
- for (T type : types) {
- addToResult(type, result, processed, typesByName);
- }
- return result;
- }
-
- private static <T extends AtlasStructDef> void addToResult(T type, List<T> result,
- Set<T> processed,
- Map<String, T> typesByName) {
- if (processed.contains(type)) {
- return;
- }
- processed.add(type);
- Set<String> superTypeNames = new HashSet<>();
- if (type.getClass().equals(AtlasClassificationDef.class)) {
- try {
- AtlasClassificationDef classificationDef = AtlasClassificationDef.class.cast(type);
- superTypeNames.addAll(classificationDef.getSuperTypes());
- } catch (ClassCastException ex) {
- LOG.warn("Casting to ClassificationDef failed");
- }
- }
- if (type.getClass().equals(AtlasEntityDef.class)) {
- try {
- AtlasEntityDef entityDef = AtlasEntityDef.class.cast(type);
- superTypeNames.addAll(entityDef.getSuperTypes());
- } catch (ClassCastException ex) {
- LOG.warn("Casting to AtlasEntityDef failed");
- }
- }
-
- for (String superTypeName : superTypeNames) {
- // Recursively add any supertypes first to the result.
- T superType = typesByName.get(superTypeName);
- if (superType != null) {
- addToResult(superType, result, processed, typesByName);
- }
- }
- result.add(type);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/scala/org/apache/atlas/query/ClosureQuery.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/ClosureQuery.scala b/repository/src/main/scala/org/apache/atlas/query/ClosureQuery.scala
deleted file mode 100644
index 1190114..0000000
--- a/repository/src/main/scala/org/apache/atlas/query/ClosureQuery.scala
+++ /dev/null
@@ -1,327 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import java.util
-
-import org.apache.atlas.repository.graphdb.AtlasGraph
-import org.apache.atlas.query.Expressions._
-import org.apache.atlas.typesystem.ITypedStruct
-import org.apache.atlas.typesystem.json.{InstanceSerialization, Serialization}
-import org.apache.atlas.typesystem.persistence.StructInstance
-import org.apache.atlas.typesystem.types.DataTypes.{MapType, PrimitiveType}
-import org.apache.atlas.typesystem.types.{DataTypes, StructType, TypeSystem}
-
-/**
- * Represents a Query to compute the closure based on a relationship between entities of a particular type.
- * For e.g. Database Tables are related to each other to capture the '''Lineage''' of data in a Table based
- * on other Tables.
- *
- * A Closure Query is specified by the following information:
- * - The Type whose instances are in a closure relationship. For e.g. 'Table'
- * - The Closure relation. This is specified as an ''Attribute path''. For e.g. if we have the following model:
- * {{{
- * class Table {
- * name : String,
- * ...
- * }
- *
- * class LoadTableProcess {
- * name : String,
- * inputTables : List[Table],
- * outputTable : Table,
- * ...
- * }
- * }}}
- * ''LoadTable'' instance captures the relationship between the data in an output Table and a set of input Tables.
- * In order to compute the '''Lineage''' of a Table, the ''Attribute path'' that relates 2 Tables is
- * '''[(LoadTableProcess,outputTable), inputTables]'''. This list is saying that for any Table I want to connect to other
- * tables via the LoadProcess.outputTable attribute, and then via the inputTables attribute. So each entry in the
- * Attribute Path represents an attribute in an object. For reverse relations the Type and attribute must be specified,
- * as in 'LoadTableProcess,outputTable)', whereas for forward relations the attribute name is sufficient.
- * - The depth of the traversal. Certain times you are not interested in the complete closure, but to only
- * discover related instances up to a certain depth. Specify the depth as number of hops, or you can ask for the
- * complete closure.
- * - You can ask for certain attributes to be returned. For e.g. you may only want the Table name, owner and
- * creationDate. By default only the Ids of the related instances is returned.
- * - For pair of related instances, you optionally ask for the Path of the relation to be returned. This is
- * returned as a list of ''Id''s.
- *
- * Given these 5 things the ClosureQuery can be executed, it returns a GremlinQueryResult of the Closure Query.
- */
-trait ClosureQuery {
-
- val SRC_PREFIX = TypeUtils.GraphResultStruct.SRC_PREFIX
- val DEST_PREFIX = TypeUtils.GraphResultStruct.DEST_PREFIX
-
- sealed trait PathAttribute {
-
- def toExpr : Expression = this match {
- case r : Relation => fieldId(r.attributeName)
- case rr : ReverseRelation => fieldId(s"${rr.typeName}->${rr.attributeName}")
- }
-
- def toFieldName : String = this match {
- case r : Relation => r.attributeName
- case rr : ReverseRelation => rr.typeName
- }
- }
- case class ReverseRelation(typeName : String, attributeName : String) extends PathAttribute
- case class Relation(attributeName : String) extends PathAttribute
-
- /**
- * Type on whose instances the closure needs to be computed
- * @return
- */
- def closureType : String
-
- /**
- * specify how instances are related.
- */
- def closureRelation : List[PathAttribute]
-
- /**
- * The maximum hops between related instances. A [[None]] implies there is maximum.
- * @return
- */
- def depth : Option[Int]
-
- /**
- * The attributes to return for the instances. These will be prefixed by 'src_' and 'dest_' in the
- * output rows.
- * @return
- */
- def selectAttributes : Option[List[String]]
-
- /**
- * specify if the Path should be returned.
- * @return
- */
- def withPath : Boolean
-
- def persistenceStrategy: GraphPersistenceStrategies
- def g: AtlasGraph[_,_]
-
- def pathExpr : Expressions.Expression = {
- closureRelation.tail.foldLeft(closureRelation.head.toExpr)((b,a) => b.field(a.toFieldName))
- }
-
- def selectExpr(alias : String) : List[Expression] = {
- selectAttributes.map { _.map { a =>
- fieldId(alias).field(a).as(s"${alias}_$a")
- }
- }.getOrElse(List(fieldId(alias)))
- }
-
- /**
- * hook to allow a filter to be added for the closureType
- * @param expr
- * @return
- */
- def srcCondition(expr : Expression) : Expression = expr
-
- def expr : Expressions.Expression = {
- val e = srcCondition(Expressions._class(closureType)).as(SRC_PREFIX).loop(pathExpr).as(DEST_PREFIX).
- select((selectExpr(SRC_PREFIX) ++ selectExpr(DEST_PREFIX)):_*)
- if (withPath) e.path else e
- }
-
- def evaluate(): GremlinQueryResult = {
- var e = expr
- QueryProcessor.evaluate(e, g, persistenceStrategy)
- }
-
- def graph(res: GremlinQueryResult) : GraphResult = {
-
- if (!withPath) {
- throw new ExpressionException(expr, "Graph requested for non Path Query")
- }
-
- import scala.collection.JavaConverters._
-
- val graphResType = TypeUtils.GraphResultStruct.createType(res.resultDataType.asInstanceOf[StructType])
- val vertexPayloadType = {
- val mT = graphResType.fieldMapping.fields.get(TypeUtils.GraphResultStruct.verticesAttrName).
- dataType().asInstanceOf[MapType]
- mT.getValueType.asInstanceOf[StructType]
- }
-
- def id(idObj : StructInstance) : String = idObj.getString(TypeSystem.ID_STRUCT_ID_ATTRNAME)
-
- def vertexStruct(idObj : StructInstance, resRow : ITypedStruct, attrPrefix : String) : StructInstance = {
- val vP = vertexPayloadType.createInstance()
- vP.set(TypeUtils.GraphResultStruct.vertexIdAttrName, idObj)
- vertexPayloadType.fieldMapping.fields.asScala.keys.
- filter(_ != TypeUtils.GraphResultStruct.vertexIdAttrName).foreach{a =>
- vP.set(a, resRow.get(s"${attrPrefix}$a"))
- }
- vP.asInstanceOf[StructInstance]
- }
-
- val instance = graphResType.createInstance()
- val vertices = new util.HashMap[String, AnyRef]()
- val edges = new util.HashMap[String,java.util.List[String]]()
-
- /**
- * foreach resultRow
- * for each Path entry
- * add an entry in the edges Map
- * add an entry for the Src vertex to the vertex Map
- * add an entry for the Dest vertex to the vertex Map
- */
- res.rows.asScala.map(_.asInstanceOf[StructInstance]).foreach { r =>
- val path = r.get(TypeUtils.ResultWithPathStruct.pathAttrName).asInstanceOf[java.util.List[_]].asScala
- val srcVertex = path.head.asInstanceOf[StructInstance]
-
- var currVertex = srcVertex
- path.tail.foreach { n =>
- val nextVertex = n.asInstanceOf[StructInstance]
- val iList = if (!edges.containsKey(id(currVertex))) {
- val l = new util.ArrayList[String]()
- edges.put(id(currVertex), l)
- l
- } else {
- edges.get(id(currVertex))
- }
- if ( !iList.contains(id(nextVertex))) {
- iList.add(id(nextVertex))
- }
- currVertex = nextVertex
- }
- val vertex = r.get(TypeUtils.ResultWithPathStruct.resultAttrName)
- vertices.put(id(srcVertex), vertexStruct(srcVertex,
- r.get(TypeUtils.ResultWithPathStruct.resultAttrName).asInstanceOf[ITypedStruct],
- s"${SRC_PREFIX}_"))
- vertices.put(id(currVertex), vertexStruct(currVertex,
- r.get(TypeUtils.ResultWithPathStruct.resultAttrName).asInstanceOf[ITypedStruct],
- s"${DEST_PREFIX}_"))
- }
-
- instance.set(TypeUtils.GraphResultStruct.verticesAttrName, vertices)
- instance.set(TypeUtils.GraphResultStruct.edgesAttrName, edges)
- GraphResult(res.query, instance)
- }
-}
-
-/**
- * Closure for a single instance. Instance is specified by an ''attributeToSelectInstance'' and the value
- * for the attribute.
- *
- * @tparam T
- */
-trait SingleInstanceClosureQuery[T] extends ClosureQuery {
-
- def attributeToSelectInstance : String
-
- def attributeTyp : PrimitiveType[T]
- def instanceValue : T
-
- override def srcCondition(expr : Expression) : Expression = {
- expr.where(
- Expressions.fieldId(attributeToSelectInstance).`=`(Expressions.literal(attributeTyp, instanceValue))
- )
- }
-}
-
-import scala.language.existentials;
-/**
- * A ClosureQuery to compute '''Lineage''' for Hive tables. Assumes the Lineage relation is captured in a ''CTAS''
- * type, and the table relations are captured as attributes from a CTAS instance to Table instances.
- *
- * @param tableTypeName The name of the Table Type.
- * @param ctasTypeName The name of the Create Table As Select(CTAS) Type.
- * @param ctasInputTableAttribute The attribute in CTAS Type that associates it to the ''Input'' tables.
- * @param ctasOutputTableAttribute The attribute in CTAS Type that associates it to the ''Output'' tables.
- * @param depth depth as needed by the closure Query.
- * @param selectAttributes as needed by the closure Query.
- * @param withPath as needed by the closure Query.
- * @param persistenceStrategy as needed to evaluate the Closure Query.
- * @param g as needed to evaluate the Closure Query.
- */
-case class InputLineageClosureQuery(tableTypeName : String,
- attributeToSelectInstance : String,
- tableName : String,
- ctasTypeName : String,
- ctasInputTableAttribute : String,
- ctasOutputTableAttribute : String,
- depth : Option[Int],
- selectAttributes : Option[List[String]],
- withPath : Boolean,
- persistenceStrategy: GraphPersistenceStrategies,
- g: AtlasGraph[_,_]
- ) extends SingleInstanceClosureQuery[String] {
-
- val closureType : String = tableTypeName
-
- val attributeTyp = DataTypes.STRING_TYPE
-
- val instanceValue = tableName
-
- lazy val closureRelation = List(
- ReverseRelation(ctasTypeName, ctasOutputTableAttribute),
- Relation(ctasInputTableAttribute)
- )
-}
-
-/**
- * A ClosureQuery to compute where a table is used based on the '''Lineage''' for Hive tables.
- * Assumes the Lineage relation is captured in a ''CTAS''
- * type, and the table relations are captured as attributes from a CTAS instance to Table instances.
- *
- * @param tableTypeName The name of the Table Type.
- * @param ctasTypeName The name of the Create Table As Select(CTAS) Type.
- * @param ctasInputTableAttribute The attribute in CTAS Type that associates it to the ''Input'' tables.
- * @param ctasOutputTableAttribute The attribute in CTAS Type that associates it to the ''Output'' tables.
- * @param depth depth as needed by the closure Query.
- * @param selectAttributes as needed by the closure Query.
- * @param withPath as needed by the closure Query.
- * @param persistenceStrategy as needed to evaluate the Closure Query.
- * @param g as needed to evaluate the Closure Query.
- */
-case class OutputLineageClosureQuery(tableTypeName : String,
- attributeToSelectInstance : String,
- tableName : String,
- ctasTypeName : String,
- ctasInputTableAttribute : String,
- ctasOutputTableAttribute : String,
- depth : Option[Int],
- selectAttributes : Option[List[String]],
- withPath : Boolean,
- persistenceStrategy: GraphPersistenceStrategies,
- g: AtlasGraph[_,_]
- ) extends SingleInstanceClosureQuery[String] {
-
- val closureType : String = tableTypeName
-
- val attributeTyp = DataTypes.STRING_TYPE
-
- val instanceValue = tableName
-
- lazy val closureRelation = List(
- ReverseRelation(ctasTypeName, ctasInputTableAttribute),
- Relation(ctasOutputTableAttribute)
- )
-}
-
-case class GraphResult(query: String, result : ITypedStruct) {
-
- def toTypedJson = Serialization.toJson(result)
-
- def toInstanceJson = InstanceSerialization.toJson(result)
-}
\ No newline at end of file
[12/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/server-api/src/main/java/org/apache/atlas/typesystem/exception/TraitNotFoundException.java
----------------------------------------------------------------------
diff --git a/server-api/src/main/java/org/apache/atlas/typesystem/exception/TraitNotFoundException.java b/server-api/src/main/java/org/apache/atlas/typesystem/exception/TraitNotFoundException.java
deleted file mode 100644
index 2d5663e..0000000
--- a/server-api/src/main/java/org/apache/atlas/typesystem/exception/TraitNotFoundException.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.exception;
-
-/**
- * A simple wrapper for 404.
- * Thrown when a requested trait can not be found.
- */
-public class TraitNotFoundException extends NotFoundException {
- public TraitNotFoundException() {
- }
-
- public TraitNotFoundException(String message) {
- super(message);
- }
-
- public TraitNotFoundException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public TraitNotFoundException(Throwable cause) {
- super(cause);
- }
-
- public TraitNotFoundException(String message, Throwable cause, boolean enableSuppression,
- boolean writableStackTrace) {
- super(message, cause, enableSuppression, writableStackTrace);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/.gitignore
----------------------------------------------------------------------
diff --git a/typesystem/.gitignore b/typesystem/.gitignore
deleted file mode 100755
index c7029f8..0000000
--- a/typesystem/.gitignore
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Eclipse
-.classpath
-.project
-.settings/
-
-# Intellij
-.idea/
-*.iml
-*.iws
-
-# Mac
-.DS_Store
-
-# Maven
-log/
-target/
-
-# Emacs
-*~
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/LICENSE.md
----------------------------------------------------------------------
diff --git a/typesystem/LICENSE.md b/typesystem/LICENSE.md
deleted file mode 100755
index 5c304d1..0000000
--- a/typesystem/LICENSE.md
+++ /dev/null
@@ -1,201 +0,0 @@
-Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "{}"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright {yyyy} {name of copyright owner}
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/doc/IDataType.uml
----------------------------------------------------------------------
diff --git a/typesystem/doc/IDataType.uml b/typesystem/doc/IDataType.uml
deleted file mode 100755
index 68051c3..0000000
--- a/typesystem/doc/IDataType.uml
+++ /dev/null
@@ -1,190 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-<Diagram>
- <ID>JAVA</ID>
- <OriginalElement>org.apache.atlas.types.IDataType</OriginalElement>
- <nodes>
- <node x="2063.5" y="401.0">org.apache.atlas.types.DataTypes.PrimitiveType</node>
- <node x="1307.0" y="687.0">org.apache.atlas.types.DataTypes.BigIntegerType</node>
- <node x="3098.0" y="687.0">org.apache.atlas.types.DataTypes.ShortType</node>
- <node x="2899.0" y="687.0">org.apache.atlas.types.DataTypes.DoubleType</node>
- <node x="1705.0" y="687.0">org.apache.atlas.types.DataTypes.FloatType</node>
- <node x="2650.5" y="335.0">org.apache.atlas.types.DataTypes.MapType</node>
- <node x="2501.0" y="687.0">org.apache.atlas.types.DataTypes.BooleanType</node>
- <node x="2073.5" y="168.0">org.apache.atlas.types.AbstractDataType</node>
- <node x="1108.0" y="687.0">org.apache.atlas.types.DataTypes.StringType</node>
- <node x="0.0" y="632.0">org.apache.atlas.types.ClassType</node>
- <node x="9.75" y="302.0">org.apache.atlas.types.HierarchicalType</node>
- <node x="2103.0" y="687.0">org.apache.atlas.types.DataTypes.BigDecimalType</node>
- <node x="736.0" y="632.0">org.apache.atlas.types.StructType</node>
- <node x="1506.0" y="687.0">org.apache.atlas.types.DataTypes.DateType</node>
- <node x="2341.5" y="357.0">org.apache.atlas.types.DataTypes.ArrayType</node>
- <node x="398.0" y="665.0">org.apache.atlas.types.TraitType</node>
- <node x="2700.0" y="687.0">org.apache.atlas.types.DataTypes.IntType</node>
- <node x="1904.0" y="687.0">org.apache.atlas.types.DataTypes.ByteType</node>
- <node x="2302.0" y="687.0">org.apache.atlas.types.DataTypes.LongType</node>
- <node x="544.0" y="390.0">org.apache.atlas.types.IConstructableType</node>
- <node x="1305.0" y="0.0">org.apache.atlas.types.IDataType</node>
- </nodes>
- <notes />
- <edges>
- <edge source="org.apache.atlas.types.StructType" target="org.apache.atlas.types.AbstractDataType">
- <point x="88.0" y="-92.0" />
- <point x="1000.0" y="282.0" />
- <point x="2144.9" y="282.0" />
- <point x="-47.59999999999991" y="37.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.BooleanType" target="org.apache.atlas.types.DataTypes.PrimitiveType">
- <point x="0.0" y="-37.0" />
- <point x="2590.5" y="602.0" />
- <point x="2239.409090909091" y="602.0" />
- <point x="46.90909090909099" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.IntType" target="org.apache.atlas.types.DataTypes.PrimitiveType">
- <point x="0.0" y="-37.0" />
- <point x="2789.5" y="592.0" />
- <point x="2262.863636363636" y="592.0" />
- <point x="70.36363636363626" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.ByteType" target="org.apache.atlas.types.DataTypes.PrimitiveType">
- <point x="0.0" y="-37.0" />
- <point x="1993.5" y="612.0" />
- <point x="2169.0454545454545" y="612.0" />
- <point x="-23.454545454545496" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.MapType" target="org.apache.atlas.types.AbstractDataType">
- <point x="0.0" y="-92.0" />
- <point x="2811.0" y="272.0" />
- <point x="2287.7" y="272.0" />
- <point x="95.20000000000005" y="37.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.StringType" target="org.apache.atlas.types.DataTypes.PrimitiveType">
- <point x="-5.6843418860808015E-14" y="-37.0" />
- <point x="1197.5" y="572.0" />
- <point x="2075.227272727273" y="572.0" />
- <point x="-117.27272727272725" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.PrimitiveType" target="org.apache.atlas.types.AbstractDataType">
- <point x="0.0" y="-26.0" />
- <point x="0.0" y="37.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.ArrayType" target="org.apache.atlas.types.AbstractDataType">
- <point x="0.0" y="-70.0" />
- <point x="2486.0" y="282.0" />
- <point x="2240.1" y="282.0" />
- <point x="47.59999999999991" y="37.0" />
- </edge>
- <edge source="org.apache.atlas.types.IConstructableType" target="org.apache.atlas.types.IDataType">
- <point x="0.0" y="-37.0" />
- <point x="636.5" y="262.0" />
- <point x="511.03656126482247" y="262.0" />
- <point x="511.03656126482247" y="143.0" />
- <point x="1359.75" y="143.0" />
- <point x="-54.75" y="59.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.LongType" target="org.apache.atlas.types.DataTypes.PrimitiveType">
- <point x="0.0" y="-37.0" />
- <point x="2391.5" y="612.0" />
- <point x="2215.9545454545455" y="612.0" />
- <point x="23.454545454545496" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.DateType" target="org.apache.atlas.types.DataTypes.PrimitiveType">
- <point x="0.0" y="-37.0" />
- <point x="1595.5" y="592.0" />
- <point x="2122.1363636363635" y="592.0" />
- <point x="-70.36363636363637" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.types.TraitType" target="org.apache.atlas.types.HierarchicalType">
- <point x="-79.5" y="-59.0" />
- <point x="477.5" y="602.0" />
- <point x="264.0" y="602.0" />
- <point x="84.75" y="125.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.DoubleType" target="org.apache.atlas.types.DataTypes.PrimitiveType">
- <point x="0.0" y="-37.0" />
- <point x="2988.5" y="582.0" />
- <point x="2286.318181818182" y="582.0" />
- <point x="93.81818181818176" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.ShortType" target="org.apache.atlas.types.DataTypes.PrimitiveType">
- <point x="0.0" y="-37.0" />
- <point x="3187.5" y="572.0" />
- <point x="2309.772727272727" y="572.0" />
- <point x="117.27272727272725" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.types.StructType" target="org.apache.atlas.types.IConstructableType">
- <point x="-88.0" y="-92.0" />
- <point x="824.0" y="612.0" />
- <point x="698.1666666666666" y="612.0" />
- <point x="61.66666666666663" y="37.0" />
- </edge>
- <edge source="org.apache.atlas.types.HierarchicalType" target="org.apache.atlas.types.AbstractDataType">
- <point x="0.0" y="-125.0" />
- <point x="179.25" y="272.0" />
- <point x="2097.3" y="272.0" />
- <point x="-95.20000000000005" y="37.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.BigDecimalType" target="org.apache.atlas.types.DataTypes.PrimitiveType">
- <point x="0.0" y="-37.0" />
- <point x="0.0" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.FloatType" target="org.apache.atlas.types.DataTypes.PrimitiveType">
- <point x="0.0" y="-37.0" />
- <point x="1794.5" y="602.0" />
- <point x="2145.590909090909" y="602.0" />
- <point x="-46.90909090909099" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.types.ClassType" target="org.apache.atlas.types.HierarchicalType">
- <point x="-94.5" y="-92.0" />
- <point x="-84.75" y="125.0" />
- </edge>
- <edge source="org.apache.atlas.types.AbstractDataType" target="org.apache.atlas.types.IDataType">
- <point x="0.0" y="-37.0" />
- <point x="2192.5" y="143.0" />
- <point x="1469.25" y="143.0" />
- <point x="54.75" y="59.0" />
- </edge>
- <edge source="org.apache.atlas.types.TraitType" target="org.apache.atlas.types.IConstructableType">
- <point x="79.5" y="-59.0" />
- <point x="0.0" y="37.0" />
- </edge>
- <edge source="org.apache.atlas.types.ClassType" target="org.apache.atlas.types.IConstructableType">
- <point x="94.5" y="-92.0" />
- <point x="283.5" y="612.0" />
- <point x="574.8333333333333" y="612.0" />
- <point x="-61.66666666666666" y="37.0" />
- </edge>
- <edge source="org.apache.atlas.types.DataTypes.BigIntegerType" target="org.apache.atlas.types.DataTypes.PrimitiveType">
- <point x="0.0" y="-37.0" />
- <point x="1396.5" y="582.0" />
- <point x="2098.681818181818" y="582.0" />
- <point x="-93.81818181818176" y="26.0" />
- </edge>
- </edges>
- <settings layout="Hierarchic Group" zoom="1.0" x="1212.0" y="-764.5" />
- <SelectedNodes>
- <node>org.apache.atlas.types.DataTypes.StringType</node>
- </SelectedNodes>
- <Categories>
- <Category>Methods</Category>
- </Categories>
- <SCOPE>All</SCOPE>
- <VISIBILITY>private</VISIBILITY>
-</Diagram>
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/doc/Instance.uml
----------------------------------------------------------------------
diff --git a/typesystem/doc/Instance.uml b/typesystem/doc/Instance.uml
deleted file mode 100755
index e11c685..0000000
--- a/typesystem/doc/Instance.uml
+++ /dev/null
@@ -1,126 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-<Diagram>
- <ID>JAVA</ID>
- <OriginalElement>org.apache.atlas.storage.ReferenceableInstance</OriginalElement>
- <nodes>
- <node x="698.875" y="259.0">org.apache.atlas.storage.DownCastStructInstance</node>
- <node x="30.536458333333258" y="628.0">org.apache.atlas.storage.ReferenceableInstance</node>
- <node x="369.875" y="259.0">org.apache.atlas.IReferenceableInstance</node>
- <node x="0.0" y="146.0">org.apache.atlas.ITypedInstance</node>
- <node x="4.7864583333332575" y="482.0">org.apache.atlas.ITypedReferenceableInstance</node>
- <node x="269.40624999999994" y="0.0">org.apache.atlas.IInstance</node>
- <node x="947.875" y="248.0">org.apache.atlas.Struct</node>
- <node x="651.96875" y="449.0">org.apache.atlas.Referenceable</node>
- <node x="240.7864583333333" y="416.0">org.apache.atlas.storage.StructInstance</node>
- <node x="236.87499999999997" y="292.0">org.apache.atlas.ITypedStruct</node>
- <node x="593.1770833333333" y="157.0">org.apache.atlas.IStruct</node>
- </nodes>
- <notes />
- <edges>
- <edge source="org.apache.atlas.storage.ReferenceableInstance" target="org.apache.atlas.storage.StructInstance">
- <point x="82.25" y="-59.0" />
- <point x="277.28645833333326" y="603.0" />
- <point x="385.28645833333326" y="603.0" />
- <point x="0.0" y="81.0" />
- </edge>
- <edge source="org.apache.atlas.Referenceable" target="org.apache.atlas.IReferenceableInstance">
- <point x="-77.25" y="-48.0" />
- <point x="729.21875" y="396.0" />
- <point x="601.625" y="396.0" />
- <point x="77.25" y="48.0" />
- </edge>
- <edge source="org.apache.atlas.storage.DownCastStructInstance" target="org.apache.atlas.IStruct">
- <point x="0.0" y="-48.0" />
- <point x="813.375" y="228.0" />
- <point x="640.0520833333333" y="228.0" />
- <point x="9.375" y="15.0" />
- </edge>
- <edge source="org.apache.atlas.Struct" target="org.apache.atlas.IStruct">
- <point x="0.0" y="-59.0" />
- <point x="1072.375" y="218.0" />
- <point x="658.8020833333333" y="218.0" />
- <point x="28.125" y="15.0" />
- </edge>
- <edge source="org.apache.atlas.ITypedInstance" target="org.apache.atlas.IInstance">
- <point x="0.0" y="-26.0" />
- <point x="116.0" y="121.0" />
- <point x="326.65624999999994" y="121.0" />
- <point x="-57.25" y="48.0" />
- </edge>
- <edge source="org.apache.atlas.IStruct" target="org.apache.atlas.IInstance">
- <point x="0.0" y="-15.0" />
- <point x="630.6770833333333" y="121.0" />
- <point x="441.15624999999994" y="121.0" />
- <point x="57.25" y="48.0" />
- </edge>
- <edge source="org.apache.atlas.IReferenceableInstance" target="org.apache.atlas.IStruct">
- <point x="0.0" y="-48.0" />
- <point x="524.375" y="228.0" />
- <point x="621.3020833333333" y="228.0" />
- <point x="-9.375" y="15.0" />
- </edge>
- <edge source="org.apache.atlas.Referenceable" target="org.apache.atlas.Struct">
- <point x="77.25" y="-48.0" />
- <point x="883.71875" y="396.0" />
- <point x="1072.375" y="396.0" />
- <point x="0.0" y="59.0" />
- </edge>
- <edge source="org.apache.atlas.ITypedStruct" target="org.apache.atlas.IStruct">
- <point x="28.25" y="-15.0" />
- <point x="321.625" y="218.0" />
- <point x="602.5520833333333" y="218.0" />
- <point x="-28.125" y="15.0" />
- </edge>
- <edge source="org.apache.atlas.ITypedStruct" target="org.apache.atlas.ITypedInstance">
- <point x="-28.25" y="-15.0" />
- <point x="265.125" y="228.0" />
- <point x="174.0" y="228.0" />
- <point x="58.0" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.storage.StructInstance" target="org.apache.atlas.ITypedStruct">
- <point x="0.0" y="-81.0" />
- <point x="385.28645833333326" y="386.0" />
- <point x="293.375" y="386.0" />
- <point x="0.0" y="15.0" />
- </edge>
- <edge source="org.apache.atlas.ITypedReferenceableInstance" target="org.apache.atlas.ITypedInstance">
- <point x="-54.0" y="-15.0" />
- <point x="-58.0" y="26.0" />
- </edge>
- <edge source="org.apache.atlas.ITypedReferenceableInstance" target="org.apache.atlas.IReferenceableInstance">
- <point x="54.0" y="-15.0" />
- <point x="166.78645833333326" y="396.0" />
- <point x="447.125" y="396.0" />
- <point x="-77.25" y="48.0" />
- </edge>
- <edge source="org.apache.atlas.storage.ReferenceableInstance" target="org.apache.atlas.ITypedReferenceableInstance">
- <point x="-82.25" y="-59.0" />
- <point x="0.0" y="15.0" />
- </edge>
- </edges>
- <settings layout="Hierarchic Group" zoom="1.0" x="239.0" y="335.5" />
- <SelectedNodes />
- <Categories>
- <Category>Methods</Category>
- </Categories>
- <SCOPE>All</SCOPE>
- <VISIBILITY>private</VISIBILITY>
-</Diagram>
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/doc/QueryDSL.org
----------------------------------------------------------------------
diff --git a/typesystem/doc/QueryDSL.org b/typesystem/doc/QueryDSL.org
deleted file mode 100755
index 22b7386..0000000
--- a/typesystem/doc/QueryDSL.org
+++ /dev/null
@@ -1,266 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#+TITLE: Query DSL
-#+AUTHOR: Harish Butani
-#+EMAIL: hbutani@apache.org
-#+LANGUAGE: en
-#+INFOJS_OPT: view:showall toc:t ltoc:t mouse:underline path:http://orgmode.org/org-info.js
-#+LINK_HOME: http://home.fnal.gov/~neilsen
-#+LINK_UP: http://home.fnal.gov/~neilsen/notebook
-#+HTML_HEAD: <link rel="stylesheet" type="text/css" href="http://orgmode.org/org-manual.css" />
-#+LaTeX_CLASS: smarticle
-#+LaTeX_HEADER: \pdfmapfile{/home/neilsen/texmf/fonts/map/dvips/libertine/libertine.map}
-#+LaTeX_HEADER: \usepackage[ttscale=.875]{libertine}
-#+LaTeX_HEADER: \usepackage{sectsty}
-#+LaTeX_HEADER: \sectionfont{\normalfont\scshape}
-#+LaTeX_HEADER: \subsectionfont{\normalfont\itshape}
-#+EXPORT_SELECT_TAGS: export
-#+EXPORT_EXCLUDE_TAGS: noexport
-#+OPTIONS: H:2 num:nil toc:nil \n:nil @:t ::t |:t ^:{} _:{} *:t TeX:t LaTeX:t
-#+STARTUP: showall
-#+OPTIONS: html-postamble:nil
-
-** Example Type Definitions
-#+begin_src plantuml :file class_diagram.png
-scale 1300 width
-
-note left of Trait : traits are classifications/tags attached to Instances
-class Trait
-Trait <|-- JDbcAccess
-Trait <|-- PII
-Trait <|-- Dimension
-Trait <|-- Metric
-Trait <|-- ETL
-
-class Object
-
-Object --* Trait : traits >
-
-Object <|-- DB
-Object <|-- Table
-Object <|-- Column
-
-class DB {
- name : String
- owner : String
-}
-
-class StorageDescriptor {
- inputFormat : String
- outputFormat : String
-}
-
-class Column {
- name : String
- dataType : String
-}
-
-class Table {
- name: String
- db: DB
-}
-Table -> StorageDescriptor : storageDesc >
-Table -> DB : db >
-
-Column *-> StorageDescriptor : storageDesc >
-
-class LoadProcess {
- name String
-}
-LoadProcess -* Table : inputTables >
-LoadProcess -> Table : outputTable >
-
-class View {
- name String
-}
-View -* Table : inputTables >
-
-#+end_src
-
-#+CAPTION: ETL and Reporting Scenario Types
-#+LABEL: fig:sampleTypeDefs
-#+results:
-[[file:class_diagram.png]]
-
-** Example Instance Graph
-#+begin_src dot :file instanceGraph.png :cmdline -Kdot -Tpng
-digraph G {
- //size ="6 6";
- nodesep=.2;
- //rankdir=LR;
- ranksep=.25;
- node [shape=record fontsize=9];
- compound=true;
-
-subgraph cluster0 {
- style=bold;
- label = "Sales Database"; fontsize=18;
- salesDB[label="DB(sales)"]
-
- salesFact[label="Table(sales_fact)" style=filled; color="khaki"]
- salesStorage[label="Storage(text,text)"]
- sales_time_id[label="time_id" shape="circle" style=filled color="peachpuff"]
- sales_product_id[label="product_id" shape="circle" style=filled color="peachpuff"]
- sales_customer_id[label="customer_id" shape="circle" style=filled color="peachpuff"]
- sales_sales[label="sales" shape="circle" style=filled color="peachpuff"]
- sales_sales_metric[label="Metric" style=filled; shape="ellipse" color="turquoise"]
-
- salesFact -> salesDB;
- salesFact -> salesStorage;
- sales_time_id -> salesStorage;
- sales_product_id -> salesStorage;
- sales_customer_id -> salesStorage;
- sales_sales -> salesStorage;
- sales_sales -> sales_sales_metric;
-
- productDim[label="Table(product_dim)" style=filled; color="khaki"]
- productStorage[label="Storage(text,text)"]
- product_product_id[label="product_id" shape="circle" style=filled color="peachpuff"]
- product_product_name[label="product_name" shape="circle" style=filled color="peachpuff"]
- product_brand_name[label="brand_name" shape="circle" style=filled color="peachpuff"]
- product_dimension[label="Dimension" style=filled; shape="ellipse" color="turquoise"]
-
- productDim -> salesDB;
- productDim -> productStorage;
- product_product_id -> productStorage;
- product_product_name -> productStorage;
- product_brand_name -> productStorage;
- productDim -> product_dimension;
-
- productDim -> salesFact [style=invis];
-
- timeDim[label="Table(time_dim)" style=filled; color="khaki"]
- timeStorage[label="Storage(text,text)"]
- time_time_id[label="time_id" shape="circle" style=filled color="peachpuff"]
- time_dayOfYear[label="day_of_year" shape="circle" style=filled color="peachpuff"]
- time_weekDay[label="week_day" shape="circle" style=filled color="peachpuff"]
- time_dimension[label="Dimension" style=filled; shape="ellipse" color="turquoise"]
-
- timeDim -> salesDB;
- timeDim -> timeStorage;
- time_time_id -> timeStorage;
- time_dayOfYear -> timeStorage;
- time_weekDay -> timeStorage;
- timeDim -> time_dimension;
-
- timeDim -> productDim [style=invis];
-
- customerDim[label="Table(customer_dim)" style=filled; color="khaki"]
- customerStorage[label="Storage(text,text)"]
- customer_customer_id[label="customer_id" shape="circle" style=filled color="peachpuff"]
- customer_name[label="name" shape="circle" style=filled color="peachpuff"]
- customer_address[label="address" shape="circle" style=filled color="peachpuff"]
- customer_dimension[label="Dimension" style=filled; shape="ellipse" color="turquoise"]
- address_pii[label="PII" style=filled; shape="ellipse" color="turquoise"]
-
- customerDim -> salesDB;
- customerDim -> customerStorage;
- customer_customer_id -> customerStorage;
- customer_name -> customerStorage;
- customer_address -> customerStorage;
- customerDim -> customer_dimension;
- customer_address -> address_pii;
-
- customerDim -> timeDim [style=invis];
-
- //{rank=min; salesDB};
- {rank=min; salesDB};
-};
-
-subgraph cluster1 {
- style=bold;
- label = "Reporting Database"; fontsize=18;
-
- reportingDB[label="DB(reporting)"]
- salesFactDaily[label="Table(sales_daily_mv)" style=filled; color="khaki"]
- salesDailyStorage[label="Storage(orc,orc)"]
- salesD_time_id[label="time_id" shape="circle" style=filled color="peachpuff"]
- salesD_product_id[label="product_id" shape="circle" style=filled color="peachpuff"]
- salesD_customer_id[label="customer_id" shape="circle" style=filled color="peachpuff"]
- salesD_sales[label="sales" shape="circle" style=filled color="peachpuff"]
- salesD_sales_metric[label="Metric" style=filled; shape="ellipse" color="turquoise"]
-
- salesFactDaily -> reportingDB;
- salesFactDaily -> salesDailyStorage;
- salesD_time_id -> salesDailyStorage;
- salesD_product_id -> salesDailyStorage;
- salesD_customer_id -> salesDailyStorage;
- salesD_sales -> salesDailyStorage;
- salesD_sales -> salesD_sales_metric;
-
- salesFactDaily -> reportingDB [style=invis];
-
- productDimView[label="View(product_dim_v)" style=filled; color="khaki"]
- productDim -> productDimView [style=dotted];
- productDimView_dim[label="Dimension" style=filled; shape="ellipse" color="turquoise"]
- productDimView_jdbc[label="JdbcAccess" style=filled; shape="ellipse" color="turquoise"]
- productDimView -> productDimView_dim;
- productDimView -> productDimView_jdbc;
- productDimView -> salesFactDaily [style=invis];
-
- customerDimView[label="View(customer_dim_v)" style=filled; color="khaki"]
- customerDim -> customerDimView [style=dotted];
- customerDimView_dim[label="Dimension" style=filled; shape="ellipse" color="turquoise"]
- customerDimView_jdbc[label="JdbcAccess" style=filled; shape="ellipse" color="turquoise"]
- customerDimView -> customerDimView_dim;
- customerDimView -> customerDimView_jdbc;
- customerDimView -> salesFactDaily [style=invis];
-
- salesMonthly[label="Table(sales_monthly_mv)" style=filled; color="khaki"]
- salesMonthlyStorage[label="Storage(orc,orc)"]
- salesM_time_id[label="time_id" shape="circle" style=filled color="peachpuff"]
- salesM_product_id[label="product_id" shape="circle" style=filled color="peachpuff"]
- salesM_customer_id[label="customer_id" shape="circle" style=filled color="peachpuff"]
- salesM_sales[label="sales" shape="circle" style=filled color="peachpuff"]
- salesM_sales_metric[label="Metric" style=filled; shape="ellipse" color="turquoise"]
-
- salesMonthly -> reportingDB;
- salesMonthly -> salesMonthlyStorage;
- salesM_time_id -> salesMonthlyStorage;
- salesM_product_id -> salesMonthlyStorage;
- salesM_customer_id -> salesMonthlyStorage;
- salesM_sales -> salesMonthlyStorage;
- salesM_sales -> salesM_sales_metric;
-
- salesMonthly -> customerDimView [style=invis];
-
- {rank=min; reportingDB};
-
-};
-
- loadSalesDaily[label="LoadProcess(loadSalesDaily)" style=filled; color="seagreen"; shape="octagon"]
- loadSalesDaily_etl[label="ETL" style=filled; shape="ellipse" color="turquoise"]
- salesFact -> loadSalesDaily [style=dotted];
- timeDim -> loadSalesDaily [style=dotted];
- loadSalesDaily -> salesFactDaily [style=dotted];
- loadSalesDaily -> loadSalesDaily_etl;
-
- loadSalesMonthly[label="LoadProcess(loadSalesMonthly)" style=filled; color="seagreen"; shape="octagon"]
- loadSalesMonthly_etl[label="ETL" style=filled; shape="ellipse" color="turquoise"]
- salesFactDaily -> loadSalesMonthly [style=dotted];
- timeDim -> loadSalesMonthly [style=dotted];
- loadSalesMonthly -> salesMonthly [style=dotted];
- loadSalesMonthly -> loadSalesMonthly_etl;
-
-}
-#+end_src
-
-#+CAPTION: ETL and Reporting Scenario
-#+LABEL: fig:sampleInstanceGraph
-#+results:
-[[file:instanceGraph.png]]
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/doc/notes.MD
----------------------------------------------------------------------
diff --git a/typesystem/doc/notes.MD b/typesystem/doc/notes.MD
deleted file mode 100755
index 977d46c..0000000
--- a/typesystem/doc/notes.MD
+++ /dev/null
@@ -1,78 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-## Call with Harish on the initial typesystem naming conventions and intentions
-
-### Structs are like C structs - they don't have an identity
-- no independent lifecycle
-- like a bag of properties
-- like in hive, also
-
-### Classes are classes
-- like any OO class
-- have identity
-- can have inheritence
-- can contain structs
-- don't necessarily need to use a struct inside the class to define props
-- can also define props using AttributeDefinition using the basic data types
-- classes are immutable once created
-
-### On search interface:
-- can search for all instances of a class
-- classes could become tables in a relational system, for instance
- - also databases, columns, etc.
-
-### Traits is similar to scala - traits more like decorators (?)
-- traits get applied to instances - not classes
- - this satisfies the classification mechanism (ish)
-- can have a class instance have any number of traits
-- e.g. security clearance - any Person class could have it; so we add it as a mixin to the Person class
- - security clearance trait has a level attribute
- - traits are labels
- - each label can have its own attribute
-- reason for doing this is:
- - modeled security clearance trait
- - want to prescribe it to other things, too
- - can now search for anything that has security clearance level = 1, for instance
-
-### On Instances:
-- class, trait, struct all have bags of attributes
-- can get name of type associated with attribute
-- can get or set the attribute in that bag for each instance
-
-### On Classification:
-- create column as a class
-- create a trait to classify as "PHI"
-- would create the instance of the column with the PHI trait
-- apply traits to instances
-- CAN'T apply traits to class
-
-### Other useful information
-
-HierarchicalClassType - base type for ClassType and TraitType
-Instances created from Definitions
-
-Every instance is referenceable - i.e. something can point to it in the graph db
-MetadataService may not be used longterm - it is currently used for bootstrapping the repo & type system
-
-Id class - represents the Id of an instance
-
-When the web service receives an object graph, the ObjectGraphWalker is used to update things
- - DiscoverInstances is used to discover the instances in the object graph received by the web service
-
-MapIds assigns new ids to the discovered instances in the object graph
-
-Anything under the storage package is not part of the public interface
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/pom.xml
----------------------------------------------------------------------
diff --git a/typesystem/pom.xml b/typesystem/pom.xml
deleted file mode 100755
index 78cacb7..0000000
--- a/typesystem/pom.xml
+++ /dev/null
@@ -1,165 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one
- ~ or more contributor license agreements. See the NOTICE file
- ~ distributed with this work for additional information
- ~ regarding copyright ownership. The ASF licenses this file
- ~ to you under the Apache License, Version 2.0 (the
- ~ "License"); you may not use this file except in compliance
- ~ with the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.apache.atlas</groupId>
- <artifactId>apache-atlas</artifactId>
- <version>1.0.0-SNAPSHOT</version>
- </parent>
- <artifactId>atlas-typesystem</artifactId>
- <description>Apache Atlas Typesystem Module</description>
- <name>Apache Atlas Typesystem</name>
- <packaging>jar</packaging>
-
- <dependencies>
- <dependency>
- <groupId>org.codehaus.jettison</groupId>
- <artifactId>jettison</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-actors</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-native_${scala.binary.version}</artifactId>
- </dependency>
-
- <dependency>
- <groupId>com.github.nscala-time</groupId>
- <artifactId>nscala-time_${scala.binary.version}</artifactId>
- </dependency>
-
- <dependency>
- <groupId>com.typesafe</groupId>
- <artifactId>config</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.testng</groupId>
- <artifactId>testng</artifactId>
- </dependency>
-
- <dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- </dependency>
-
- <dependency>
- <groupId>it.unimi.dsi</groupId>
- <artifactId>fastutil</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-common</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-lang3</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-intg</artifactId>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- <version>3.2.0</version>
- <executions>
- <execution>
- <id>scala-compile-first</id>
- <phase>process-resources</phase>
- <goals>
- <goal>compile</goal>
- </goals>
- </execution>
- <execution>
- <id>scala-test-compile-first</id>
- <phase>process-test-resources</phase>
- <goals>
- <goal>testCompile</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <scalaVersion>${scala.version}</scalaVersion>
- <recompileMode>incremental</recompileMode>
- <useZincServer>true</useZincServer>
- <source>1.7</source>
- <target>1.7</target>
- <args>
- <arg>-unchecked</arg>
- <arg>-deprecation</arg>
- <arg>-feature</arg>
- </args>
- <jvmArgs>
- <jvmArg>-Xmx512m</jvmArg>
- </jvmArgs>
- </configuration>
- </plugin>
-
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-jar-plugin</artifactId>
- <version>2.4</version>
- <executions>
- <execution>
- <goals>
- <goal>test-jar</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
-
- </plugins>
- </build>
-</project>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/classification/InterfaceAudience.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/classification/InterfaceAudience.java b/typesystem/src/main/java/org/apache/atlas/classification/InterfaceAudience.java
deleted file mode 100755
index ae162ac..0000000
--- a/typesystem/src/main/java/org/apache/atlas/classification/InterfaceAudience.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.classification;
-
-import java.lang.annotation.Documented;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-
-/**
- * Annotation to mark methods for consumption.
- */
-@InterfaceAudience.Public
-public class InterfaceAudience {
- private InterfaceAudience() {
- }
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- public @interface Private {
- }
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- public @interface LimitedPrivate {
- String[] value();
- }
-
- @Documented
- @Retention(RetentionPolicy.RUNTIME)
- public @interface Public {
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/IInstance.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/IInstance.java b/typesystem/src/main/java/org/apache/atlas/typesystem/IInstance.java
deleted file mode 100755
index adb28d4..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/IInstance.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem;
-
-import org.apache.atlas.AtlasException;
-
-import java.util.Map;
-
-/**
- * Represents a Struct or Trait or Object.
- */
-public interface IInstance {
-
- String getTypeName();
-
- Object get(String attrName) throws AtlasException;
-
- void set(String attrName, Object val) throws AtlasException;
-
- void setNull(String attrName) throws AtlasException;
-
- Map<String, Object> getValuesMap() throws AtlasException;
-
- String toShortString();
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/IReferenceableInstance.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/IReferenceableInstance.java b/typesystem/src/main/java/org/apache/atlas/typesystem/IReferenceableInstance.java
deleted file mode 100755
index 04af67c..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/IReferenceableInstance.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.atlas.typesystem.persistence.AtlasSystemAttributes;
-import org.apache.atlas.typesystem.persistence.Id;
-
-/**
- * Represents and instance of a ClassType. These have identity.
- * Transient instances will have a UNASSIGNED identity.
- */
-public interface IReferenceableInstance extends IStruct {
-
- ImmutableList<String> getTraits();
-
- Id getId();
-
- IStruct getTrait(String typeName);
-
- AtlasSystemAttributes getSystemAttributes();
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/IStruct.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/IStruct.java b/typesystem/src/main/java/org/apache/atlas/typesystem/IStruct.java
deleted file mode 100755
index e0f8576..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/IStruct.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem;
-
-/**
- * A marker interface for StructType and TraitType instances.
- */
-public interface IStruct extends IInstance {
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedInstance.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedInstance.java b/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedInstance.java
deleted file mode 100755
index c3bb9af..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedInstance.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.types.FieldMapping;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.security.MessageDigest;
-import java.util.Date;
-
-/**
- * An instance whose structure is associated with a IDataType.
- * This is obtained by a call to 'createInstance' or the result of a Query.
- * A ITypedInstance can only contain information on attributes of the associated Type.
- * Instance can still be invalid because of missing required fields or incorrect multiplicity.
- * But user can only get/set on a known field of the associated type. Type values have to match
- * the IDataType of the associated attribute.
- */
-public interface ITypedInstance extends IInstance {
-
- FieldMapping fieldMapping();
-
- boolean getBoolean(String attrName) throws AtlasException;
-
- byte getByte(String attrName) throws AtlasException;
-
- short getShort(String attrName) throws AtlasException;
-
- int getInt(String attrName) throws AtlasException;
-
- long getLong(String attrName) throws AtlasException;
-
- float getFloat(String attrName) throws AtlasException;
-
- double getDouble(String attrName) throws AtlasException;
-
- BigInteger getBigInt(String attrName) throws AtlasException;
-
- BigDecimal getBigDecimal(String attrName) throws AtlasException;
-
- Date getDate(String attrName) throws AtlasException;
-
- String getString(String attrName) throws AtlasException;
-
- void setBoolean(String attrName, boolean val) throws AtlasException;
-
- void setByte(String attrName, byte val) throws AtlasException;
-
- void setShort(String attrName, short val) throws AtlasException;
-
- void setInt(String attrName, int val) throws AtlasException;
-
- void setLong(String attrName, long val) throws AtlasException;
-
- void setFloat(String attrName, float val) throws AtlasException;
-
- void setDouble(String attrName, double val) throws AtlasException;
-
- void setBigInt(String attrName, BigInteger val) throws AtlasException;
-
- void setBigDecimal(String attrName, BigDecimal val) throws AtlasException;
-
- void setDate(String attrName, Date val) throws AtlasException;
-
- void setString(String attrName, String val) throws AtlasException;
-
- String getSignatureHash(MessageDigest digester) throws AtlasException;
-
- boolean isValueSet(String attrName) throws AtlasException;
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedReferenceableInstance.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedReferenceableInstance.java b/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedReferenceableInstance.java
deleted file mode 100755
index 6dae95c..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedReferenceableInstance.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem;
-
-public interface ITypedReferenceableInstance extends ITypedInstance, IReferenceableInstance {
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedStruct.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedStruct.java b/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedStruct.java
deleted file mode 100755
index ced255c..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/ITypedStruct.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem;
-
-public interface ITypedStruct extends IStruct, ITypedInstance {
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/Referenceable.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/Referenceable.java b/typesystem/src/main/java/org/apache/atlas/typesystem/Referenceable.java
deleted file mode 100755
index 8cad2f8..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/Referenceable.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.classification.InterfaceAudience;
-import org.apache.atlas.typesystem.persistence.AtlasSystemAttributes;
-import org.apache.atlas.typesystem.persistence.Id;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- * Represents a Class Instance that has not been associated with a FieldMapping.
- */
-public class Referenceable extends Struct implements IReferenceableInstance {
-
- private Id id;
- private final ImmutableMap<String, IStruct> traits;
- private final ImmutableList<String> traitNames;
- private AtlasSystemAttributes systemAttributes;
-
- public Referenceable(String typeName, String... traitNames) {
- super(typeName);
- id = new Id(typeName);
- this.traitNames = ImmutableList.copyOf(traitNames);
- ImmutableMap.Builder<String, IStruct> b = new ImmutableMap.Builder<>();
- for (String t : traitNames) {
- b.put(t, new Struct(t));
- }
- traits = b.build();
- this.systemAttributes = new AtlasSystemAttributes();
- }
-
- public Referenceable(String typeName, Map<String, Object> values) {
- super(typeName, values);
- id = new Id(typeName);
- traitNames = ImmutableList.of();
- traits = ImmutableMap.of();
- this.systemAttributes = new AtlasSystemAttributes();
- }
-
- public Referenceable(String guid, String typeName, Map<String, Object> values) {
- super(typeName, values);
- id = new Id(guid, 0, typeName);
- traitNames = ImmutableList.of();
- traits = ImmutableMap.of();
- this.systemAttributes = new AtlasSystemAttributes();
- }
-
- /**
- * Not public - only use during deserialization
- * @param guid the unique id
- * @param typeName the type name
- * @param values the entity attribute values
- */
- @InterfaceAudience.Private
- public Referenceable(String guid, String typeName, Map<String, Object> values, List<String> _traitNames,
- Map<String, IStruct> _traits) {
- super(typeName, values);
- id = new Id(guid, 0, typeName);
- traitNames = ImmutableList.copyOf(_traitNames);
- traits = ImmutableMap.copyOf(_traits);
- this.systemAttributes = new AtlasSystemAttributes();
- }
-
- /**
- * Not public - only use during deserialization
- * @param id entity id
- * @param typeName the type name
- * @param values the entity attribute values
- */
- @InterfaceAudience.Private
- public Referenceable(Id id, String typeName, Map<String, Object> values, List<String> _traitNames,
- Map<String, IStruct> _traits) {
- super(typeName, values);
- this.id = id;
- traitNames = ImmutableList.copyOf(_traitNames);
- traits = ImmutableMap.copyOf(_traits);
- this.systemAttributes = new AtlasSystemAttributes();
- }
-
- /**
- * Not public - only use during deserialization
- * @param id entity id
- * @param typeName the type name
- * @param values the entity attribute values
- */
- @InterfaceAudience.Private
- public Referenceable(Id id, String typeName, Map<String, Object> values, List<String> _traitNames,
- Map<String, IStruct> _traits, AtlasSystemAttributes systemAttributes) {
- super(typeName, values);
- this.id = id;
- traitNames = ImmutableList.copyOf(_traitNames);
- traits = ImmutableMap.copyOf(_traits);
- this.systemAttributes = systemAttributes;
- }
-
- /**
- * Construct a Referenceable from the given IReferenceableInstance.
- *
- * @param instance the referenceable instance to copy
- *
- * @throws AtlasException if the referenceable can not be created
- */
- public Referenceable(IReferenceableInstance instance) throws AtlasException {
- this(instance.getId(), instance.getTypeName(), instance.getValuesMap(), instance.getTraits(),
- getTraits(instance));
- }
-
- /**
- * No-arg constructor for serialization.
- */
- @SuppressWarnings("unused")
- private Referenceable() {
- super(null, null);
- id = null;
- traitNames = ImmutableList.of();
- traits = ImmutableMap.of();
- }
-
- @Override
- public ImmutableList<String> getTraits() {
- return traitNames;
- }
-
- @Override
- public Id getId() {
- return id;
- }
-
- @Override
- public IStruct getTrait(String typeName) {
- return traits.get(typeName);
- }
-
- @Override
- public AtlasSystemAttributes getSystemAttributes(){
- return systemAttributes;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass() || !super.equalsContents(o)) return false;
- Referenceable that = (Referenceable) o;
- return Objects.equals(id, that.id) &&
- Objects.equals(traits, that.traits) &&
- Objects.equals(traitNames, that.traitNames);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(super.hashCode(), id, traits, traitNames);
- }
-
- /**
- * Matches traits, values associated with this Referenceable and skips the id match
- * @param o The Referenceable which needs to be matched with
- * @return
- */
- public boolean equalsContents(Object o) {
- if(this == o) {
- return true;
- }
- if(o == null) {
- return false;
- }
- if (o.getClass() != getClass()) {
- return false;
- }
-
- if(!super.equalsContents(o)) {
- return false;
- }
-
- Referenceable obj = (Referenceable)o;
- if (!traitNames.equals(obj.getTraits())) {
- return false;
- }
-
- return true;
- }
-
- public String toString() {
- return "{" +
- "Id='" + id + '\'' +
- ", traits=" + traitNames +
- ", values=" + getValuesMap() +
- '}';
- }
-
- @Override
- public String toShortString() {
- return String.format("entity[type=%s guid=%s]", typeName, id._getId());
- }
-
- public void replaceWithNewId(Id id) {
- this.id = id;
- }
-
- private static Map<String, IStruct> getTraits(IReferenceableInstance instance) throws AtlasException {
- Map<String, IStruct> traits = new HashMap<>();
- for (String traitName : instance.getTraits() ) {
- traits.put(traitName, new Struct(traitName, instance.getTrait(traitName).getValuesMap()));
- }
- return traits;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/Struct.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/Struct.java b/typesystem/src/main/java/org/apache/atlas/typesystem/Struct.java
deleted file mode 100755
index f8d2e42..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/Struct.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.classification.InterfaceAudience;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-
-public class Struct implements IStruct {
-
- public final String typeName;
- private final Map<String, Object> values;
-
- public Struct(String typeName) {
- this.typeName = typeName;
- values = new HashMap<>();
- }
-
- @InterfaceAudience.Private
- public Struct(String typeName, Map<String, Object> values) {
- this(typeName);
- if (values != null) {
- this.values.putAll(values);
- }
- }
-
- /**
- * No-arg constructor for serialization.
- */
- @SuppressWarnings("unused")
- private Struct() {
- this("", Collections.<String, Object>emptyMap());
- }
-
-
- @Override
- public String getTypeName() {
- return typeName;
- }
-
- @Override
- public Object get(String attrName) {
- return values.get(attrName);
- }
-
- @Override
- public void set(String attrName, Object value) {
- values.put(attrName, value);
- }
-
- @Override
- public void setNull(String attrName) throws AtlasException {
- values.put(attrName, null);
- }
-
- @Override
- public Map<String, Object> getValuesMap() {
- return values;
- }
-
- @Override
- public String toShortString() {
- return String.format("struct[type=%s]", typeName);
- }
-
- @Override
- public int hashCode() {
- int result = typeName.hashCode();
- result = 31 * result + values.hashCode();
- return result;
- }
-
- /**
- * equalContents instead of equals since values is a mutable attribute and could lead
- * to incorrect behaviour when added to collections and mutated after that
- * i.e when the attribute is mutated collections.contains(struct) returns false
- * due to hashcode having changed for the struct.
- * @param o
- * @return
- */
- public boolean equalsContents(Object o) {
- if (this == o) {
- return true;
- }
-
- if (o == null) {
- return false;
- }
-
- if (o.getClass() != getClass()) {
- return false;
- }
-
- Struct obj = (Struct)o;
- if(!typeName.equals(obj.getTypeName())) {
- return false;
- }
-
- if(!values.equals(obj.getValuesMap())) {
- return false;
- }
-
- return true;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/exception/TypeExistsException.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/exception/TypeExistsException.java b/typesystem/src/main/java/org/apache/atlas/typesystem/exception/TypeExistsException.java
deleted file mode 100644
index 8a28e38..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/exception/TypeExistsException.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.exception;
-
-import org.apache.atlas.AtlasException;
-
-public class TypeExistsException extends AtlasException {
- public TypeExistsException(String message) {
- super(message);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/exception/TypeNotFoundException.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/exception/TypeNotFoundException.java b/typesystem/src/main/java/org/apache/atlas/typesystem/exception/TypeNotFoundException.java
deleted file mode 100644
index 3654a4b..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/exception/TypeNotFoundException.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.exception;
-
-import org.apache.atlas.AtlasException;
-
-/**
- * A simple wrapper for 404.
- */
-public class TypeNotFoundException extends AtlasException {
- public TypeNotFoundException() {
- }
-
- public TypeNotFoundException(String message) {
- super(message);
- }
-
- public TypeNotFoundException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public TypeNotFoundException(Throwable cause) {
- super(cause);
- }
-
- public TypeNotFoundException(String message, Throwable cause, boolean enableSuppression,
- boolean writableStackTrace) {
- super(message, cause, enableSuppression, writableStackTrace);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/AtlasSystemAttributes.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/AtlasSystemAttributes.java b/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/AtlasSystemAttributes.java
deleted file mode 100644
index 3c08a02..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/AtlasSystemAttributes.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.typesystem.persistence;
-
-import org.apache.atlas.typesystem.types.TypeSystem;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-public class AtlasSystemAttributes {
- public String createdBy;
- public String modifiedBy;
- public Date createdTime;
- public Date modifiedTime;
- public SimpleDateFormat simpleDateFormat = TypeSystem.getInstance().getDateFormat();
-
-
- public AtlasSystemAttributes(String createdBy, String modifiedBy, Date createdTime, Date modifiedTime){
- this.createdBy = createdBy;
- this.modifiedBy = modifiedBy;
- this.createdTime = createdTime;
- this.modifiedTime = modifiedTime;
- }
-
- public AtlasSystemAttributes(){
- super();
- }
-
- public AtlasSystemAttributes(String createdBy, String modifiedBy, String createdTime, String modifiedTime){
- this.createdBy = createdBy;
- this.modifiedBy = modifiedBy;
-
- try{
- this.createdTime = simpleDateFormat.parse(createdTime);
- }catch (ParseException e){
- //this.createdTime = new Date(0);
- }
-
- try{
- this.modifiedTime = simpleDateFormat.parse(modifiedTime);
- }catch (ParseException e){
- //this.modifiedTime = new Date(0);
- }
- }
-
-
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
-
- AtlasSystemAttributes sys_attr = (AtlasSystemAttributes) o;
-
- if (!createdBy.equals(sys_attr.createdBy)) {
- return false;
- }
- if (!modifiedBy.equals(sys_attr.modifiedBy)) {
- return false;
- }
- if (!createdTime.equals(sys_attr.createdTime)) {
- return false;
- }
-
- if(!modifiedTime.equals(sys_attr.modifiedTime)){
- return false;
- }
- return true;
- }
-
- @Override
- public int hashCode() {
- int result = createdBy.hashCode();
- result = 31 * result + modifiedBy.hashCode();
- result = 31 * result + createdTime.hashCode();
- result = 31 * result + modifiedTime.hashCode();
- return result;
- }
-
- public String getCreatedBy(){
- return createdBy;
- }
-
- public String getModifiedBy(){
- return modifiedBy;
- }
-
- public Date getCreatedTime(){
- return createdTime;
- }
-
- public Date getModifiedTime(){
- return modifiedTime;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/DownCastStructInstance.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/DownCastStructInstance.java b/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/DownCastStructInstance.java
deleted file mode 100755
index e4ebb17..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/persistence/DownCastStructInstance.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.persistence;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.types.DownCastFieldMapping;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class DownCastStructInstance implements IStruct {
-
- public final String typeName;
- public final DownCastFieldMapping fieldMapping;
- public final IStruct backingInstance;
-
- public DownCastStructInstance(String typeName, DownCastFieldMapping fieldMapping, IStruct backingInstance) {
- this.typeName = typeName;
- this.fieldMapping = fieldMapping;
- this.backingInstance = backingInstance;
- }
-
- @Override
- public String getTypeName() {
- return typeName;
- }
-
- @Override
- public Object get(String attrName) throws AtlasException {
- return fieldMapping.get(this, attrName);
- }
-
- @Override
- public void set(String attrName, Object val) throws AtlasException {
- fieldMapping.set(this, attrName, val);
- }
-
- @Override
- public void setNull(String attrName) throws AtlasException {
- throw new UnsupportedOperationException("unset on attributes are not allowed");
- }
-
- /*
- * Use only for json serialization
- * @nonpublic
- */
- @Override
- public Map<String, Object> getValuesMap() throws AtlasException {
-
- Map<String, Object> m = new HashMap<>();
- for (String attr : fieldMapping.fieldNameMap.keySet()) {
- m.put(attr, get(attr));
- }
- return m;
- }
-
- @Override
- public String toShortString() {
- return toString();
- }
-}
-
-
[07/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/scala/org/apache/atlas/typesystem/json/TypesSerialization.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/TypesSerialization.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/json/TypesSerialization.scala
deleted file mode 100755
index 4478a44..0000000
--- a/typesystem/src/main/scala/org/apache/atlas/typesystem/json/TypesSerialization.scala
+++ /dev/null
@@ -1,270 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.json
-
-import java.text.SimpleDateFormat
-import com.google.common.collect.ImmutableList
-import org.apache.atlas.AtlasException
-import org.apache.atlas.AtlasConstants
-import org.apache.atlas.typesystem.TypesDef
-import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, MapType, TypeCategory}
-import org.apache.atlas.typesystem.types._
-import org.json4s.JsonAST.JString
-import org.json4s._
-import org.json4s.native.Serialization._
-import com.google.common.collect.ImmutableSet
-
-
-/**
- * Module for serializing to/from Json.
- *
- * @example {{{
- * val j = TypesSerialization.toJson(typeSystem, "Employee", "Person", "Department", "SecurityClearance")
- *
- * val typesDef = TypesSerialization.fromJson(jsonStr)
- * typesDef.enumTypes.foreach( typeSystem.defineEnumType(_))
-
- typeSystem.defineTypes(ImmutableList.copyOf(typesDef.structTypes.toArray),
- ImmutableList.copyOf(typesDef.traitTypes.toArray),
- ImmutableList.copyOf(typesDef.classTypes.toArray)
- )
- * }}}
- *
- * @todo doesn't traverse includes directives. Includes are parsed into
- * [[org.apache.atlas.tools.thrift.IncludeDef IncludeDef]] structures
- * but are not traversed.
- * @todo mixing in [[scala.util.parsing.combinator.PackratParsers PackratParsers]] is a placeholder. Need to
- * change specific grammar rules to `lazy val` and `Parser[Elem]` to `PackratParser[Elem]`. Will do based on
- * performance analysis.
- * @todo Error reporting
- */
-object TypesSerialization {
-
- def toJsonValue(typ: IDataType[_])(implicit formats: Formats): JValue = {
- typ.getTypeCategory match {
- case TypeCategory.CLASS => {
- Extraction.decompose(convertClassTypeToHierarchicalTypeDefinition(typ.asInstanceOf[ClassType]))
- }
- case TypeCategory.STRUCT => {
- Extraction.decompose(convertStructTypeToStructDef(typ.asInstanceOf[StructType]))
- }
- case TypeCategory.TRAIT => {
- Extraction.decompose(convertTraitTypeToHierarchicalTypeDefinition(typ.asInstanceOf[TraitType]))
- }
- case TypeCategory.ENUM => {
- Extraction.decompose(convertEnumTypeToEnumTypeDef(typ.asInstanceOf[EnumType]))
- }
- case _ => JString(s"${typ.getName}")
- }
- }
-
- def toJson(ts: TypeSystem, typName: String): String = {
- toJson(ts, List(typName): _*)
- }
-
- def toJson(ts: TypeSystem, typNames: String*): String = {
- toJson(ts, (typ: IDataType[_]) => typNames.contains(typ.getName))
- }
-
- import scala.collection.JavaConversions._
-
- def toJson(ts: TypeSystem, typNames: java.util.List[String]): String = {
- toJson(ts, typNames.toIndexedSeq: _*)
- }
-
- val _formats = new DefaultFormats {
- override val dateFormatter = TypeSystem.getInstance().getDateFormat.asInstanceOf[SimpleDateFormat]
- override val typeHints = NoTypeHints
- }
-
- def toJson(ts: TypeSystem, export: IDataType[_] => Boolean): String = {
- implicit val formats = _formats + new MultiplicitySerializer
-
- val typsDef = convertToTypesDef(ts, export)
-
- writePretty(typsDef)
- }
-
- def fromJson(jsonStr: String): TypesDef = {
- implicit val formats = _formats + new MultiplicitySerializer
-
- read[TypesDef](jsonStr)
- }
-
- def toJson(typesDef : TypesDef) : String = {
- implicit val formats = _formats + new MultiplicitySerializer
- writePretty(typesDef)
-
- }
-
- def toJson(enumTypeDefinition: EnumTypeDefinition) : String = {
- toJson(new TypesDef(enumTypeDefinition))
- }
-
- def toJson(structTypeDefinition: StructTypeDefinition) : String = {
- toJson(new TypesDef(structTypeDefinition))
- }
-
- def toJson(typDef: HierarchicalTypeDefinition[_], isTrait : Boolean) : String = {
- toJson(new TypesDef(typDef, isTrait))
- }
-
- private def convertAttributeInfoToAttributeDef(aInfo: AttributeInfo) = {
- new AttributeDefinition(aInfo.name, aInfo.dataType().getName, aInfo.multiplicity,
- aInfo.isComposite, aInfo.isUnique, aInfo.isIndexable, aInfo.reverseAttributeName)
- }
-
- private def convertEnumTypeToEnumTypeDef(et: EnumType) = {
- val eVals: Seq[EnumValue] = et.valueMap.values().toSeq
- new EnumTypeDefinition(et.name, et.description, et.version, eVals: _*)
- }
-
- private def convertStructTypeToStructDef(st: StructType): StructTypeDefinition = {
-
- val aDefs: Iterable[AttributeDefinition] =
- st.fieldMapping.fields.values().map(convertAttributeInfoToAttributeDef(_))
- new StructTypeDefinition(st.name, st.description, st.version, aDefs.toArray)
- }
-
- private def convertTraitTypeToHierarchicalTypeDefinition(tt: TraitType): HierarchicalTypeDefinition[TraitType] = {
-
- val aDefs: Iterable[AttributeDefinition] =
- tt.immediateAttrs.map(convertAttributeInfoToAttributeDef(_))
- new HierarchicalTypeDefinition[TraitType](classOf[TraitType], tt.name, tt.description, tt.version, tt.superTypes, aDefs.toArray)
- }
-
- private def convertClassTypeToHierarchicalTypeDefinition(tt: ClassType): HierarchicalTypeDefinition[ClassType] = {
-
- val aDefs: Iterable[AttributeDefinition] =
- tt.immediateAttrs.map(convertAttributeInfoToAttributeDef(_))
- new HierarchicalTypeDefinition[ClassType](classOf[ClassType], tt.name, tt.description, tt.version, tt.superTypes, aDefs.toArray)
- }
-
- def convertToTypesDef(ts: TypeSystem, export: IDataType[_] => Boolean): TypesDef = {
-
- var enumTypes: Seq[EnumTypeDefinition] = Nil
- var structTypes: Seq[StructTypeDefinition] = Nil
- var traitTypes: Seq[HierarchicalTypeDefinition[TraitType]] = Nil
- var classTypes: Seq[HierarchicalTypeDefinition[ClassType]] = Nil
-
- def toTyp(nm: String) = ts.getDataType(classOf[IDataType[_]], nm)
-
- val typs: Iterable[IDataType[_]] = ts.getTypeNames.map(toTyp(_)).filter { (typ: IDataType[_]) =>
- !(ts.getCoreTypes.contains(typ.getName)) && export(typ)
- }
-
- typs.foreach {
- case typ: ArrayType => ()
- case typ: MapType => ()
- case typ: EnumType => enumTypes = enumTypes :+ convertEnumTypeToEnumTypeDef(typ)
- case typ: StructType => structTypes = structTypes :+ convertStructTypeToStructDef(typ)
- case typ: TraitType => traitTypes = traitTypes :+ convertTraitTypeToHierarchicalTypeDefinition(typ)
- case typ: ClassType => classTypes = classTypes :+ convertClassTypeToHierarchicalTypeDefinition(typ)
- }
-
- TypesDef(enumTypes, structTypes, traitTypes, classTypes)
- }
-
-}
-
-class MultiplicitySerializer extends CustomSerializer[Multiplicity](format => ( {
- case JString(m) => m match {
- case "optional" => Multiplicity.OPTIONAL
- case "required" => Multiplicity.REQUIRED
- case "collection" => Multiplicity.COLLECTION
- case "set" => Multiplicity.SET
- }
-}, {
- case m: Multiplicity => JString(m match {
- case Multiplicity.OPTIONAL => "optional"
- case Multiplicity.REQUIRED => "required"
- case Multiplicity.COLLECTION => "collection"
- case Multiplicity.SET => "set"
- }
-
- )
-}
- ))
-
-trait TypeHelpers {
- def requiredAttr(name: String, dataType: IDataType[_]) =
- new AttributeDefinition(name, dataType.getName, Multiplicity.REQUIRED, false, null)
-
- def optionalAttr(name: String, dataTypeName: String) =
- new AttributeDefinition(name, dataTypeName, Multiplicity.OPTIONAL, false, null)
-
-
- def optionalAttr(name: String, dataType: IDataType[_]) =
- new AttributeDefinition(name, dataType.getName, Multiplicity.OPTIONAL, false, null)
-
- def structDef(name: String, attrs: AttributeDefinition*):
- StructTypeDefinition = {
- structDef(name, None, attrs:_*)
- }
-
- def structDef(name: String, description: Option[String], attrs: AttributeDefinition*) = {
- new StructTypeDefinition(name, description.getOrElse(null), attrs.toArray)
- }
-
- def defineTraits(ts: TypeSystem, tDefs: HierarchicalTypeDefinition[TraitType]*) = {
- ts.defineTraitTypes(tDefs: _*)
- }
-
- def createTraitTypeDef(name: String, superTypes: Seq[String], attrDefs: AttributeDefinition*):
- HierarchicalTypeDefinition[TraitType] = {
- createTraitTypeDef(name, None, superTypes, attrDefs:_*)
- }
-
- def createTraitTypeDef(name: String, description: Option[String], superTypes: Seq[String], attrDefs: AttributeDefinition*):
- HierarchicalTypeDefinition[TraitType] = {
- createTraitTypeDef(name, None, AtlasConstants.DEFAULT_TYPE_VERSION, superTypes, attrDefs:_*)
- }
-
- def createTraitTypeDef(name: String, description: Option[String], version: String,superTypes: Seq[String], attrDefs: AttributeDefinition*):
- HierarchicalTypeDefinition[TraitType] = {
- val sts = ImmutableSet.copyOf(superTypes.toArray)
- return new HierarchicalTypeDefinition[TraitType](classOf[TraitType], name, description.getOrElse(null),
- sts, attrDefs.toArray)
- }
-
- def createClassTypeDef(name: String, superTypes: Seq[String], attrDefs: AttributeDefinition*):
- HierarchicalTypeDefinition[ClassType] = {
- createClassTypeDef( name, None, superTypes, attrDefs:_*)
- }
-
- def createClassTypeDef(name: String, description: Option[String], superTypes: Seq[String], attrDefs: AttributeDefinition*):
- HierarchicalTypeDefinition[ClassType] = {
- createClassTypeDef( name, None, None, superTypes, attrDefs:_*)
- }
-
- def createClassTypeDef(name: String, description: Option[String], version: Option[String], superTypes: Seq[String], attrDefs: AttributeDefinition*):
- HierarchicalTypeDefinition[ClassType] = {
- val sts = ImmutableSet.copyOf(superTypes.toArray)
- return new HierarchicalTypeDefinition[ClassType](classOf[ClassType], name, description.getOrElse(null), AtlasConstants.DEFAULT_TYPE_VERSION, sts, attrDefs.toArray)
- }
-
-
- @throws(classOf[AtlasException])
- def defineClassType(ts: TypeSystem, classDef: HierarchicalTypeDefinition[ClassType]): ClassType = {
- ts.defineTypes(ImmutableList.of[EnumTypeDefinition], ImmutableList.of[StructTypeDefinition],
- ImmutableList.of[HierarchicalTypeDefinition[TraitType]],
- ImmutableList.of[HierarchicalTypeDefinition[ClassType]](classDef))
- return ts.getDataType(classOf[ClassType], classDef.typeName)
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/ApplicationPropertiesTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/ApplicationPropertiesTest.java b/typesystem/src/test/java/org/apache/atlas/ApplicationPropertiesTest.java
deleted file mode 100644
index 25a19cf..0000000
--- a/typesystem/src/test/java/org/apache/atlas/ApplicationPropertiesTest.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas;
-
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.commons.configuration.Configuration;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-import static org.testng.Assert.assertEquals;
-
-public class ApplicationPropertiesTest {
-
- @Test
- public void testVariables() throws Exception {
- Configuration properties = ApplicationProperties.get(ApplicationProperties.APPLICATION_PROPERTIES);
-
- //plain property without variables
- assertEquals(properties.getString("atlas.service"), "atlas");
-
- //property containing system property
- String data = System.getProperty("user.dir") + "/target/data";
- assertEquals(properties.getString("atlas.data"), data);
-
- //property referencing other property
- assertEquals(properties.getString("atlas.graph.data"), data + "/graph");
-
- //invalid system property - not substituted
- assertEquals(properties.getString("atlas.db"), "${atlasdb}");
- }
-
- @Test
- //variable substitutions should work with subset configuration as well
- public void testSubset() throws Exception {
- Configuration configuration = ApplicationProperties.get(ApplicationProperties.APPLICATION_PROPERTIES);
- Configuration subConfiguration = configuration.subset("atlas");
-
- assertEquals(subConfiguration.getString("service"), "atlas");
- String data = System.getProperty("user.dir") + "/target/data";
- assertEquals(subConfiguration.getString("data"), data);
- assertEquals(subConfiguration.getString("graph.data"), data + "/graph");
- }
-
- @Test
- public void testGetClass() throws Exception {
- Configuration configuration = ApplicationProperties.get();
-
- //read from atlas-application.properties
- Class cls = ApplicationProperties.getClass(configuration, "atlas.TypeSystem.impl",
- ApplicationProperties.class.getName(), TypeSystem.class);
- assertEquals(cls.getName(), TypeSystem.class.getName());
-
- //default value
- cls = ApplicationProperties.getClass(configuration, "atlas.TypeSystem2.impl",
- TypeSystem.class.getName(), TypeSystem.class);
- assertEquals(cls.getName(), TypeSystem.class.getName());
-
- //incompatible assignTo class, should throw AtlasException
- try {
- cls = ApplicationProperties.getClass(configuration, "atlas.TypeSystem.impl",
- ApplicationProperties.class.getName(), ApplicationProperties.class);
- Assert.fail(AtlasException.class.getSimpleName() + " was expected but none thrown.");
- }
- catch (AtlasException e) {
- // good
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/json/SerializationJavaTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/json/SerializationJavaTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/json/SerializationJavaTest.java
deleted file mode 100755
index 5ee019c..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/json/SerializationJavaTest.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.json;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.ITypedInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.BaseTest;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-import org.testng.Assert;
-
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createTraitTypeDef;
-
-public class SerializationJavaTest extends BaseTest {
-
-
- @BeforeMethod
- public void setup() throws Exception {
- super.setup();
- }
-
- /*
- * Class Hierarchy is:
- * Department(name : String, employees : Array[Person])
- * Person(name : String, department : Department, manager : Manager)
- * Manager(subordinates : Array[Person]) extends Person
- *
- * Persons can have SecurityClearance(level : Int) clearance.
- */
- @Test
- public void test1() throws AtlasException {
-
- TypeSystem ts = getTypeSystem();
-
- HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef("Department", ImmutableSet.<String>of(),
- createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("employees", String.format("array<%s>", "Person"), Multiplicity.COLLECTION,
- true, "department"));
- HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person", ImmutableSet.<String>of(),
- createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
- new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"));
- HierarchicalTypeDefinition<ClassType> managerTypeDef =
- createClassTypeDef("Manager", ImmutableSet.of("Person"),
- new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
- Multiplicity.COLLECTION, false, "manager"));
-
- HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
- createTraitTypeDef("SecurityClearance", ImmutableSet.<String>of(),
- createRequiredAttrDef("level", DataTypes.INT_TYPE));
-
- ts.defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.of(securityClearanceTypeDef),
- ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
-
- Referenceable hrDept = new Referenceable("Department");
- Referenceable john = new Referenceable("Person");
- Referenceable jane = new Referenceable("Manager", "SecurityClearance");
-
- hrDept.set("name", "hr");
- john.set("name", "John");
- john.set("department", hrDept);
- jane.set("name", "Jane");
- jane.set("department", hrDept);
-
- john.set("manager", jane);
-
- hrDept.set("employees", ImmutableList.of(john, jane));
-
- jane.set("subordinates", ImmutableList.of(john));
-
- jane.getTrait("SecurityClearance").set("level", 1);
-
- ClassType deptType = ts.getDataType(ClassType.class, "Department");
- ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
-
- String hrDeptStr = hrDept2.toString();
-
- Assert.assertEquals(hrDeptStr, "{\n" +
- "\tid : (type: Department, id: <unassigned>)\n" +
- "\tname : \thr\n" +
- "\temployees : \t[{\n" +
- "\tid : (type: Person, id: <unassigned>)\n" +
- "\tname : \tJohn\n" +
- "\tdepartment : (type: Department, id: <unassigned>)\n" +
- "\tmanager : (type: Manager, id: <unassigned>)\n" +
- "}, {\n" +
- "\tid : (type: Manager, id: <unassigned>)\n" +
- "\tsubordinates : \t[{\n" +
- "\tid : (type: Person, id: <unassigned>)\n" +
- "\tname : \tJohn\n" +
- "\tdepartment : (type: Department, id: <unassigned>)\n" +
- "\tmanager : (type: Manager, id: <unassigned>)\n" +
- "}]\n" +
- "\tname : \tJane\n" +
- "\tdepartment : (type: Department, id: <unassigned>)\n" +
- "\tmanager : <null>\n" +
- "\n" +
- "\tSecurityClearance : \t{\n" +
- "\t\tlevel : \t\t1\n" +
- "\t}}]\n" +
- "}");
-
- String jsonStr = Serialization$.MODULE$.toJson(hrDept2);
- //System.out.println(jsonStr);
-
- hrDept2 = Serialization$.MODULE$.fromJson(jsonStr);
- Assert.assertEquals(hrDept2.toString(), hrDeptStr);
-
- }
-
- @Test
- public void testTrait() throws AtlasException {
-
- TypeSystem ts = getTypeSystem();
- HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
- createTraitTypeDef("SecurityClearance2", ImmutableSet.<String>of(),
- createRequiredAttrDef("level", DataTypes.INT_TYPE));
-
- ts.defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.of(securityClearanceTypeDef),
- ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
-
-
- Struct s = new Struct("SecurityClearance2");
- s.set("level", 1);
- TraitType tType = ts.getDataType(TraitType.class, "SecurityClearance2");
- ITypedInstance t = tType.convert(s, Multiplicity.REQUIRED);
- String jsonStr = Serialization$.MODULE$.toJson(t);
- ITypedInstance t2 = Serialization$.MODULE$.traitFromJson(jsonStr);
- Assert.assertEquals(t.toString(), t2.toString());
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/BaseTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/BaseTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/BaseTest.java
deleted file mode 100755
index 95c99e4..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/BaseTest.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.apache.commons.lang.RandomStringUtils;
-import org.testng.annotations.BeforeMethod;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.util.Date;
-import java.util.Map;
-
-public abstract class BaseTest {
-
- public static final String STRUCT_TYPE_1 = "t1";
- public static final String STRUCT_TYPE_2 = "t2";
- public static final String STRUCT_TYPE_3 = "t3";
- public static final String TEST_DATE = "2014-12-11T02:35:58.440Z";
-
- public static Struct createStruct() throws AtlasException {
- StructType structType = TypeSystem.getInstance().getDataType(StructType.class, STRUCT_TYPE_1);
- Struct s = new Struct(structType.getName());
- s.set("a", 1);
- s.set("b", true);
- s.set("c", (byte) 1);
- s.set("d", (short) 2);
- s.set("e", 1);
- s.set("f", 1);
- s.set("g", 1L);
- s.set("h", 1.0f);
- s.set("i", 1.0);
- s.set("j", BigInteger.valueOf(1L));
- s.set("k", new BigDecimal(1));
- s.set("l", new Date(1418265358440L));
- s.set("m", Lists.asList(1, new Integer[]{1}));
- s.set("n", Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
- Map<String, Double> hm = Maps.newHashMap();
- hm.put("a", 1.0);
- hm.put("b", 2.0);
- s.set("o", hm);
- s.set("p", "");
- s.setNull("q");
- Map<String, String> hm2 = Maps.newHashMap();
- hm2.put("a", "");
- s.set("r", hm2);
- return s;
- }
-
- protected final TypeSystem getTypeSystem() {
- return TypeSystem.getInstance();
- }
-
- @BeforeMethod
- public void setup() throws Exception {
- TypeSystem ts = TypeSystem.getInstance();
- ts.reset();
-
- StructType structType =
- ts.defineStructType(STRUCT_TYPE_1, true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
- TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
- TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
- TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
- TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
- TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
- TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
- TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
- TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
- TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
- TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
- TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
- TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
- TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
- TypesUtil.createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
- TypesUtil.createOptionalAttrDef("p", DataTypes.STRING_TYPE),
- TypesUtil.createOptionalAttrDef("q", DataTypes.STRING_TYPE),
- TypesUtil.createOptionalAttrDef("r", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE)));
- System.out.println("defined structType = " + structType);
-
- StructType recursiveStructType =
- ts.defineStructType(STRUCT_TYPE_2, true, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
- TypesUtil.createOptionalAttrDef("s", STRUCT_TYPE_2));
- System.out.println("defined recursiveStructType = " + recursiveStructType);
-
- StructType invalidStructType =
- ts.defineStructType(STRUCT_TYPE_3, true, TypesUtil.createRequiredAttrDef("a",DataTypes.STRING_TYPE));
- System.out.println("defined structType = " + invalidStructType);
- }
-
- protected Map<String, IDataType> defineTraits(HierarchicalTypeDefinition<TraitType>... tDefs)
- throws AtlasException {
- return getTypeSystem().defineTraitTypes(tDefs);
- }
-
- protected Map<String, IDataType> defineClasses(HierarchicalTypeDefinition<ClassType>... classDefs)
- throws AtlasException {
- return getTypeSystem().defineClassTypes(classDefs);
- }
-
- /*
- * Class Hierarchy is:
- * Department(name : String, employees : Array[Person])
- * Person(name : String, department : Department, manager : Manager)
- * Manager(subordinates : Array[Person]) extends Person
- *
- * Persons can have SecurityClearance(level : Int) clearance.
- */
- protected void defineDeptEmployeeTypes(TypeSystem ts) throws AtlasException {
-
- HierarchicalTypeDefinition<ClassType> deptTypeDef = TypesUtil
- .createClassTypeDef("Department", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("employees", String.format("array<%s>", "Person"),
- Multiplicity.COLLECTION, true, "department"));
- HierarchicalTypeDefinition<ClassType> personTypeDef = TypesUtil
- .createClassTypeDef("Person", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"),
- new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"));
- HierarchicalTypeDefinition<ClassType> managerTypeDef = TypesUtil
- .createClassTypeDef("Manager", ImmutableSet.of("Person"),
- new AttributeDefinition("subordinates", String.format("array<%s>", "Person"),
- Multiplicity.COLLECTION, false, "manager"));
-
- HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef = TypesUtil
- .createTraitTypeDef("SecurityClearance", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE));
-
- ts.defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.of(securityClearanceTypeDef),
- ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef));
-
- ImmutableList.of(ts.getDataType(HierarchicalType.class, "SecurityClearance"),
- ts.getDataType(ClassType.class, "Department"), ts.getDataType(ClassType.class, "Person"),
- ts.getDataType(ClassType.class, "Manager"));
- }
-
- protected Referenceable createDeptEg1(TypeSystem ts) throws AtlasException {
- Referenceable hrDept = new Referenceable("Department");
- Referenceable john = new Referenceable("Person");
- Referenceable jane = new Referenceable("Manager", "SecurityClearance");
-
- hrDept.set("name", "hr");
- john.set("name", "John");
- john.set("department", hrDept);
- jane.set("name", "Jane");
- jane.set("department", hrDept);
-
- john.set("manager", jane);
-
- hrDept.set("employees", ImmutableList.of(john, jane));
-
- jane.set("subordinates", ImmutableList.of(john));
-
- jane.getTrait("SecurityClearance").set("level", 1);
-
- ClassType deptType = ts.getDataType(ClassType.class, "Department");
- deptType.convert(hrDept, Multiplicity.REQUIRED);
-
- return hrDept;
- }
-
- protected String newName() {
- return RandomStringUtils.randomAlphanumeric(10);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/ClassTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/ClassTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/ClassTest.java
deleted file mode 100755
index daecdd7..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/ClassTest.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-
-public class ClassTest extends HierarchicalTypeTest<ClassType> {
-
- @BeforeMethod
- public void setup() throws Exception {
- super.setup();
- }
-
- @Test
- public void test1() throws AtlasException {
-
- TypeSystem ts = getTypeSystem();
-
- defineDeptEmployeeTypes(ts);
- Referenceable hrDept = createDeptEg1(ts);
- ClassType deptType = ts.getDataType(ClassType.class, "Department");
- ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
-
-
- Assert.assertEquals(hrDept2.toString(), "{\n" +
- "\tid : (type: Department, id: <unassigned>)\n" +
- "\tname : \thr\n" +
- "\temployees : \t[{\n" +
- "\tid : (type: Person, id: <unassigned>)\n" +
- "\tname : \tJohn\n" +
- "\tdepartment : (type: Department, id: <unassigned>)\n" +
- "\tmanager : (type: Manager, id: <unassigned>)\n" +
- "}, {\n" +
- "\tid : (type: Manager, id: <unassigned>)\n" +
- "\tsubordinates : \t[{\n" +
- "\tid : (type: Person, id: <unassigned>)\n" +
- "\tname : \tJohn\n" +
- "\tdepartment : (type: Department, id: <unassigned>)\n" +
- "\tmanager : (type: Manager, id: <unassigned>)\n" +
- "}]\n" +
- "\tname : \tJane\n" +
- "\tdepartment : (type: Department, id: <unassigned>)\n" +
- "\tmanager : <null>\n" +
- "\n" +
- "\tSecurityClearance : \t{\n" +
- "\t\tlevel : \t\t1\n" +
- "\t}}]\n" +
- "}");
- }
-
-
- @Test
- public void testSerDeWithoutDescription() throws Exception {
- HierarchicalTypeDefinition<ClassType> clsType = TypesUtil
- .createClassTypeDef("Random", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE));
-
- TypesDef typesDef = getTypesDef(clsType);
- String json = TypesSerialization.toJson(typesDef);
- System.out.println("json " + json);
- TypesSerialization.fromJson(json);
- }
-
- @Test
- public void testSerDeWithDescription() throws Exception {
- HierarchicalTypeDefinition<ClassType> clsType = TypesUtil
- .createClassTypeDef("Random", "Random-description", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE));
- TypesDef typesDef = getTypesDef(clsType);
- String json = TypesSerialization.toJson(typesDef);
- System.out.println("json " + json);
- TypesSerialization.fromJson(json);
- }
- @Override
- protected HierarchicalTypeDefinition<ClassType> getTypeDefinition(String name, AttributeDefinition... attributes) {
- return new HierarchicalTypeDefinition(ClassType.class, name, null, null, attributes);
- }
-
- @Override
- protected HierarchicalTypeDefinition<ClassType> getTypeDefinition(String name, ImmutableSet<String> superTypes,
- AttributeDefinition... attributes) {
- return new HierarchicalTypeDefinition(ClassType.class, name, null, superTypes, attributes);
- }
-
- @Override
- protected TypesDef getTypesDef(StructTypeDefinition typeDefinition) {
- return TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of((HierarchicalTypeDefinition<ClassType>) typeDefinition));
- }
-
- @Override
- protected TypesDef getTypesDef(HierarchicalTypeDefinition<ClassType>... typeDefinitions) {
- return TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(), ImmutableList.copyOf(typeDefinitions));
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/EnumTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/EnumTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/EnumTest.java
deleted file mode 100755
index 2307192..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/EnumTest.java
+++ /dev/null
@@ -1,245 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.util.Date;
-import java.util.Map;
-
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
-
-public class EnumTest extends BaseTest {
-
- @BeforeMethod
- public void setup() throws Exception {
- super.setup();
- }
-
- void defineEnums(TypeSystem ts) throws AtlasException {
- ts.defineEnumType("HiveObjectType", new EnumValue("GLOBAL", 1), new EnumValue("DATABASE", 2),
- new EnumValue("TABLE", 3), new EnumValue("PARTITION", 4), new EnumValue("COLUMN", 5));
-
- ts.defineEnumType("PrincipalType", new EnumValue("USER", 1), new EnumValue("ROLE", 2),
- new EnumValue("GROUP", 3));
-
- ts.defineEnumType("TxnState", new EnumValue("COMMITTED", 1), new EnumValue("ABORTED", 2),
- new EnumValue("OPEN", 3));
-
- ts.defineEnumType("LockLevel", new EnumValue("DB", 1), new EnumValue("TABLE", 2),
- new EnumValue("PARTITION", 3));
- }
-
- @Test
- public void testTypeUpdate() throws Exception {
- TypeSystem ts = getTypeSystem();
- EnumTypeDefinition etd = new EnumTypeDefinition(newName(), new EnumValue("A", 1));
- TypesDef typesDef = getTypesDef(etd);
- ts.defineTypes(typesDef);
-
- //Allow adding new enum
- etd = new EnumTypeDefinition(etd.name, new EnumValue("A", 1), new EnumValue("B", 2));
- typesDef = getTypesDef(etd);
- ts.updateTypes(typesDef);
-
- //Don't allow deleting enum
- etd = new EnumTypeDefinition(etd.name, new EnumValue("A", 1));
- typesDef = getTypesDef(etd);
- try {
- ts.updateTypes(typesDef);
- Assert.fail("Expected TypeUpdateException");
- } catch (TypeUpdateException e) {
- //assert that type is not updated when validation fails
- EnumType enumType = ts.getDataType(EnumType.class, etd.name);
- Assert.assertEquals(enumType.values().size(), 2);
- }
-
- //Don't allow changing ordinal of existing enum value
- etd = new EnumTypeDefinition(etd.name, new EnumValue("A", 2));
- typesDef = getTypesDef(etd);
- try {
- ts.updateTypes(typesDef);
- Assert.fail("Expected TypeUpdateException");
- } catch (TypeUpdateException e) {
- //expected
- }
- }
-
- private TypesDef getTypesDef(EnumTypeDefinition enumTypeDefinition) {
- return TypesUtil.getTypesDef(ImmutableList.of(enumTypeDefinition), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
- }
-
- protected void fillStruct(Struct s) throws AtlasException {
- s.set("a", 1);
- s.set("b", true);
- s.set("c", (byte) 1);
- s.set("d", (short) 2);
- s.set("e", 1);
- s.set("f", 1);
- s.set("g", 1L);
- s.set("h", 1.0f);
- s.set("i", 1.0);
- s.set("j", BigInteger.valueOf(1L));
- s.set("k", new BigDecimal(1));
- s.set("l", new Date(1418265358440L));
- s.set("m", Lists.asList(1, new Integer[]{1}));
- s.set("n", Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
- Map<String, Double> hm = Maps.newHashMap();
- hm.put("a", 1.0);
- hm.put("b", 2.0);
- s.set("o", hm);
- s.set("enum1", "GLOBAL");
- s.set("enum2", 1);
- s.set("enum3", "COMMITTED");
- s.set("enum4", 3);
- }
-
- protected Struct createStructWithEnum(String typeName) throws AtlasException {
- Struct s = new Struct(typeName);
- fillStruct(s);
- return s;
- }
-
- protected Referenceable createInstanceWithEnum(String typeName) throws AtlasException {
- Referenceable r = new Referenceable(typeName);
- fillStruct(r);
- return r;
- }
-
- protected ClassType defineClassTypeWithEnum(TypeSystem ts) throws AtlasException {
- return ts.defineClassType(
- createClassTypeDef("t4", ImmutableSet.<String>of(), createRequiredAttrDef("a", DataTypes.INT_TYPE),
- createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
- createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
- createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
- createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")),
- createOptionalAttrDef("e", DataTypes.INT_TYPE), createOptionalAttrDef("f", DataTypes.INT_TYPE),
- createOptionalAttrDef("g", DataTypes.LONG_TYPE),
- createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")),
- createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
- createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
- createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
- createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
- createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
- createOptionalAttrDef("l", DataTypes.DATE_TYPE),
- createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
- createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
- createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
- createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel"))));
- }
-
- @Test
- public void testStruct() throws AtlasException {
- TypeSystem ts = getTypeSystem();
- defineEnums(ts);
- StructType structType = ts.defineStructType("ts", true, createRequiredAttrDef("a", DataTypes.INT_TYPE),
- createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
- createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
- createOptionalAttrDef("enum1", ts.getDataType(EnumType.class, "HiveObjectType")),
- createOptionalAttrDef("e", DataTypes.INT_TYPE), createOptionalAttrDef("f", DataTypes.INT_TYPE),
- createOptionalAttrDef("g", DataTypes.LONG_TYPE),
- createOptionalAttrDef("enum2", ts.getDataType(EnumType.class, "PrincipalType")),
- createOptionalAttrDef("h", DataTypes.FLOAT_TYPE), createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
- createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
- createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
- createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
- createOptionalAttrDef("l", DataTypes.DATE_TYPE),
- createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
- createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
- createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
- createOptionalAttrDef("enum4", ts.getDataType(EnumType.class, "LockLevel")));
-
- Struct s = createStructWithEnum("ts");
- ITypedStruct typedS = structType.convert(s, Multiplicity.REQUIRED);
- Assert.assertEquals(typedS.toString(), "{\n" +
- "\ta : \t1\n" +
- "\tb : \ttrue\n" +
- "\tc : \t1\n" +
- "\td : \t2\n" +
- "\tenum1 : \tGLOBAL\n" +
- "\te : \t1\n" +
- "\tf : \t1\n" +
- "\tg : \t1\n" +
- "\tenum2 : \tUSER\n" +
- "\th : \t1.0\n" +
- "\ti : \t1.0\n" +
- "\tj : \t1\n" +
- "\tk : \t1\n" +
- "\tenum3 : \tCOMMITTED\n" +
- "\tl : \t" + TEST_DATE + "\n" +
- "\tm : \t[1, 1]\n" +
- "\tn : \t[1.1, 1.1]\n" +
- "\to : \t{a=1.0, b=2.0}\n" +
- "\tenum4 : \tPARTITION\n" +
- "}");
- }
-
- @Test
- public void testClass() throws AtlasException {
- TypeSystem ts = getTypeSystem();
- defineEnums(ts);
- ClassType clsType = defineClassTypeWithEnum(ts);
-
- IReferenceableInstance r = createInstanceWithEnum("t4");
- ITypedReferenceableInstance typedR = clsType.convert(r, Multiplicity.REQUIRED);
- Assert.assertEquals(typedR.toString(), "{\n" +
- "\tid : (type: t4, id: <unassigned>)\n" +
- "\ta : \t1\n" +
- "\tb : \ttrue\n" +
- "\tc : \t1\n" +
- "\td : \t2\n" +
- "\tenum1 : \tGLOBAL\n" +
- "\te : \t1\n" +
- "\tf : \t1\n" +
- "\tg : \t1\n" +
- "\tenum2 : \tUSER\n" +
- "\th : \t1.0\n" +
- "\ti : \t1.0\n" +
- "\tj : \t1\n" +
- "\tk : \t1\n" +
- "\tenum3 : \tCOMMITTED\n" +
- "\tl : \t" + TEST_DATE + "\n" +
- "\tm : \t[1, 1]\n" +
- "\tn : \t[1.1, 1.1]\n" +
- "\to : \t{a=1.0, b=2.0}\n" +
- "\tenum4 : \tPARTITION\n" +
- "}");
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/FieldMappingTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/FieldMappingTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/FieldMappingTest.java
deleted file mode 100644
index 0259ade..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/FieldMappingTest.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.typesystem.types;
-
-import java.util.HashSet;
-
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.Assert;
-import org.testng.annotations.BeforeTest;
-import org.testng.annotations.Test;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-
-
-/**
- * Unit test for {@link FieldMapping}
- *
- */
-public class FieldMappingTest {
-
- @BeforeTest
- public void beforeTest() throws Exception {
- TypeSystem typeSystem = TypeSystem.getInstance();
- typeSystem.reset();
- }
-
- @Test
- public void testOutputReferenceableInstance() throws Exception {
- // ATLAS-645: verify that FieldMapping.output(IReferenceableInstance)
- // does not infinitely recurse when ITypedReferenceableInstance's reference each other.
- HierarchicalTypeDefinition<ClassType> valueDef = TypesUtil.createClassTypeDef("Value",
- ImmutableSet.<String>of(),
- new AttributeDefinition("owner", "Owner", Multiplicity.OPTIONAL, false, null));
-
- // Define class type with reference, where the value is a class reference to Value.
- HierarchicalTypeDefinition<ClassType> ownerDef = TypesUtil.createClassTypeDef("Owner",
- ImmutableSet.<String>of(),
- new AttributeDefinition("value", "Value", Multiplicity.OPTIONAL, false, null));
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(ownerDef, valueDef));
-
- TypeSystem typeSystem = TypeSystem.getInstance();
- typeSystem.defineTypes(typesDef);
- ClassType ownerType = typeSystem.getDataType(ClassType.class, "Owner");
-
- // Prior to fix for ATLAS-645, this call would throw a StackOverflowError
- try {
- ownerType.toString();
- }
- catch (StackOverflowError e) {
- Assert.fail("Infinite recursion in ClassType.toString() caused StackOverflowError");
- }
-
- ClassType valueType = typeSystem.getDataType(ClassType.class, "Value");
-
- // Create instances of Owner and Value that reference each other.
- ITypedReferenceableInstance ownerInstance = ownerType.createInstance();
- ITypedReferenceableInstance valueInstance = valueType.createInstance();
- // Set Owner.value reference to Value instance.
- ownerInstance.set("value", valueInstance);
- // Set Value.owner reference on Owner instance.
- valueInstance.set("owner", ownerInstance);
-
- // Prior to fix for ATLAS-645, this call would throw a StackOverflowError
- try {
- ownerInstance.fieldMapping().output(ownerInstance, new StringBuilder(), "", new HashSet<IReferenceableInstance>());
- }
- catch (StackOverflowError e) {
- Assert.fail("Infinite recursion in FieldMapping.output() caused StackOverflowError");
- }
- }
-
- @Test
- public void testOutputStruct() throws Exception {
- // ATLAS-645: verify that FieldMapping.output(IStruct) does not infinitely recurse
- // when an IStruct and ITypedReferenceableInstance reference each other.
- HierarchicalTypeDefinition<ClassType> valueDef = TypesUtil.createClassTypeDef("Value",
- ImmutableSet.<String>of(),
- new AttributeDefinition("owner", "Owner", Multiplicity.OPTIONAL, false, null));
-
-
- // Define struct type with reference, where the value is a class reference to Value.
- StructTypeDefinition ownerDef = TypesUtil.createStructTypeDef("Owner",
- new AttributeDefinition("value", "Value", Multiplicity.OPTIONAL, false, null));
-
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.of(ownerDef), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(valueDef));
-
- TypeSystem typeSystem = TypeSystem.getInstance();
- typeSystem.reset();
- typeSystem.defineTypes(typesDef);
- StructType ownerType = typeSystem.getDataType(StructType.class, "Owner");
- ClassType valueType = typeSystem.getDataType(ClassType.class, "Value");
-
- // Prior to fix for ATLAS-645, this call would throw a StackOverflowError
- try {
- ownerType.toString();
- }
- catch (StackOverflowError e) {
- Assert.fail("Infinite recursion in StructType.toString() caused StackOverflowError");
- }
-
-
- // Create instances of Owner and Value that reference each other.
- ITypedStruct ownerInstance = ownerType.createInstance();
- ITypedReferenceableInstance valueInstance = valueType.createInstance();
- // Set Owner.value reference to Value instance.
- ownerInstance.set("value", valueInstance);
- // Set Value.owner reference on Owner instance.
- valueInstance.set("owner", ownerInstance);
-
- // Prior to fix for ATLAS-645, this call would throw a StackOverflowError
- try {
- ownerInstance.fieldMapping().output(ownerInstance, new StringBuilder(), "", null);
- }
- catch (StackOverflowError e) {
- Assert.fail("Infinite recursion in FieldMapping.output() caused StackOverflowError");
- }
-
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/HierarchicalTypeDependencySorterTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/HierarchicalTypeDependencySorterTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/HierarchicalTypeDependencySorterTest.java
deleted file mode 100644
index 19bdccf..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/HierarchicalTypeDependencySorterTest.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.typesystem.types;
-
-import java.util.Arrays;
-import java.util.List;
-
-import org.apache.atlas.AtlasException;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import com.google.common.collect.ImmutableSet;
-
-
-public class HierarchicalTypeDependencySorterTest {
-
- @BeforeMethod
- public void setup() throws Exception {
- TypeSystem ts = TypeSystem.getInstance();
- ts.reset();
- }
-
- @SuppressWarnings("rawtypes")
- @Test
- public void testSimpleModel() throws AtlasException {
- TypeSystem ts = TypeSystem.getInstance();
- HierarchicalType a = new ClassType(ts, "a", null, ImmutableSet.<String>of(), 0);
- HierarchicalType b = new ClassType(ts, "B", null, ImmutableSet.of("a"), 0);
- HierarchicalType c = new ClassType(ts, "C", null, ImmutableSet.of("B"), 0);
-
- List<HierarchicalType> unsortedTypes = Arrays.asList(c, a, b);
- List<HierarchicalType> sortedTypes = HierarchicalTypeDependencySorter.sortTypes(unsortedTypes);
- Assert.assertEquals(sortedTypes.size(), 3);
- Assert.assertEquals(sortedTypes.indexOf(a), 0);
- Assert.assertEquals(sortedTypes.indexOf(b), 1);
- Assert.assertEquals(sortedTypes.indexOf(c), 2);
- }
-
- @SuppressWarnings("rawtypes")
- @Test
- public void testLargerModel() throws Exception {
- TypeSystem ts = TypeSystem.getInstance();
- HierarchicalType testObjectType = new ClassType(ts, "TestObject", null, ImmutableSet.<String>of(), 0);
- HierarchicalType testDataSetType = new ClassType(ts, "TestDataSet", null, ImmutableSet.of("TestObject"), 0);
- HierarchicalType testColumnType = new ClassType(ts, "TestColumn", null, ImmutableSet.of("TestObject"), 0);
- HierarchicalType testRelationalDataSetType = new ClassType(ts, "TestRelationalDataSet", null, ImmutableSet.of("TestDataSet"), 0);
- HierarchicalType testTableType = new ClassType(ts, "Table", null, ImmutableSet.of("TestDataSet"), 0);
- HierarchicalType testDataFileType = new ClassType(ts, "TestDataFile", null, ImmutableSet.of("TestRelationalDataSet"), 0);
- HierarchicalType testDocumentType = new ClassType(ts, "TestDocument", null, ImmutableSet.of("TestDataSet"), 0);
- HierarchicalType testAnnotationType = new ClassType(ts, "TestAnnotation", null, ImmutableSet.<String>of(), 0);
- HierarchicalType myNewAnnotationType = new ClassType(ts, "MyNewAnnotation", null, ImmutableSet.of("TestAnnotation"), 0);
-
- List<HierarchicalType> unsortedTypes = Arrays.asList(testTableType, testColumnType, myNewAnnotationType, testDataSetType,
- testDataFileType, testAnnotationType, testRelationalDataSetType, testObjectType, testDocumentType);
- List<HierarchicalType> sortedTypes = HierarchicalTypeDependencySorter.sortTypes(unsortedTypes);
- // Verify that super types were sorted before their subtypes.
- Assert.assertTrue(sortedTypes.indexOf(testObjectType) < sortedTypes.indexOf(testDataSetType));
- Assert.assertTrue(sortedTypes.indexOf(testObjectType) < sortedTypes.indexOf(testColumnType));
- Assert.assertTrue(sortedTypes.indexOf(testDataSetType) < sortedTypes.indexOf(testRelationalDataSetType));
- Assert.assertTrue(sortedTypes.indexOf(testDataSetType) < sortedTypes.indexOf(testDocumentType));
- Assert.assertTrue(sortedTypes.indexOf(testDataSetType) < sortedTypes.indexOf(testTableType));
- Assert.assertTrue(sortedTypes.indexOf(testRelationalDataSetType) < sortedTypes.indexOf(testDataFileType));
- Assert.assertTrue(sortedTypes.indexOf(testAnnotationType) < sortedTypes.indexOf(myNewAnnotationType));
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/HierarchicalTypeTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/HierarchicalTypeTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/HierarchicalTypeTest.java
deleted file mode 100644
index 9f63f52..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/HierarchicalTypeTest.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableSet;
-
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.Assert;
-import org.testng.annotations.Test;
-
-public abstract class HierarchicalTypeTest<T extends HierarchicalType> extends TypeUpdateBaseTest {
- @Test
- public void testTypeUpdate() throws Exception {
- testTypeUpdateForAttributes();
-
- //Test super types
- HierarchicalTypeDefinition baseSuperType =
- getTypeDefinition(newName(), TypesUtil.createOptionalAttrDef("s", DataTypes.INT_TYPE));
-
- HierarchicalTypeDefinition classType = getTypeDefinition(newName(), ImmutableSet.of(baseSuperType.typeName),
- TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE));
- TypeSystem ts = getTypeSystem();
- ts.defineTypes(getTypesDef(baseSuperType, classType));
-
- //Add super type with optional attribute
- HierarchicalTypeDefinition superType =
- getTypeDefinition(newName(), TypesUtil.createOptionalAttrDef("s", DataTypes.INT_TYPE));
- classType = getTypeDefinition(classType.typeName, ImmutableSet.of(superType.typeName, baseSuperType.typeName),
- TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE));
- try {
- ts.updateTypes(getTypesDef(superType, classType));
- Assert.fail("Expected TypeUpdateException");
- } catch (TypeUpdateException e) {
- //expected
- }
-
- //Add super type with required attribute should fail
- HierarchicalTypeDefinition superTypeRequired =
- getTypeDefinition(newName(), TypesUtil.createRequiredAttrDef("s", DataTypes.INT_TYPE));
- classType = getTypeDefinition(classType.typeName,
- ImmutableSet.of(superTypeRequired.typeName, baseSuperType.typeName),
- TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE));
- try {
- ts.updateTypes(getTypesDef(superTypeRequired, classType));
- Assert.fail("Expected TypeUpdateException");
- } catch (TypeUpdateException e) {
- //expected
- }
-
- //Deleting super type should fail
- classType = getTypeDefinition(classType.typeName, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE));
- try {
- ts.updateTypes(getTypesDef(classType));
- Assert.fail("Expected TypeUpdateException");
- } catch (TypeUpdateException e) {
- //expected
- }
- }
-
- @Override
- protected abstract HierarchicalTypeDefinition<T> getTypeDefinition(String name, AttributeDefinition... attributes);
-
- protected abstract HierarchicalTypeDefinition<T> getTypeDefinition(String name, ImmutableSet<String> superTypes,
- AttributeDefinition... attributes);
-
- @Override
- protected abstract TypesDef getTypesDef(StructTypeDefinition typeDefinition);
-
- protected abstract TypesDef getTypesDef(HierarchicalTypeDefinition<T>... typeDefinitions);
-
- @Override
- protected int getNumberOfFields(TypeSystem ts, String typeName) throws Exception {
- return ts.getDataType(HierarchicalType.class, typeName).numFields;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/StructTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/StructTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/StructTest.java
deleted file mode 100755
index 3a1675e..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/StructTest.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import static org.testng.Assert.assertTrue;
-
-public class StructTest extends TypeUpdateBaseTest {
-
- StructType structType;
- StructType recursiveStructType;
- StructType invalidStructType;
-
- @BeforeMethod
- public void setup() throws Exception {
- super.setup();
- structType = getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_1);
- recursiveStructType = getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_2);
- invalidStructType = getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_3);
- }
-
- @Test
- public void test1() throws AtlasException {
- Struct s = createStruct();
- ITypedStruct ts = structType.convert(s, Multiplicity.REQUIRED);
- Assert.assertEquals(ts.toString(), "{\n" +
- "\ta : \t1\n" +
- "\tb : \ttrue\n" +
- "\tc : \t1\n" +
- "\td : \t2\n" +
- "\te : \t1\n" +
- "\tf : \t1\n" +
- "\tg : \t1\n" +
- "\th : \t1.0\n" +
- "\ti : \t1.0\n" +
- "\tj : \t1\n" +
- "\tk : \t1\n" +
- "\tl : \t" + TEST_DATE + "\n" +
- "\tm : \t[1, 1]\n" +
- "\tn : \t[1.1, 1.1]\n" +
- "\to : \t{a=1.0, b=2.0}\n" +
- "\tp : \t\n" +
- "\tq : \t<null>\n"+
- "\tr : \t{a=}\n" +
- "}");
- }
-
- @Test
- public void testStructWithEmptyString() throws AtlasException{
- try {
- assertTrue(getTypeSystem().getTypeNames().contains("t3"));
- Struct s = new Struct(invalidStructType.getName());
- s.set("a", "");
- ITypedStruct ts = invalidStructType.convert(s, Multiplicity.REQUIRED);
- }
- catch (AtlasException e){
- String err = "org.apache.atlas.typesystem.types.ValueConversionException: Cannot convert value 'org.apache.atlas.typesystem.Struct@1ba02' to datatype t3";
- Assert.assertEquals(e.toString(), err);
- }
- }
-
- @Test
- public void testRecursive() throws AtlasException {
- Struct s1 = new Struct(recursiveStructType.getName());
- s1.set("a", 1);
- Struct s2 = new Struct(recursiveStructType.getName());
- s2.set("a", 1);
- s2.set("s", s1);
- ITypedStruct ts = recursiveStructType.convert(s2, Multiplicity.REQUIRED);
- Assert.assertEquals(ts.toString(), "{\n" +
- "\ta : \t1\n" +
- "\ts : \t{\n" +
- "\t\ta : \t\t1\n" +
- "\t\ts : <null>\n" +
- "\n" +
- "\t}\n" +
- "}");
- }
-
- @Test
- public void testTypeUpdate() throws Exception {
- testTypeUpdateForAttributes();
- }
-
- @Override
- protected int getNumberOfFields(TypeSystem ts, String typeName) throws Exception {
- return ts.getDataType(StructType.class, typeName).numFields;
- }
-
- @Override
- protected StructTypeDefinition getTypeDefinition(String name, AttributeDefinition... attributes) {
- return new StructTypeDefinition(name, attributes);
- }
-
- @Override
- protected TypesDef getTypesDef(StructTypeDefinition typeDefinition) {
- return TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.of(typeDefinition),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/TraitTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/TraitTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/TraitTest.java
deleted file mode 100755
index 7c39213..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/TraitTest.java
+++ /dev/null
@@ -1,247 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createTraitTypeDef;
-
-public class TraitTest extends HierarchicalTypeTest<TraitType> {
-
-
- @BeforeMethod
- public void setup() throws Exception {
- super.setup();
- }
-
- /*
- * Type Hierarchy is:
- * A(a,b,c,d)
- * B(b) extends A
- * C(c) extends A
- * D(d) extends B,C
- *
- * - There are a total of 11 fields in an instance of D
- * - an attribute that is hidden by a SubType can referenced by prefixing it with the
- * complete Path.
- * For e.g. the 'b' attribute in A (that is a superType for B) is hidden the 'b' attribute
- * in B.
- * So it is available by the name 'A.B.D.b'
- *
- * - Another way to set attributes is to cast. Casting a 'D' instance of 'B' makes the 'A.B.D
- * .b' attribute
- * available as 'A.B.b'. Casting one more time to an 'A' makes the 'A.B.b' attribute
- * available as 'b'.
- */
- @Test
- public void test1() throws AtlasException {
- HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
- createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
- createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
- HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableSet.of("A"),
- createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
- HierarchicalTypeDefinition C =
- createTraitTypeDef("C", ImmutableSet.of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
- HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableSet.of("B", "C"),
- createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
-
- defineTraits(A, B, C, D);
-
- TraitType DType = getTypeSystem().getDataType(TraitType.class, "D");
-
- // for(String aName : DType.fieldMapping().fields.keySet()) {
- // System.out.println(String.format("nameToQualifiedName.put(\"%s\", \"%s\");", aName, DType
- // .getQualifiedName(aName)));
- // }
-
- Map<String, String> nameToQualifiedName = new HashMap();
- {
- nameToQualifiedName.put("d", "D.d");
- nameToQualifiedName.put("b", "B.b");
- nameToQualifiedName.put("c", "C.c");
- nameToQualifiedName.put("a", "A.a");
- nameToQualifiedName.put("A.B.D.b", "A.B.D.b");
- nameToQualifiedName.put("A.B.D.c", "A.B.D.c");
- nameToQualifiedName.put("A.B.D.d", "A.B.D.d");
- nameToQualifiedName.put("A.C.D.a", "A.C.D.a");
- nameToQualifiedName.put("A.C.D.b", "A.C.D.b");
- nameToQualifiedName.put("A.C.D.c", "A.C.D.c");
- nameToQualifiedName.put("A.C.D.d", "A.C.D.d");
- }
-
- Struct s1 = new Struct("D");
- s1.set("d", 1);
- s1.set("c", 1);
- s1.set("b", true);
- s1.set("a", 1);
- s1.set("A.B.D.b", true);
- s1.set("A.B.D.c", 2);
- s1.set("A.B.D.d", 2);
-
- s1.set("A.C.D.a", 3);
- s1.set("A.C.D.b", false);
- s1.set("A.C.D.c", 3);
- s1.set("A.C.D.d", 3);
-
-
- ITypedStruct ts = DType.convert(s1, Multiplicity.REQUIRED);
- Assert.assertEquals(ts.toString(), "{\n" +
- "\td : \t1\n" +
- "\tb : \ttrue\n" +
- "\tc : \t1\n" +
- "\ta : \t1\n" +
- "\tA.B.D.b : \ttrue\n" +
- "\tA.B.D.c : \t2\n" +
- "\tA.B.D.d : \t2\n" +
- "\tA.C.D.a : \t3\n" +
- "\tA.C.D.b : \tfalse\n" +
- "\tA.C.D.c : \t3\n" +
- "\tA.C.D.d : \t3\n" +
- "}");
-
- /*
- * cast to B and set the 'b' attribute on A.
- */
- TraitType BType = getTypeSystem().getDataType(TraitType.class, "B");
- IStruct s2 = DType.castAs(ts, "B");
- s2.set("A.B.b", false);
-
- Assert.assertEquals(ts.toString(), "{\n" +
- "\td : \t1\n" +
- "\tb : \ttrue\n" +
- "\tc : \t1\n" +
- "\ta : \t1\n" +
- "\tA.B.D.b : \tfalse\n" +
- "\tA.B.D.c : \t2\n" +
- "\tA.B.D.d : \t2\n" +
- "\tA.C.D.a : \t3\n" +
- "\tA.C.D.b : \tfalse\n" +
- "\tA.C.D.c : \t3\n" +
- "\tA.C.D.d : \t3\n" +
- "}");
-
- /*
- * cast again to A and set the 'b' attribute on A.
- */
- TraitType AType = getTypeSystem().getDataType(TraitType.class, "A");
- IStruct s3 = BType.castAs(s2, "A");
- s3.set("b", true);
- Assert.assertEquals(ts.toString(), "{\n" +
- "\td : \t1\n" +
- "\tb : \ttrue\n" +
- "\tc : \t1\n" +
- "\ta : \t1\n" +
- "\tA.B.D.b : \ttrue\n" +
- "\tA.B.D.c : \t2\n" +
- "\tA.B.D.d : \t2\n" +
- "\tA.C.D.a : \t3\n" +
- "\tA.C.D.b : \tfalse\n" +
- "\tA.C.D.c : \t3\n" +
- "\tA.C.D.d : \t3\n" +
- "}");
- }
-
- @Test
- public void testRandomOrder() throws AtlasException {
- HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
- createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
- createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
- HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableSet.of("A"),
- createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
- HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableSet.of("A"),
- createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
- HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableSet.of("B", "C"),
- createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
-
- defineTraits(B, D, A, C);
-
- TraitType DType = getTypeSystem().getDataType(TraitType.class, "D");
-
- Struct s1 = new Struct("D");
- s1.set("d", 1);
- s1.set("c", 1);
- s1.set("b", true);
- s1.set("a", 1);
- s1.set("A.B.D.b", true);
- s1.set("A.B.D.c", 2);
- s1.set("A.B.D.d", 2);
-
- s1.set("A.C.D.a", 3);
- s1.set("A.C.D.b", false);
- s1.set("A.C.D.c", 3);
- s1.set("A.C.D.d", 3);
-
-
- ITypedStruct ts = DType.convert(s1, Multiplicity.REQUIRED);
- Assert.assertEquals(ts.toString(), "{\n" +
- "\td : \t1\n" +
- "\tb : \ttrue\n" +
- "\tc : \t1\n" +
- "\ta : \t1\n" +
- "\tA.B.D.b : \ttrue\n" +
- "\tA.B.D.c : \t2\n" +
- "\tA.B.D.d : \t2\n" +
- "\tA.C.D.a : \t3\n" +
- "\tA.C.D.b : \tfalse\n" +
- "\tA.C.D.c : \t3\n" +
- "\tA.C.D.d : \t3\n" +
- "}");
- }
-
- @Override
- protected HierarchicalTypeDefinition<TraitType> getTypeDefinition(String name, AttributeDefinition... attributes) {
- return new HierarchicalTypeDefinition(TraitType.class, name, null, null, attributes);
- }
-
- @Override
- protected HierarchicalTypeDefinition<TraitType> getTypeDefinition(String name, ImmutableSet<String> superTypes,
- AttributeDefinition... attributes) {
- return new HierarchicalTypeDefinition(TraitType.class, name, null, superTypes, attributes);
- }
-
- @Override
- protected TypesDef getTypesDef(StructTypeDefinition typeDefinition) {
- return TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.of((HierarchicalTypeDefinition<TraitType>) typeDefinition),
- ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
- }
-
- @Override
- protected TypesDef getTypesDef(HierarchicalTypeDefinition<TraitType>... typeDefinitions) {
- return TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.copyOf(typeDefinitions), ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
- }
-}
-
[26/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/scala/org/apache/atlas/query/Expressions.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/Expressions.scala b/repository/src/main/scala/org/apache/atlas/query/Expressions.scala
deleted file mode 100644
index bf9efd2..0000000
--- a/repository/src/main/scala/org/apache/atlas/query/Expressions.scala
+++ /dev/null
@@ -1,923 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import com.google.common.collect.ImmutableCollection
-import org.apache.atlas.AtlasException
-import org.apache.atlas.typesystem.types.DataTypes.{ArrayType, PrimitiveType, TypeCategory}
-import org.apache.atlas.typesystem.types._
-import scala.collection.JavaConverters._
-
-object Expressions {
-
- import TypeUtils._
-
- class ExpressionException(val e: Expression, message: String, cause: Throwable, enableSuppression: Boolean,
- writableStackTrace: Boolean)
- extends AtlasException(message, cause, enableSuppression, writableStackTrace) {
-
- def this(e: Expression, message: String) {
- this(e, message, null, false, true)
- }
-
- def this(e: Expression, message: String, cause: Throwable) {
- this(e, message, cause, false, true)
- }
-
- def this(e: Expression, cause: Throwable) {
- this(e, null, cause, false, true)
- }
-
- override def getMessage: String = {
- val eString = e.toString
- s"${super.getMessage}, expression:${if (eString contains "\n") "\n" else " "}$e"
- }
-
- }
-
- class UnresolvedException(expr: Expression, function: String) extends
- ExpressionException(expr, s"Unresolved $function")
-
- def attachExpression[A](e: Expression, msg: String = "")(f: => A): A = {
- try f catch {
- case eex: ExpressionException => throw eex
- case ex: Exception => throw new ExpressionException(e, msg, ex)
- }
- }
-
- trait Expression {
- self: Product =>
- def isAggregator = false
- def children: Seq[Expression]
-
- /**
- * Returns `true` if the schema for this expression and all its children have been resolved.
- * The default logic is that an Expression is resolve if all its children are resolved.
- */
- lazy val resolved: Boolean = childrenResolved
-
- /**
- * Returns the output [[IDataType[_]] of this expression. Expressions that are unresolved will
- * throw if this method is invoked.
- */
- def dataType: IDataType[_]
-
- /**
- * Returns true if all the children have been resolved.
- */
- def childrenResolved = !children.exists(!_.resolved)
-
-
- /**
- * the aliases that are present in this Expression Tree
- */
- def namedExpressions: Map[String, Expression] = Map()
-
- def fastEquals(other: Expression): Boolean = {
- this.eq(other) || this == other
- }
-
- def makeCopy(newArgs: Array[AnyRef]): this.type = attachExpression(this, "makeCopy") {
- try {
- val defaultCtor = getClass.getConstructors.find(_.getParameterTypes.size != 0).head
- defaultCtor.newInstance(newArgs: _*).asInstanceOf[this.type]
- } catch {
- case e: java.lang.IllegalArgumentException =>
- throw new ExpressionException(
- this, s"Failed to copy node. Reason: ${e.getMessage}.")
- }
- }
-
- def transformChildrenDown(rule: PartialFunction[Expression, Expression]): this.type = {
- var changed = false
- val newArgs = productIterator.map {
- case arg: Expression if children contains arg =>
- val newChild = arg.asInstanceOf[Expression].transformDown(rule)
- if (!(newChild fastEquals arg)) {
- changed = true
- newChild
- } else {
- arg
- }
- case Some(arg: Expression) if children contains arg =>
- val newChild = arg.asInstanceOf[Expression].transformDown(rule)
- if (!(newChild fastEquals arg)) {
- changed = true
- Some(newChild)
- } else {
- Some(arg)
- }
- case m: Map[_, _] => m
- case args: Traversable[_] => args.map {
- case arg: Expression if children contains arg =>
- val newChild = arg.asInstanceOf[Expression].transformDown(rule)
- if (!(newChild fastEquals arg)) {
- changed = true
- newChild
- } else {
- arg
- }
- case other => other
- }
- case nonChild: AnyRef => nonChild
- case null => null
- }.toArray
- if (changed) makeCopy(newArgs) else this
- }
-
- def transformDown(rule: PartialFunction[Expression, Expression]): Expression = {
- val afterRule = rule.applyOrElse(this, identity[Expression])
- // Check if unchanged and then possibly return old copy to avoid gc churn.
- if (this fastEquals afterRule) {
- transformChildrenDown(rule)
- } else {
- afterRule.transformChildrenDown(rule)
- }
- }
-
- def traverseChildren(traverseFunc: (Expression, PartialFunction[Expression, Unit]) => Unit)
- (rule: PartialFunction[Expression, Unit]): Unit = {
- productIterator.foreach {
- case arg: Expression if children contains arg =>
- traverseFunc(arg.asInstanceOf[Expression], rule)
- case Some(arg: Expression) if children contains arg =>
- traverseFunc(arg.asInstanceOf[Expression], rule)
- case m: Map[_, _] => m
- case args: Traversable[_] => args.map {
- case arg: Expression if children contains arg =>
- traverseFunc(arg.asInstanceOf[Expression], rule)
- case other => other
- }
- case nonChild: AnyRef => nonChild
- case null => null
- }
- }
-
- def traverseChildrenDown = traverseChildren(_traverseDown) _
-
- private def _traverseDown(e: Expression, rule: PartialFunction[Expression, Unit]): Unit = {
- if (rule.isDefinedAt(e)) {
- rule.apply(e)
- }
- e.traverseChildrenDown(rule)
- }
-
- def traverseDown(rule: PartialFunction[Expression, Unit]): Unit = {
- _traverseDown(this, rule)
- }
-
- def traverseChildrenUp = traverseChildren(_traverseUp) _
-
- private def _traverseUp(e: Expression, rule: PartialFunction[Expression, Unit]): Unit = {
- e.traverseChildrenUp(rule)
- if (rule.isDefinedAt(e)) {
- rule.apply(e)
- }
- }
-
- def traverseUp(rule: PartialFunction[Expression, Unit]): Unit = {
- _traverseUp(this, rule)
- }
-
- def transformUp(rule: PartialFunction[Expression, Expression]): Expression = {
- val afterRuleOnChildren = transformChildrenUp(rule);
- if (this fastEquals afterRuleOnChildren) {
- rule.applyOrElse(this, identity[Expression])
- } else {
- rule.applyOrElse(afterRuleOnChildren, identity[Expression])
- }
- }
-
- def transformChildrenUp(rule: PartialFunction[Expression, Expression]): this.type = {
- var changed = false
- val newArgs = productIterator.map {
- case arg: Expression if children contains arg =>
- val newChild = arg.asInstanceOf[Expression].transformUp(rule)
- if (!(newChild fastEquals arg)) {
- changed = true
- newChild
- } else {
- arg
- }
- case Some(arg: Expression) if children contains arg =>
- val newChild = arg.asInstanceOf[Expression].transformUp(rule)
- if (!(newChild fastEquals arg)) {
- changed = true
- Some(newChild)
- } else {
- Some(arg)
- }
- case m: Map[_, _] => m
- case args: Traversable[_] => args.map {
- case arg: Expression if children contains arg =>
- val newChild = arg.asInstanceOf[Expression].transformUp(rule)
- if (!(newChild fastEquals arg)) {
- changed = true
- newChild
- } else {
- arg
- }
- case other => other
- }
- case nonChild: AnyRef => nonChild
- case null => null
- }.toArray
- if (changed) makeCopy(newArgs) else this
- }
-
- /*
- * treeString methods
- */
- def nodeName = getClass.getSimpleName
-
- def argString: String = productIterator.flatMap {
- case e: Expression if children contains e => Nil
- case e: Expression if e.toString contains "\n" => s"(${e.simpleString})" :: Nil
- case seq: Seq[_] => seq.mkString("[", ",", "]") :: Nil
- case set: Set[_] => set.mkString("{", ",", "}") :: Nil
- case f: IDataType[_] => f.getName :: Nil
- case other => other :: Nil
- }.mkString(", ")
-
- /** String representation of this node without any children */
- def simpleString = s"$nodeName $argString"
-
- protected def generateTreeString(depth: Int, builder: StringBuilder): StringBuilder = {
- builder.append(" " * depth)
- builder.append(simpleString)
- builder.append("\n")
- children.foreach(_.generateTreeString(depth + 1, builder))
- builder
- }
-
- def treeString = generateTreeString(0, new StringBuilder).toString
-
- /*
- * Fluent API methods
- */
- def field(fieldName: String) = new UnresolvedFieldExpression(this, fieldName)
-
- def join(fieldName: String) = field(fieldName)
-
- def `.`(fieldName: String) = field(fieldName)
-
- def as(alias: String) = new AliasExpression(this, alias)
-
- def arith(op: String)(rightExpr: Expression) = new ArithmeticExpression(op, this, rightExpr)
-
- def + = arith("+") _
-
- def - = arith("-") _
-
- def * = arith("*") _
-
- def / = arith("/") _
-
- def % = arith("%") _
-
- def isTrait(name: String) = new isTraitUnaryExpression(name, this)
-
- def hasField(name: String) = new hasFieldUnaryExpression(name, this)
-
- def compareOp(op: String)(rightExpr: Expression) = new ComparisonExpression(op, this, rightExpr)
-
- def `=` = compareOp("=") _
-
- def `!=` = compareOp("!=") _
-
- def `>` = compareOp(">") _
-
- def `>=` = compareOp(">=") _
-
- def `<` = compareOp("<") _
-
- def `<=` = compareOp("=") _
-
- def logicalOp(op: String)(rightExpr: Expression) = new LogicalExpression(op, List(this, rightExpr))
-
- def and = logicalOp("and") _
-
- def or = logicalOp("or") _
-
- def where(condExpr: Expression) = new FilterExpression(this, condExpr)
-
- def select(selectList: Expression*) = new SelectExpression(this, selectList.toList)
-
- def loop(loopingExpr: Expression) = new LoopExpression(this, loopingExpr, None)
-
- def loop(loopingExpr: Expression, times: Literal[Integer]) =
- new LoopExpression(this, loopingExpr, Some(times))
-
- def traitInstance() = new TraitInstanceExpression(this)
- def instance() = new InstanceExpression(this)
-
- def path() = new PathExpression(this)
-
- def limit(lmt: Literal[Integer], offset : Literal[Integer]) = new LimitExpression(this, lmt, offset)
-
- def order(odr: Expression, asc: Boolean) = new OrderExpression(this, odr, asc)
-
- def max(maxClause: Expression) = new MaxExpression(maxClause)
-
- def min(minClause: Expression) = new MinExpression(minClause)
-
- def groupBy(groupBy: SelectExpression, selectExpr: SelectExpression) = new GroupByExpression(this, groupBy, selectExpr)
- }
-
- trait BinaryNode {
- self: Expression =>
- def left: Expression
-
- def right: Expression
-
- def children = Seq(left, right)
-
- override def namedExpressions = left.namedExpressions ++ right.namedExpressions
- }
-
- trait LeafNode {
- def children = Nil
- }
-
- trait UnaryNode {
- self: Expression =>
- def child: Expression
-
- override def namedExpressions = child.namedExpressions
-
- def children = child :: Nil
- }
-
- abstract class BinaryExpression extends Expression with BinaryNode {
- self: Product =>
- def symbol: String
-
- override def toString = s"($left $symbol $right)"
- }
-
- case class ClassExpression(clsName: String) extends Expression with LeafNode {
- val dataType = typSystem.getDataType(classOf[ClassType], clsName)
-
- override def toString = clsName
- }
-
- def _class(name: String): Expression = new ClassExpression(name)
-
- case class TraitExpression(traitName: String) extends Expression with LeafNode {
- val dataType = typSystem.getDataType(classOf[TraitType], traitName)
-
- override def toString = traitName
- }
-
- def _trait(name: String) = new TraitExpression(name)
-
- object IdExpressionType extends Enumeration {
- val Unresolved, NonType = Value;
-
- class IdExpressionTypeValue(exprValue : Value) {
-
- def isTypeAllowed = exprValue match {
- case Unresolved => true
- case _ => false
- }
- }
- import scala.language.implicitConversions
- implicit def value2ExprValue(exprValue: Value) = new IdExpressionTypeValue(exprValue)
- }
-
- case class IdExpression(name: String, exprType: IdExpressionType.Value) extends Expression with LeafNode {
- override def toString = name
-
- override lazy val resolved = false
-
- override def dataType = throw new UnresolvedException(this, "id")
- }
-
- /**
- * Creates an IdExpression whose allowed value type will be determined
- * later.
- */
- def id(name: String) = new IdExpression(name, IdExpressionType.Unresolved)
-
- /**
- * Creates an IdExpression whose value must resolve to a field name
- */
- def fieldId(name: String) = new IdExpression(name, IdExpressionType.NonType)
-
- case class UnresolvedFieldExpression(child: Expression, fieldName: String) extends Expression
- with UnaryNode {
- override def toString = s"${child}.$fieldName"
- override def isAggregator = child.isAggregator
- override lazy val resolved = false
-
- override def dataType = throw new UnresolvedException(this, "field")
- }
-
- case class FieldExpression(fieldName: String, fieldInfo: FieldInfo, child: Option[Expression])
- extends Expression {
-
- def elemType(t: IDataType[_]): IDataType[_] = {
- if (t.getTypeCategory == TypeCategory.ARRAY) {
- val aT = t.asInstanceOf[ArrayType]
- if (aT.getElemType.getTypeCategory == TypeCategory.CLASS ||
- aT.getElemType.getTypeCategory == TypeCategory.STRUCT) {
- return aT.getElemType
- }
- }
- t
- }
-
- val children = if (child.isDefined) List(child.get) else Nil
- import scala.language.existentials
- lazy val dataType = {
- val t = {
- if (fieldInfo.traitName != null ) {
- typSystem.getDataType(classOf[TraitType], fieldInfo.traitName)
- } else if (!fieldInfo.isReverse) {
- fieldInfo.attrInfo.dataType()
- } else {
- fieldInfo.reverseDataType
- }
- }
- elemType(t)
- }
- override lazy val resolved: Boolean = true
-
- override def namedExpressions = if (child.isDefined) child.get.namedExpressions else Map()
-
- override def toString = {
- if (child.isDefined) {
- val sep = if (dataType.isInstanceOf[ClassType]) " " else "."
- s"${child.get}${sep}$fieldName"
- } else {
- fieldName
- }
- }
- }
-
- case class AliasExpression(child: Expression, alias: String) extends Expression with UnaryNode {
- override def namedExpressions = child.namedExpressions + (alias -> child)
-
- override def toString = s"$child as $alias"
- override def isAggregator = child.isAggregator
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved child")
- }
- child.dataType
- }
- }
-
- case class BackReference(alias: String, reference: Expression, child: Option[Expression]) extends Expression {
- val children = if (child.isDefined) List(child.get) else Nil
- val dataType = reference.dataType
-
- override def namedExpressions = if (child.isDefined) child.get.namedExpressions else Map()
-
- override def toString = if (child.isDefined) s"${child.get} $alias" else alias
- }
-
- case class Literal[T](dataType: PrimitiveType[T], rawValue: Any) extends Expression with LeafNode {
- val value = if (rawValue == null) dataType.nullValue() else dataType.convert(rawValue, Multiplicity.REQUIRED)
-
- override def toString = value match {
- case s: String => s""""$s""""
- case x => x.toString
- }
- }
-
- import scala.collection.JavaConversions._
- case class ListLiteral[_](dataType: ArrayType, rawValue: List[Expressions.Literal[_]]) extends Expression with LeafNode {
-
- val lc : java.util.List[Expressions.Literal[_]] = rawValue
- val value = if (rawValue != null) dataType.convert(lc, Multiplicity.REQUIRED)
-
- override def toString = value match {
- case l: Seq[_]
- => l.mkString("[",",","]")
- case c: ImmutableCollection[_] =>
- c.asList.mkString("[",",","]")
- case x =>
- x.toString
- }
- }
-
- def literal[T](typ: PrimitiveType[T], rawValue: Any) = new Literal[T](typ, rawValue)
-
- def boolean(rawValue: Any) = literal(DataTypes.BOOLEAN_TYPE, rawValue)
-
- def byte(rawValue: Any) = literal(DataTypes.BYTE_TYPE, rawValue)
-
- def short(rawValue: Any) = literal(DataTypes.SHORT_TYPE, rawValue)
-
- def int(rawValue: Any) = literal(DataTypes.INT_TYPE, rawValue)
-
- def long(rawValue: Any) = literal(DataTypes.LONG_TYPE, rawValue)
-
- def float(rawValue: Any) = literal(DataTypes.FLOAT_TYPE, rawValue)
-
- def double(rawValue: Any) = literal(DataTypes.DOUBLE_TYPE, rawValue)
-
- def bigint(rawValue: Any) = literal(DataTypes.BIGINTEGER_TYPE, rawValue)
-
- def bigdecimal(rawValue: Any) = literal(DataTypes.BIGDECIMAL_TYPE, rawValue)
-
- def string(rawValue: Any) = literal(DataTypes.STRING_TYPE, rawValue)
-
- def date(rawValue: Any) = literal(DataTypes.DATE_TYPE, rawValue)
-
- def list[_ <: PrimitiveType[_]](listElements: List[Expressions.Literal[_]]) = {
- listLiteral(TypeSystem.getInstance().defineArrayType(listElements.head.dataType), listElements)
- }
-
- def listLiteral[_ <: PrimitiveType[_]](typ: ArrayType, rawValue: List[Expressions.Literal[_]]) = new ListLiteral(typ, rawValue)
-
- def count() = new CountExpression()
-
- def maxExpr(maxClause: Expression) = new MaxExpression(maxClause)
-
- def minExpr(minClause: Expression) = new MinExpression(minClause)
-
- def sumExpr(sumClause: Expression) = new SumExpression(sumClause)
-
- case class ArithmeticExpression(symbol: String,
- left: Expression,
- right: Expression)
- extends BinaryExpression {
-
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
- TypeUtils.combinedType(left.dataType, right.dataType)
- }
- }
-
- case class isTraitLeafExpression(traitName: String, classExpression: Option[Expression] = None)
- extends Expression with LeafNode {
- // validate TraitName
- try {
- typSystem.getDataType(classOf[TraitType], traitName)
- } catch {
- case me: AtlasException => throw new ExpressionException(this, "not a TraitType", me)
- }
-
- override lazy val resolved = classExpression.isDefined
- lazy val dataType = {
-
- if (!resolved) {
- throw new UnresolvedException(this,
- s"cannot resolve isTrait application")
- }
-
- if (!classExpression.get.dataType.isInstanceOf[ClassType]) {
- throw new ExpressionException(this,
- s"Cannot apply isTrait on ${classExpression.get.dataType.getName}, it is not a ClassType")
- }
- DataTypes.BOOLEAN_TYPE
- }
-
- override def toString = s"${classExpression.getOrElse("")} is $traitName"
- }
-
- def isTrait(name: String) = new isTraitLeafExpression(name)
-
- case class isTraitUnaryExpression(traitName: String, child: Expression)
- extends Expression with UnaryNode {
- // validate TraitName
- typSystem.getDataType(classOf[TraitType], traitName)
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved child")
- }
- if (!child.dataType.isInstanceOf[ClassType]) {
- throw new ExpressionException(this,
- s"Cannot apply isTrait on ${child.dataType.getName}, it is not a ClassType")
- }
- DataTypes.BOOLEAN_TYPE
- }
-
- override def toString = s"$child is $traitName"
- }
-
- case class hasFieldLeafExpression(fieldName: String, classExpression: Option[Expression] = None)
- extends Expression with LeafNode {
-
- override lazy val resolved = classExpression.isDefined
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"Cannot apply hasField on ${classExpression.get.dataType.getName}, it is not a ClassType")
- }
- if (classExpression.isDefined && !TypeUtils.fieldMapping(classExpression.get.dataType).isDefined) {
- throw new ExpressionException(this, s"Cannot apply hasField on ${classExpression.get.dataType.getName}")
- }
- DataTypes.BOOLEAN_TYPE
- }
-
- override def toString = s"${classExpression.getOrElse("")} has $fieldName"
- }
-
- def hasField(name: String) = new hasFieldLeafExpression(name)
-
- case class hasFieldUnaryExpression(fieldName: String, child: Expression)
- extends Expression with UnaryNode {
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved child")
- }
- if (!TypeUtils.fieldMapping(child.dataType).isDefined) {
- throw new AtlasException(s"Cannot apply hasField on ${child.dataType.getName}")
- }
- DataTypes.BOOLEAN_TYPE
- }
-
- override def toString = s"$child has $fieldName"
- }
-
- case class ComparisonExpression(symbol: String,
- left: Expression,
- right: Expression)
- extends BinaryExpression {
-
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
-
- if(left.dataType.getName.startsWith(DataTypes.ARRAY_TYPE_PREFIX)) {
- left.dataType;
- } else if(left.dataType == DataTypes.DATE_TYPE) {
- DataTypes.DATE_TYPE
- }
- else if(left.dataType == DataTypes.BOOLEAN_TYPE) {
- DataTypes.BOOLEAN_TYPE;
- }
- else if (left.dataType != DataTypes.STRING_TYPE || right.dataType != DataTypes.STRING_TYPE) {
- TypeUtils.combinedType(left.dataType, right.dataType)
- }
- DataTypes.BOOLEAN_TYPE
- }
- }
-
- case class LogicalExpression(symbol: String, children: List[Expression])
- extends Expression {
- assert(children.size > 0)
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
- children.foreach { childExpr =>
- if (childExpr.dataType != DataTypes.BOOLEAN_TYPE) {
- throw new AtlasException(
- s"Cannot apply logical operator '$symbol' on input of type '${childExpr.dataType}")
- }
- }
- DataTypes.BOOLEAN_TYPE
- }
-
- override def toString = children.mkString("", s" $symbol ", "")
- }
-
- case class FilterExpression(val child: Expression, val condExpr: Expression) extends Expression {
- val children = List(child, condExpr)
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
- if (condExpr.dataType != DataTypes.BOOLEAN_TYPE) {
- throw new ExpressionException(this, s"Filter condition '$condExpr' is not a boolean expression")
- }
- child.dataType
- }
-
- override def namedExpressions = child.namedExpressions ++ condExpr.namedExpressions
-
- override def toString = s"$child where $condExpr"
- }
-
- case class SelectExpression(child: Expression, selectList: List[Expression], forGroupBy: Boolean = false) extends Expression {
- val children = List(child) ::: selectList
-
- def hasAggregation = {
- var result = false;
- selectList.foreach { expr =>
- {
- result = result || expr.isAggregator
- }
- }
- result
- }
-
- lazy val selectListWithAlias = selectList.zipWithIndex map {
- case (s: AliasExpression, _) => s
- case (x, i) => new AliasExpression(x, s"${x}")
- }
-
-
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
- TypeUtils.createStructType(selectListWithAlias)
- }
-
- override def namedExpressions = child.namedExpressions ++ (selectList.flatMap(_.namedExpressions))
-
- override def toString = {
- //When this is part of a group by, the child is only present so that the select
- //list gets translated correctly. It is not really part of the query. The child
- //ends up both in the GroupByExpression as well as here. We only want to show it
- //in the GroupByExpression. Hide it here.
- var prefix = if(forGroupBy) { "" } else { s"""${child} select """ }
- s"""${prefix}${selectListWithAlias.mkString("", ", ", "")}"""
- }
-
- def toJavaList = selectListWithAlias.asJava
- }
-
- case class LoopExpression(val input: Expression, val loopingExpression: Expression,
- val times: Option[Literal[Integer]]) extends Expression {
- val children = List(input, loopingExpression)
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
- if (input.dataType.getTypeCategory != TypeCategory.CLASS) {
- throw new ExpressionException(this, s"Loop Expression applied to type : '${input.dataType.getName}';" +
- " loop can only be applied to Class Expressions")
- }
- if (input.dataType != loopingExpression.dataType) {
- throw new ExpressionException(this,
- s"Invalid Loop Expression; input and loopExpression dataTypes don't match: " +
- s"(${input.dataType.getName},${loopingExpression.dataType.getName}})")
- }
- input.dataType
- }
-
- override def namedExpressions = input.namedExpressions
-
- override def toString = {
- if (times.isDefined) s"$input loop ($loopingExpression) times ${times.get.value}"
- else s"$input loop ($loopingExpression)"
- }
- }
-
- case class TraitInstanceExpression(child: Expression)
- extends Expression with UnaryNode {
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved child")
- }
- if (!child.dataType.isInstanceOf[TraitType]) {
- throw new ExpressionException(this,
- s"Cannot apply instance on ${child.dataType.getName}, it is not a TraitType")
- }
- typSystem.getIdType.getStructType
- }
-
- override def toString = s"$child traitInstance"
- }
-
- case class InstanceExpression(child: Expression)
- extends Expression with UnaryNode {
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved child")
- }
- typSystem.getIdType.getStructType
- }
-
- override def toString = s"$child instance"
- }
-
- case class PathExpression(child: Expression)
- extends Expression with UnaryNode {
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved child")
- }
- TypeUtils.ResultWithPathStruct.createType(this, child.dataType)
- }
-
- override def toString = s"$child withPath"
- }
-
- case class LimitExpression(child: Expression, limit: Literal[Integer], offset: Literal[Integer]) extends Expression with UnaryNode {
-
- override def toString = s"$child limit $limit offset $offset "
-
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
- child.dataType
- }
- }
-
- case class OrderExpression(child: Expression, odr: Expression, asc: Boolean) extends Expression with UnaryNode {
-
- override def toString = s"$child orderby $odr asc $asc"
-
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
- child.dataType
- }
- }
-
- case class CountExpression() extends Expression {
- override def isAggregator = true
- override def toString = s"count()"
- val children = Nil
- lazy val dataType = {
- DataTypes.LONG_TYPE
- }
-
- }
- case class MaxExpression(maxClause: Expression) extends Expression {
-
- override def toString = s"max($maxClause)"
- override def isAggregator = true
- val children = List(maxClause)
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
- maxClause.dataType
- }
- }
-
- case class MinExpression(minClause: Expression) extends Expression {
-
- override def toString = s"min($minClause)"
- override def isAggregator = true
- val children = List(minClause)
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
- minClause.dataType
- }
- }
-
- case class SumExpression(sumClause: Expression) extends Expression {
-
- override def toString = s"sum($sumClause)"
- override def isAggregator = true
- val children = List(sumClause)
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
- sumClause.dataType
- }
- }
-
- case class GroupByExpression(child: Expression, groupBy: SelectExpression, selExpr: SelectExpression) extends Expression{
-
- override def toString = s"from ${child} groupby(${groupBy}) select ${selExpr}"
- val children = List(child, groupBy, selExpr)
- lazy val dataType = {
- if (!resolved) {
- throw new UnresolvedException(this,
- s"datatype. Can not resolve due to unresolved children")
- }
- selExpr.dataType
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/scala/org/apache/atlas/query/GraphPersistenceStrategies.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/GraphPersistenceStrategies.scala b/repository/src/main/scala/org/apache/atlas/query/GraphPersistenceStrategies.scala
deleted file mode 100755
index c2b3436..0000000
--- a/repository/src/main/scala/org/apache/atlas/query/GraphPersistenceStrategies.scala
+++ /dev/null
@@ -1,367 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import java.util
-import java.util.Date
-
-import scala.collection.JavaConversions._
-import scala.collection.JavaConversions.seqAsJavaList
-import scala.language.existentials
-
-import org.apache.atlas.groovy.GroovyExpression
-import org.apache.atlas.query.TypeUtils.FieldInfo
-import org.apache.atlas.repository.RepositoryException
-import org.apache.atlas.repository.graph.GraphHelper
-import org.apache.atlas.repository.graphdb._
-import org.apache.atlas.typesystem.ITypedInstance
-import org.apache.atlas.typesystem.ITypedReferenceableInstance
-import org.apache.atlas.typesystem.persistence.Id
-import org.apache.atlas.typesystem.types._
-import org.apache.atlas.typesystem.types.DataTypes._
-
-/**
- * Represents the Bridge between the QueryProcessor and the Graph Persistence scheme used.
- * Some of the behaviors captured are:
- * - how is type and id information stored in the Vertex that represents an [[ITypedReferenceableInstance]]
- * - how are edges representing trait and attribute relationships labeled.
- * - how are attribute names mapped to Property Keys in Vertices.
- *
- * This is a work in progress.
- *
- */
-trait GraphPersistenceStrategies {
-
- @throws(classOf[RepositoryException])
- def getGraph() : AtlasGraph[_,_]
-
- def getSupportedGremlinVersion() : GremlinVersion = getGraph().getSupportedGremlinVersion;
- def generatePersisentToLogicalConversionExpression(expr: GroovyExpression, t: IDataType[_]) : GroovyExpression = getGraph().generatePersisentToLogicalConversionExpression(expr, t);
- def isPropertyValueConversionNeeded(attrType: IDataType[_]) : Boolean = getGraph().isPropertyValueConversionNeeded(attrType);
-
- def addInitialQueryCondition(parent: GroovyExpression) : GroovyExpression = if (getGraph().requiresInitialIndexedPredicate()) { getGraph().getInitialIndexedPredicate(parent) } else { parent };
-
- /**
- * Name of attribute used to store typeName in vertex
- */
- def typeAttributeName: String
-
- /**
- * Name of attribute used to store super type names in vertex.
- */
- def superTypeAttributeName: String
-
- /**
- * Name of attribute used to store guid in vertex
- */
- def idAttributeName : String
-
- /**
- * Name of attribute used to store state in vertex
- */
- def stateAttributeName : String
- /**
- * Name of attribute used to store version in vertex
- */
- def versionAttributeName : String
-
- /**
- * Given a dataType and a reference attribute, how is edge labeled
- */
- def edgeLabel(iDataType: IDataType[_], aInfo: AttributeInfo): String
-
- def traitLabel(cls: IDataType[_], traitName: String): String
-
- def instanceToTraitEdgeDirection : AtlasEdgeDirection = AtlasEdgeDirection.OUT;
-
- def traitToInstanceEdgeDirection : AtlasEdgeDirection = instanceToTraitEdgeDirection match {
- case AtlasEdgeDirection.OUT => AtlasEdgeDirection.IN;
- case AtlasEdgeDirection.IN => AtlasEdgeDirection.OUT;
- case x => AtlasEdgeDirection.IN;
- }
-
- /**
- * The propertyKey used to store the attribute in a Graph Vertex.
- * @param dataType
- * @param aInfo
- * @return
- */
- def fieldNameInVertex(dataType: IDataType[_], aInfo: AttributeInfo): String
-
- /**
- * from a vertex for an [[ITypedReferenceableInstance]] get the traits that it has.
- * @param v
- * @return
- */
- def traitNames(v: AtlasVertex[_,_]): java.util.List[String]
-
- def edgeLabel(fInfo: FieldInfo): String = fInfo match {
- case FieldInfo(dataType, aInfo, null, null) => edgeLabel(dataType, aInfo)
- case FieldInfo(dataType, aInfo, reverseDataType, null) => edgeLabel(reverseDataType, aInfo)
- case FieldInfo(dataType, null, null, traitName) => traitLabel(dataType, traitName)
- }
-
- /**
- * extract the Id from a Vertex.
- * @param dataTypeNm the dataType of the instance that the given vertex represents
- * @param v
- * @return
- */
- def getIdFromVertex(dataTypeNm: String, v: AtlasVertex[_,_]): Id
-
- def constructInstance[U](dataType: IDataType[U], v: java.lang.Object): U
-
- def constructClassInstanceId[U](dataType: ClassType, v: java.lang.Object): ITypedReferenceableInstance
-
- def addGraphVertexPrefix(preStatements : Traversable[GroovyExpression]) = !collectTypeInstancesIntoVar
-
- /**
- * Controls behavior of how instances of a Type are discovered.
- * - query is generated in a way that indexes are exercised using a local set variable across multiple lookups
- * - query is generated using an 'or' expression.
- *
- * '''This is a very bad idea: controlling query execution behavior via query generation.''' But our current
- * knowledge of seems to indicate we have no choice. See
- * [[https://groups.google.com/forum/#!topic/gremlin-users/n1oV86yr4yU discussion in Gremlin group]].
- * Also this seems a fragile solution, dependend on the memory requirements of the Set variable.
- * For now enabling via the '''collectTypeInstancesIntoVar''' behavior setting. Reverting back would require
- * setting this to false.
- *
- * Long term have to get to the bottom of Gremlin:
- * - there doesn't seem to be way to see the physical query plan. Maybe we should directly interface with Titan.
- * - At least from querying perspective a columnar db maybe a better route. Daniel Abadi did some good work
- * on showing how to use a columnar store as a Graph Db.
- *
- *
- * @return
- */
- def collectTypeInstancesIntoVar = true
-
- def filterBySubTypes = true
-
- private def propertyValueSet(vertexRef : String, attrName: String) : String = {
- s"""org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils.set(${vertexRef}.values('${attrName})"""
- }
-
-
-}
-
-
-case class GraphPersistenceStrategy1(g: AtlasGraph[_,_]) extends GraphPersistenceStrategies {
-
- val typeAttributeName = "typeName"
- val superTypeAttributeName = "superTypeNames"
- val idAttributeName = "guid"
- val stateAttributeName = "state"
- val versionAttributeName = "version"
-
- override def getGraph() : AtlasGraph[_,_] = {
- return g;
- }
-
- def edgeLabel(dataType: IDataType[_], aInfo: AttributeInfo) = s"__${dataType.getName}.${aInfo.name}"
-
- def edgeLabel(propertyName: String) = s"__${propertyName}"
-
- def traitLabel(cls: IDataType[_], traitName: String) = s"${cls.getName}.$traitName"
-
- def fieldNameInVertex(dataType: IDataType[_], aInfo: AttributeInfo) = GraphHelper.getQualifiedFieldName(dataType, aInfo.name)
-
- def getIdFromVertex(dataTypeNm: String, v: AtlasVertex[_,_]): Id =
- new Id(v.getId.toString, 0, dataTypeNm)
-
- def getIdFromVertex(v: AtlasVertex[_,_]): Id =
- getIdFromVertex(v.getProperty(typeAttributeName, classOf[java.lang.String]), v)
-
- def traitNames(v: AtlasVertex[_,_]): java.util.List[String] = {
- val s = v.getProperty("traitNames", classOf[String])
- if (s != null) {
- Seq[String](s.split(","): _*)
- } else {
- Seq()
- }
- }
- def constructClassInstanceId[U](classType: ClassType, v: AnyRef): ITypedReferenceableInstance = {
- val vertex = v.asInstanceOf[AtlasVertex[_,_]];
- val id = getIdFromVertex(vertex)
- val cInstance = classType.createInstance(id)
- classType.convert(cInstance, Multiplicity.OPTIONAL)
- }
- def constructInstance[U](dataType: IDataType[U], v: AnyRef): U = {
- dataType.getTypeCategory match {
- case DataTypes.TypeCategory.PRIMITIVE => dataType.convert(v, Multiplicity.OPTIONAL)
- case DataTypes.TypeCategory.ARRAY =>
- dataType.convert(v, Multiplicity.OPTIONAL)
- case DataTypes.TypeCategory.STRUCT
- if dataType.getName == TypeSystem.getInstance().getIdType.getName => {
- val sType = dataType.asInstanceOf[StructType]
- val sInstance = sType.createInstance()
- val tV = v.asInstanceOf[AtlasVertex[_,_]]
- sInstance.set(TypeSystem.getInstance().getIdType.typeNameAttrName,
- tV.getProperty(typeAttributeName, classOf[java.lang.String]))
- sInstance.set(TypeSystem.getInstance().getIdType.idAttrName,
- tV.getProperty(idAttributeName, classOf[java.lang.String]))
- dataType.convert(sInstance, Multiplicity.OPTIONAL)
- }
- case DataTypes.TypeCategory.STRUCT => {
- val sType = dataType.asInstanceOf[StructType]
- val sInstance = sType.createInstance()
- loadStructInstance(sType, sInstance, v.asInstanceOf[AtlasVertex[_,_]])
- dataType.convert(sInstance, Multiplicity.OPTIONAL)
- }
- case DataTypes.TypeCategory.TRAIT => {
- val tType = dataType.asInstanceOf[TraitType]
- val tInstance = tType.createInstance()
- /*
- * this is not right, we should load the Instance associated with this trait.
- * for now just loading the trait struct.
- */
- loadStructInstance(tType, tInstance, v.asInstanceOf[AtlasVertex[_,_]])
- dataType.convert(tInstance, Multiplicity.OPTIONAL)
- }
- case DataTypes.TypeCategory.CLASS => {
- val cType = dataType.asInstanceOf[ClassType]
- val cInstance = constructClassInstance(dataType.asInstanceOf[ClassType], v.asInstanceOf[AtlasVertex[_,_]])
- dataType.convert(cInstance, Multiplicity.OPTIONAL)
- }
- case DataTypes.TypeCategory.ENUM => dataType.convert(v, Multiplicity.OPTIONAL)
- case x => throw new UnsupportedOperationException(s"load for ${dataType} not supported")
- }
- }
-
- def loadStructInstance(dataType: IConstructableType[_, _ <: ITypedInstance],
- typInstance: ITypedInstance, v: AtlasVertex[_,_]): Unit = {
- import scala.collection.JavaConversions._
- dataType.fieldMapping().fields.foreach { t =>
- val fName = t._1
- val aInfo = t._2
- loadAttribute(dataType, aInfo, typInstance, v)
- }
- }
-
- def constructClassInstance(dataType: ClassType, v: AtlasVertex[_,_]): ITypedReferenceableInstance = {
- val id = getIdFromVertex(dataType.name, v)
- val tNms = traitNames(v)
- val cInstance = dataType.createInstance(id, tNms: _*)
- // load traits
- tNms.foreach { tNm =>
- val tLabel = traitLabel(dataType, tNm)
- val edges = v.getEdges(AtlasEdgeDirection.OUT, tLabel)
- val tVertex = edges.iterator().next().getInVertex().asInstanceOf[AtlasVertex[_,_]]
- val tType = TypeSystem.getInstance().getDataType[TraitType](classOf[TraitType], tNm)
- val tInstance = cInstance.getTrait(tNm).asInstanceOf[ITypedInstance]
- loadStructInstance(tType, tInstance, tVertex)
- }
- loadStructInstance(dataType, cInstance, v)
- cInstance
- }
-
- def loadAttribute(dataType: IDataType[_], aInfo: AttributeInfo, i: ITypedInstance, v: AtlasVertex[_,_]): Unit = {
- aInfo.dataType.getTypeCategory match {
- case DataTypes.TypeCategory.PRIMITIVE => loadPrimitiveAttribute(dataType, aInfo, i, v)
- case DataTypes.TypeCategory.ENUM => loadEnumAttribute(dataType, aInfo, i, v)
- case DataTypes.TypeCategory.ARRAY =>
- loadArrayAttribute(dataType, aInfo, i, v)
- case DataTypes.TypeCategory.MAP =>
- throw new UnsupportedOperationException(s"load for ${aInfo.dataType()} not supported")
- case DataTypes.TypeCategory.STRUCT => loadStructAttribute(dataType, aInfo, i, v)
- case DataTypes.TypeCategory.TRAIT =>
- throw new UnsupportedOperationException(s"load for ${aInfo.dataType()} not supported")
- case DataTypes.TypeCategory.CLASS => loadStructAttribute(dataType, aInfo, i, v)
- case DataTypes.TypeCategory.RELATIONSHIP =>
- throw new UnsupportedOperationException(s"load for ${aInfo.dataType()} not supported")
- }
- }
-
- private def loadEnumAttribute(dataType: IDataType[_], aInfo: AttributeInfo, i: ITypedInstance, v: AtlasVertex[_,_])
- : Unit = {
- val fName = fieldNameInVertex(dataType, aInfo)
- i.setInt(aInfo.name, v.getProperty(fName, classOf[java.lang.Integer]))
- }
-
- private def loadPrimitiveAttribute(dataType: IDataType[_], aInfo: AttributeInfo,
- i: ITypedInstance, v: AtlasVertex[_,_]): Unit = {
- val fName = fieldNameInVertex(dataType, aInfo)
- aInfo.dataType() match {
- case x: BooleanType => i.setBoolean(aInfo.name, v.getProperty(fName, classOf[java.lang.Boolean]))
- case x: ByteType => i.setByte(aInfo.name, v.getProperty(fName, classOf[java.lang.Byte]))
- case x: ShortType => i.setShort(aInfo.name, v.getProperty(fName, classOf[java.lang.Short]))
- case x: IntType => i.setInt(aInfo.name, v.getProperty(fName, classOf[java.lang.Integer]))
- case x: LongType => i.setLong(aInfo.name, v.getProperty(fName, classOf[java.lang.Long]))
- case x: FloatType => i.setFloat(aInfo.name, v.getProperty(fName, classOf[java.lang.Float]))
- case x: DoubleType => i.setDouble(aInfo.name, v.getProperty(fName, classOf[java.lang.Double]))
- case x: StringType => i.setString(aInfo.name, v.getProperty(fName, classOf[java.lang.String]))
- case x: DateType => {
- val dateVal = v.getProperty(fName, classOf[java.lang.Long])
- i.setDate(aInfo.name, new Date(dateVal))
- }
- case _ => throw new UnsupportedOperationException(s"load for ${aInfo.dataType()} not supported")
- }
- }
-
-
- private def loadArrayAttribute[T](dataType: IDataType[_], aInfo: AttributeInfo,
- i: ITypedInstance, v: AtlasVertex[_,_]): Unit = {
- import scala.collection.JavaConversions._
- val list: java.util.List[_] = v.getListProperty(aInfo.name)
- val arrayType: DataTypes.ArrayType = aInfo.dataType.asInstanceOf[ArrayType]
-
- var values = new util.ArrayList[Any]
- list.foreach( listElement =>
- values += mapVertexToCollectionEntry(v, aInfo, arrayType.getElemType, i, listElement)
- )
- i.set(aInfo.name, values)
- }
-
- private def loadStructAttribute(dataType: IDataType[_], aInfo: AttributeInfo,
- i: ITypedInstance, v: AtlasVertex[_,_], edgeLbl: Option[String] = None): Unit = {
- val eLabel = edgeLbl match {
- case Some(x) => x
- case None => edgeLabel(FieldInfo(dataType, aInfo, null))
- }
- val edges = v.getEdges(AtlasEdgeDirection.OUT, eLabel)
- val sVertex = edges.iterator().next().getInVertex().asInstanceOf[AtlasVertex[_,_]]
- if (aInfo.dataType().getTypeCategory == DataTypes.TypeCategory.STRUCT) {
- val sType = aInfo.dataType().asInstanceOf[StructType]
- val sInstance = sType.createInstance()
- loadStructInstance(sType, sInstance, sVertex)
- i.set(aInfo.name, sInstance)
- } else {
- val cInstance = constructClassInstance(aInfo.dataType().asInstanceOf[ClassType], sVertex)
- i.set(aInfo.name, cInstance)
- }
- }
-
- private def mapVertexToCollectionEntry(instanceVertex: AtlasVertex[_,_], attributeInfo: AttributeInfo, elementType: IDataType[_], i: ITypedInstance, value: Any): Any = {
- elementType.getTypeCategory match {
- case DataTypes.TypeCategory.PRIMITIVE => value
- case DataTypes.TypeCategory.ENUM => value
- case DataTypes.TypeCategory.STRUCT =>
- throw new UnsupportedOperationException(s"load for ${attributeInfo.dataType()} not supported")
- case DataTypes.TypeCategory.TRAIT =>
- throw new UnsupportedOperationException(s"load for ${attributeInfo.dataType()} not supported")
- case DataTypes.TypeCategory.CLASS => //loadStructAttribute(elementType, attributeInfo, i, v)
- throw new UnsupportedOperationException(s"load for ${attributeInfo.dataType()} not supported")
- case _ =>
- throw new UnsupportedOperationException(s"load for ${attributeInfo.dataType()} not supported")
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/scala/org/apache/atlas/query/GremlinEvaluator.scala
----------------------------------------------------------------------
diff --git a/repository/src/main/scala/org/apache/atlas/query/GremlinEvaluator.scala b/repository/src/main/scala/org/apache/atlas/query/GremlinEvaluator.scala
deleted file mode 100755
index 5f8ef8a..0000000
--- a/repository/src/main/scala/org/apache/atlas/query/GremlinEvaluator.scala
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-
-import org.apache.atlas.query.Expressions._
-import org.apache.atlas.query.TypeUtils.ResultWithPathStruct
-import org.apache.atlas.repository.graphdb.AtlasGraph
-import org.apache.atlas.typesystem.json._
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory
-import org.apache.atlas.typesystem.types._
-import org.json4s._
-import org.json4s.native.Serialization._
-
-import scala.language.existentials
-
-case class GremlinQueryResult(query: String,
- resultDataType: IDataType[_],
- rows: java.util.List[_]) {
- def toJson = JsonHelper.toJson(this)
-}
-
-class GremlinEvaluator(qry: GremlinQuery, persistenceStrategy: GraphPersistenceStrategies, g: AtlasGraph[_,_]) {
-
- /**
- *
- * @param gResultObj is the object returned from gremlin. This must be a List
- * @param qryResultObj is the object constructed for the output w/o the Path.
- * @return a ResultWithPathStruct
- */
- def addPathStruct(gResultObj: AnyRef, qryResultObj: Any): Any = {
- if (!qry.isPathExpression) {
- qryResultObj
- } else {
- import scala.collection.JavaConversions._
- import scala.collection.JavaConverters._
-
- val iPaths = gResultObj.asInstanceOf[java.util.List[AnyRef]].init
-
- val oPaths = iPaths.map { value =>
- persistenceStrategy.constructInstance(TypeSystem.getInstance().getIdType.getStructType, value)
- }.toList.asJava
- val sType = qry.expr.dataType.asInstanceOf[StructType]
- val sInstance = sType.createInstance()
- sInstance.set(ResultWithPathStruct.pathAttrName, oPaths)
- sInstance.set(ResultWithPathStruct.resultAttrName, qryResultObj)
- sInstance
- }
- }
-
- def instanceObject(v: AnyRef): AnyRef = {
- if (qry.isPathExpression) {
- import scala.collection.JavaConversions._
- v.asInstanceOf[java.util.List[AnyRef]].last
- } else {
- v
- }
- }
-
- def evaluate(): GremlinQueryResult = {
- import scala.collection.JavaConversions._
- val debug:Boolean = false
- val rType = qry.expr.dataType
- val oType = if (qry.isPathExpression) {
- qry.expr.children(0).dataType
- }
- else {
- rType
- }
- val rawRes = g.executeGremlinScript(qry.queryStr, qry.isPathExpression);
- if(debug) {
- println(" rawRes " +rawRes)
- }
- if (!qry.hasSelectList && ! qry.isGroupBy) {
- val rows = rawRes.asInstanceOf[java.util.List[AnyRef]].map { v =>
- val instObj = instanceObject(v)
- val o = persistenceStrategy.constructInstance(oType, instObj)
- addPathStruct(v, o)
- }
- GremlinQueryResult(qry.expr.toString, rType, rows.toList)
- } else {
- val sType = oType.asInstanceOf[StructType]
- val rows = rawRes.asInstanceOf[java.util.List[AnyRef]].map { r =>
- val rV = instanceObject(r)
- val sInstance = sType.createInstance()
- val selObj = SelectExpressionHelper.extractSelectExpression(qry.expr)
- if (selObj.isDefined) {
- val selExpr = selObj.get.asInstanceOf[Expressions.SelectExpression]
- selExpr.selectListWithAlias.foreach { aE =>
- val cName = aE.alias
- val (src, idx) = qry.resultMaping(cName)
- val v = getColumnValue(rV, src, idx)
- //if select clause is selecting the entire object then return only the instance id (guid, version, state and typeName)
- if (aE.dataType.getTypeCategory == TypeCategory.CLASS) {
- sInstance.set(cName, persistenceStrategy.constructClassInstanceId(aE.dataType.asInstanceOf[ClassType], v))
- } else {
- sInstance.set(cName, persistenceStrategy.constructInstance(aE.dataType, v))
- }
- }
- }
- else if(qry.isGroupBy) {
- //the order in the result will always match the order in the select list
- val selExpr = qry.expr.asInstanceOf[GroupByExpression].selExpr
- var idx = 0;
- val row : java.util.List[Object] = rV.asInstanceOf[java.util.List[Object]]
- selExpr.selectListWithAlias.foreach { aE =>
- val cName = aE.alias
- val cValue = row.get(idx);
-
- sInstance.set(cName, persistenceStrategy.constructInstance(aE.dataType, cValue))
- idx += 1;
- }
- }
- addPathStruct(r, sInstance)
- }
- GremlinQueryResult(qry.expr.toString, rType, rows.toList)
- }
-
- }
-
- private def getColumnValue(rowValue: AnyRef, colName: String, idx: Integer) : AnyRef = {
-
- var rawColumnValue: AnyRef = null;
- if(rowValue.isInstanceOf[java.util.Map[_,_]]) {
- val columnsMap = rowValue.asInstanceOf[java.util.Map[String,AnyRef]];
- rawColumnValue = columnsMap.get(colName);
- }
- else {
- //when there is only one column, result does not come back as a map
- rawColumnValue = rowValue;
- }
-
- var value : AnyRef = null;
- if(rawColumnValue.isInstanceOf[java.util.List[_]] && idx >= 0) {
- val arr = rawColumnValue.asInstanceOf[java.util.List[AnyRef]];
- value = arr.get(idx);
- }
- else {
- value = rawColumnValue;
- }
-
- return value;
- }
-}
-
-object JsonHelper {
-
- class GremlinQueryResultSerializer()
- extends Serializer[GremlinQueryResult] {
- def deserialize(implicit format: Formats) = {
- throw new UnsupportedOperationException("Deserialization of GremlinQueryResult not supported")
- }
-
- def serialize(implicit f: Formats) = {
- case GremlinQueryResult(query, rT, rows) =>
- JObject(JField("query", JString(query)),
- JField("dataType", TypesSerialization.toJsonValue(rT)(f)),
- JField("rows", Extraction.decompose(rows)(f))
- )
- }
- }
-
- implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer +
- new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer +
- new GremlinQueryResultSerializer
-
- def toJson(r: GremlinQueryResult): String = {
- writePretty(r)
- }
-}
[42/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
ATLAS-2251: Remove TypeSystem and related implementation, to avoid unnecessary duplicate of type details in cache
Project: http://git-wip-us.apache.org/repos/asf/atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/atlas/commit/435fe3fb
Tree: http://git-wip-us.apache.org/repos/asf/atlas/tree/435fe3fb
Diff: http://git-wip-us.apache.org/repos/asf/atlas/diff/435fe3fb
Branch: refs/heads/master
Commit: 435fe3fbad0dbb41297cc7af7986d3062704d7ee
Parents: 0abf84c
Author: Madhan Neethiraj <ma...@apache.org>
Authored: Fri Nov 3 22:45:05 2017 -0700
Committer: Madhan Neethiraj <ma...@apache.org>
Committed: Sun Nov 12 10:08:25 2017 -0800
----------------------------------------------------------------------
addons/falcon-bridge/pom.xml | 61 +-
.../atlas/falcon/bridge/FalconBridge.java | 4 +-
.../apache/atlas/falcon/hook/FalconHook.java | 9 +-
.../apache/atlas/falcon/hook/FalconHookIT.java | 20 +-
addons/hbase-bridge/pom.xml | 97 +-
.../atlas/hbase/bridge/HBaseAtlasHook.java | 47 +-
.../hbase/model/HBaseOperationContext.java | 8 +-
.../apache/atlas/hbase/HBaseAtlasHookIT.java | 2 +-
addons/hdfs-model/pom.xml | 66 -
addons/hive-bridge/pom.xml | 56 +-
.../atlas/hive/bridge/ColumnLineageUtils.java | 2 +-
.../atlas/hive/bridge/HiveMetaStoreBridge.java | 18 +-
.../org/apache/atlas/hive/hook/HiveHook.java | 29 +-
.../java/org/apache/atlas/hive/HiveITBase.java | 4 +-
.../hive/bridge/HiveMetaStoreBridgeTest.java | 4 +-
.../hive/bridge/HiveMetastoreBridgeIT.java | 8 +-
.../org/apache/atlas/hive/hook/HiveHookIT.java | 26 +-
addons/sqoop-bridge/pom.xml | 61 +-
.../org/apache/atlas/sqoop/hook/SqoopHook.java | 9 +-
addons/storm-bridge/pom.xml | 115 +-
.../apache/atlas/storm/hook/StormAtlasHook.java | 2 +-
.../atlas/storm/hook/StormAtlasHookIT.java | 2 +-
authorization/pom.xml | 7 +
client/client-v1/pom.xml | 9 +-
.../main/java/org/apache/atlas/AtlasClient.java | 54 +-
.../java/org/apache/atlas/EntityAuditEvent.java | 19 +-
.../src/main/java/org/apache/atlas/SerDe.java | 80 -
.../java/org/apache/atlas/AtlasClientTest.java | 6 +-
client/pom.xml | 12 +-
common/pom.xml | 1 +
.../org/apache/atlas/AtlasConfiguration.java | 83 --
.../org/apache/atlas/repository/Constants.java | 7 +
graphdb/api/pom.xml | 6 +-
.../atlas/repository/graphdb/AtlasGraph.java | 12 +-
graphdb/common/pom.xml | 6 +
.../graphdb/janus/AtlasJanusGraph.java | 6 +-
.../repository/graphdb/titan0/Titan0Graph.java | 6 +-
.../repository/graphdb/titan1/Titan1Graph.java | 6 +-
intg/pom.xml | 26 +-
.../org/apache/atlas/AtlasConfiguration.java | 83 ++
.../AtlasNotificationBaseMessage.java | 208 +++
.../notification/AtlasNotificationMessage.java | 101 ++
.../AtlasNotificationStringMessage.java | 82 ++
.../model/notification/EntityNotification.java | 86 ++
.../model/notification/HookNotification.java | 103 ++
.../model/notification/MessageVersion.java | 170 +++
.../java/org/apache/atlas/type/AtlasType.java | 167 ++-
.../apache/atlas/type/AtlasTypeRegistry.java | 4 +-
.../org/apache/atlas/type/AtlasTypeUtil.java | 157 +-
.../atlas/typesystem/types/DataTypes.java | 35 +
.../model/instance/AtlasSystemAttributes.java | 156 ++
.../org/apache/atlas/v1/model/instance/Id.java | 270 ++++
.../atlas/v1/model/instance/Referenceable.java | 252 ++++
.../apache/atlas/v1/model/instance/Struct.java | 211 +++
.../model/lineage/DataSetLineageResponse.java | 65 +
.../atlas/v1/model/lineage/LineageResponse.java | 75 +
.../atlas/v1/model/lineage/SchemaResponse.java | 116 ++
.../notification/EntityNotificationV1.java | 231 +++
.../model/notification/HookNotificationV1.java | 357 +++++
.../v1/model/typedef/AttributeDefinition.java | 179 +++
.../v1/model/typedef/ClassTypeDefinition.java | 57 +
.../v1/model/typedef/EnumTypeDefinition.java | 174 +++
.../typedef/HierarchicalTypeDefinition.java | 96 ++
.../atlas/v1/model/typedef/Multiplicity.java | 167 +++
.../v1/model/typedef/StructTypeDefinition.java | 119 ++
.../v1/model/typedef/TraitTypeDefinition.java | 51 +
.../apache/atlas/v1/model/typedef/TypesDef.java | 118 ++
.../v1/typesystem/types/utils/TypesUtil.java | 112 ++
.../apache/atlas/TestRelationshipUtilsV2.java | 26 +-
.../test/java/org/apache/atlas/TestUtilsV2.java | 144 +-
.../atlas/type/TestAtlasRelationshipType.java | 14 +-
.../test/resources/atlas-application.properties | 145 ++
intg/src/test/resources/policy-store.txt | 9 +
intg/src/test/resources/sampleInstance.json | 72 +
intg/src/test/resources/sampleTypes.json | 633 ++++++++
.../test/resources/users-credentials.properties | 3 +
notification/pom.xml | 33 +-
.../java/org/apache/atlas/hook/AtlasHook.java | 30 +-
.../apache/atlas/kafka/AtlasKafkaConsumer.java | 18 +-
.../apache/atlas/kafka/KafkaNotification.java | 5 +-
.../AbstractMessageDeserializer.java | 132 +-
.../notification/AbstractNotification.java | 40 +-
.../AbstractNotificationConsumer.java | 19 +-
.../AtlasNotificationBaseMessage.java | 194 ---
.../notification/AtlasNotificationMessage.java | 87 --
.../AtlasNotificationMessageDeserializer.java | 59 +-
.../AtlasNotificationStringMessage.java | 66 -
.../atlas/notification/MessageVersion.java | 141 --
.../notification/NotificationInterface.java | 49 +-
.../notification/SplitMessageAggregator.java | 2 +
.../entity/EntityMessageDeserializer.java | 41 +-
.../notification/entity/EntityNotification.java | 65 -
.../entity/EntityNotificationImpl.java | 170 ---
.../hook/HookMessageDeserializer.java | 27 +-
.../notification/hook/HookNotification.java | 275 ----
.../org/apache/atlas/hook/AtlasHookTest.java | 53 +-
.../apache/atlas/kafka/KafkaConsumerTest.java | 130 +-
.../atlas/kafka/KafkaNotificationMockTest.java | 2 +-
.../atlas/kafka/KafkaNotificationTest.java | 37 +-
.../AbstractNotificationConsumerTest.java | 103 +-
.../notification/AbstractNotificationTest.java | 64 +-
.../AtlasNotificationMessageTest.java | 2 +
.../atlas/notification/MessageVersionTest.java | 1 +
.../SplitMessageAggregatorTest.java | 3 +-
.../entity/EntityMessageDeserializerTest.java | 73 -
.../EntityNotificationDeserializerTest.java | 71 +
.../entity/EntityNotificationImplTest.java | 149 --
.../entity/EntityNotificationTest.java | 128 ++
.../hook/HookMessageDeserializerTest.java | 170 ---
.../hook/HookNotificationDeserializerTest.java | 167 +++
.../notification/hook/HookNotificationTest.java | 40 +-
pom.xml | 154 +-
repository/pom.xml | 85 +-
.../atlas/GraphTransactionInterceptor.java | 2 +-
.../atlas/discovery/AtlasLineageService.java | 16 +
.../atlas/discovery/DataSetLineageService.java | 233 ---
.../atlas/discovery/DiscoveryService.java | 59 -
.../atlas/discovery/EntityDiscoveryService.java | 22 +-
.../atlas/discovery/EntityLineageService.java | 86 +-
.../apache/atlas/discovery/SearchIndexer.java | 4 +-
.../graph/DefaultGraphPersistenceStrategy.java | 292 ----
.../graph/GraphBackedDiscoveryService.java | 269 ----
.../gremlin/Gremlin2ExpressionFactory.java | 379 -----
.../gremlin/Gremlin3ExpressionFactory.java | 485 ------
.../atlas/gremlin/GremlinExpressionFactory.java | 658 ---------
.../atlas/gremlin/optimizer/AliasFinder.java | 103 --
.../gremlin/optimizer/CallHierarchyVisitor.java | 62 -
.../optimizer/ExpandAndsOptimization.java | 127 --
.../optimizer/ExpandOrsOptimization.java | 588 --------
.../gremlin/optimizer/ExpressionFinder.java | 69 -
.../gremlin/optimizer/FunctionGenerator.java | 326 -----
.../gremlin/optimizer/GremlinOptimization.java | 48 -
.../optimizer/GremlinQueryOptimizer.java | 262 ----
.../gremlin/optimizer/HasForbiddenType.java | 52 -
.../apache/atlas/gremlin/optimizer/IsOr.java | 48 -
.../atlas/gremlin/optimizer/IsOrParent.java | 60 -
.../gremlin/optimizer/OptimizationContext.java | 116 --
.../atlas/gremlin/optimizer/OrderFinder.java | 68 -
.../gremlin/optimizer/PathExpressionFinder.java | 61 -
.../atlas/gremlin/optimizer/RangeFinder.java | 68 -
.../optimizer/RepeatExpressionFinder.java | 65 -
.../gremlin/optimizer/SplitPointFinder.java | 161 --
.../gremlin/optimizer/UpdatedExpressions.java | 45 -
.../org/apache/atlas/query/Expressions.java | 45 +
.../org/apache/atlas/query/GremlinQuery.java | 42 +
.../apache/atlas/query/GremlinTranslator.java | 34 +
.../org/apache/atlas/query/QueryParams.java | 50 +
.../org/apache/atlas/query/QueryParser.java | 43 +
.../org/apache/atlas/query/QueryProcessor.java | 28 +
.../atlas/query/SelectExpressionHelper.java | 30 +
.../atlas/repository/DiscoverInstances.java | 77 -
.../apache/atlas/repository/IRepository.java | 51 -
.../atlas/repository/MetadataRepository.java | 198 ---
.../repository/RepositoryConfiguration.java | 37 -
.../repository/audit/EntityAuditListener.java | 115 +-
.../AtlasClassificationFormatConverter.java | 18 +-
.../converters/AtlasEntityFormatConverter.java | 87 +-
.../converters/AtlasEnumFormatConverter.java | 6 +-
.../converters/AtlasInstanceConverter.java | 218 ++-
.../converters/AtlasObjectIdConverter.java | 58 +-
.../converters/AtlasStructFormatConverter.java | 131 +-
.../converters/TypeConverterUtil.java | 259 ++--
.../atlas/repository/graph/DeleteHandler.java | 468 ------
.../atlas/repository/graph/EntityProcessor.java | 82 --
.../atlas/repository/graph/FullTextMapper.java | 174 ---
.../repository/graph/FullTextMapperV2.java | 4 +-
.../graph/GraphBackedMetadataRepository.java | 505 -------
.../graph/GraphBackedSearchIndexer.java | 386 ++---
.../atlas/repository/graph/GraphHelper.java | 179 +--
.../graph/GraphSchemaInitializer.java | 50 -
.../graph/GraphToTypedInstanceMapper.java | 452 ------
.../repository/graph/HardDeleteHandler.java | 48 -
.../repository/graph/SoftDeleteHandler.java | 74 -
.../graph/TypedInstanceToGraphMapper.java | 941 ------------
.../repository/graph/VertexLookupContext.java | 166 ---
.../repository/memory/AttributeStores.java | 632 --------
.../atlas/repository/memory/ClassStore.java | 93 --
.../memory/HierarchicalTypeStore.java | 207 ---
.../repository/memory/IAttributeStore.java | 49 -
.../atlas/repository/memory/MemRepository.java | 299 ----
.../memory/ReplaceIdWithInstance.java | 132 --
.../atlas/repository/memory/StructStore.java | 87 --
.../atlas/repository/memory/TraitStore.java | 57 -
.../store/graph/AtlasEntityStore.java | 9 +
.../graph/v1/AtlasEntityChangeNotifier.java | 34 +-
.../store/graph/v1/AtlasEntityStoreV1.java | 20 +
.../store/graph/v1/AtlasGraphUtilsV1.java | 19 +
.../store/graph/v1/AtlasStructDefStoreV1.java | 12 +-
.../typestore/GraphBackedTypeStore.java | 394 -----
.../atlas/repository/typestore/ITypeStore.java | 53 -
.../repository/typestore/StorageException.java | 35 -
.../typestore/StoreBackedTypeCache.java | 239 ---
.../typestore/TypePersistenceVisitor.java | 116 --
.../repository/typestore/TypeVertexFinder.java | 103 --
.../repository/typestore/TypeVertexInfo.java | 94 --
.../atlas/repository/typestore/TypeVisitor.java | 96 --
.../atlas/services/DefaultMetadataService.java | 821 -----------
.../util/AtlasRepositoryConfiguration.java | 28 -
.../apache/atlas/util/AttributeValueMap.java | 4 +-
.../atlas/util/CompiledQueryCacheKey.java | 87 --
.../org/apache/atlas/util/IndexedInstance.java | 8 +-
.../org/apache/atlas/util/NoopGremlinQuery.java | 39 -
.../org/apache/atlas/util/TypeDefSorter.java | 83 --
.../org/apache/atlas/query/ClosureQuery.scala | 327 -----
.../org/apache/atlas/query/Expressions.scala | 923 ------------
.../query/GraphPersistenceStrategies.scala | 367 -----
.../apache/atlas/query/GremlinEvaluator.scala | 186 ---
.../org/apache/atlas/query/GremlinQuery.scala | 806 ----------
.../org/apache/atlas/query/QueryParser.scala | 557 -------
.../org/apache/atlas/query/QueryProcessor.scala | 143 --
.../scala/org/apache/atlas/query/Resolver.scala | 182 ---
.../org/apache/atlas/query/TypeUtils.scala | 279 ----
.../org/apache/atlas/BaseRepositoryTest.java | 428 ------
.../atlas/RepositoryServiceLoadingTest.java | 39 -
.../test/java/org/apache/atlas/TestModules.java | 66 +-
.../test/java/org/apache/atlas/TestUtils.java | 778 ----------
.../discovery/DataSetLineageServiceTest.java | 497 -------
.../discovery/EntityDiscoveryServiceTest.java | 144 --
.../GraphBackedDiscoveryServiceTest.java | 1334 -----------------
.../atlas/lineage/EntityLineageServiceTest.java | 356 -----
.../apache/atlas/query/QueryProcessorTest.java | 157 --
.../org/apache/atlas/repository/BaseTest.java | 30 -
.../audit/AuditRepositoryTestBase.java | 35 +-
.../audit/HBaseBasedAuditRepositoryHATest.java | 96 --
.../audit/HBaseBasedAuditRepositoryTest.java | 72 -
.../atlas/repository/audit/HBaseTestUtils.java | 53 -
.../AbstractGremlinQueryOptimizerTest.java | 708 ---------
...hBackedMetadataRepositoryDeleteTestBase.java | 1205 ---------------
.../GraphBackedMetadataRepositoryTest.java | 864 -----------
.../GraphBackedRepositoryHardDeleteTest.java | 212 ---
.../GraphBackedRepositorySoftDeleteTest.java | 239 ---
.../graph/GraphBackedSearchIndexerMockTest.java | 91 --
.../graph/GraphBackedSearchIndexerTest.java | 177 ---
.../repository/graph/GraphHelperMockTest.java | 121 --
.../atlas/repository/graph/GraphHelperTest.java | 218 ---
.../graph/GraphRepoMapperScaleTest.java | 241 ---
.../graph/Gremlin2QueryOptimizerTest.java | 364 -----
.../graph/Gremlin3QueryOptimizerTest.java | 367 -----
.../ReverseReferenceUpdateHardDeleteTest.java | 46 -
.../ReverseReferenceUpdateSoftDeleteTest.java | 79 -
.../graph/ReverseReferenceUpdateTestBase.java | 275 ----
.../atlas/repository/graph/TestIntSequence.java | 35 -
.../repository/impexp/ExportServiceTest.java | 2 +-
.../graph/v1/AtlasDeleteHandlerV1Test.java | 1141 ---------------
.../graph/v1/AtlasEntityDefStoreV1Test.java | 5 +-
.../store/graph/v1/AtlasEntityStoreV1Test.java | 44 +-
.../graph/v1/AtlasRelationshipStoreV1Test.java | 2 +-
.../store/graph/v1/HardDeleteHandlerV1Test.java | 177 ---
.../graph/v1/InverseReferenceUpdateV1Test.java | 42 +-
.../store/graph/v1/SoftDeleteHandlerV1Test.java | 215 ---
.../typestore/GraphBackedTypeStoreTest.java | 256 ----
.../typestore/StoreBackedTypeCacheTest.java | 169 ---
.../service/DefaultMetadataServiceTest.java | 1381 ------------------
...StoreBackedTypeCacheMetadataServiceTest.java | 143 --
.../atlas/util/CompiledQueryCacheKeyTest.java | 104 --
.../java/org/apache/atlas/utils/HiveModel.java | 303 ----
.../org/apache/atlas/query/ExpressionTest.scala | 172 ---
.../org/apache/atlas/query/GremlinTest.scala | 1068 --------------
.../org/apache/atlas/query/GremlinTest2.scala | 154 --
.../apache/atlas/query/HiveTitanSample.scala | 243 ---
.../org/apache/atlas/query/LexerTest.scala | 49 -
.../apache/atlas/query/LineageQueryTest.scala | 541 -------
.../org/apache/atlas/query/ParserTest.scala | 100 --
.../apache/atlas/query/QueryTestsUtils.scala | 206 ---
server-api/pom.xml | 5 -
.../java/org/apache/atlas/RequestContext.java | 197 ---
.../java/org/apache/atlas/RequestContextV1.java | 38 +-
.../apache/atlas/discovery/LineageService.java | 75 -
.../exception/EntityNotFoundException.java | 44 +
.../atlas/exception/NotFoundException.java | 46 +
.../exception/SchemaNotFoundException.java | 40 +
.../atlas/exception/TraitNotFoundException.java | 45 +
.../atlas/listener/EntityChangeListener.java | 16 +-
.../atlas/listener/TypesChangeListener.java | 10 +-
.../apache/atlas/services/MetadataService.java | 317 ----
.../exception/EntityExistsException.java | 32 -
.../exception/EntityNotFoundException.java | 44 -
.../typesystem/exception/NotFoundException.java | 46 -
.../NullRequiredAttributeException.java | 59 -
.../exception/SchemaNotFoundException.java | 40 -
.../exception/TraitNotFoundException.java | 45 -
typesystem/.gitignore | 35 -
typesystem/LICENSE.md | 201 ---
typesystem/doc/IDataType.uml | 190 ---
typesystem/doc/Instance.uml | 126 --
typesystem/doc/QueryDSL.org | 266 ----
typesystem/doc/notes.MD | 78 -
typesystem/pom.xml | 165 ---
.../atlas/classification/InterfaceAudience.java | 48 -
.../org/apache/atlas/typesystem/IInstance.java | 41 -
.../typesystem/IReferenceableInstance.java | 38 -
.../org/apache/atlas/typesystem/IStruct.java | 26 -
.../apache/atlas/typesystem/ITypedInstance.java | 88 --
.../typesystem/ITypedReferenceableInstance.java | 22 -
.../apache/atlas/typesystem/ITypedStruct.java | 22 -
.../apache/atlas/typesystem/Referenceable.java | 229 ---
.../org/apache/atlas/typesystem/Struct.java | 124 --
.../exception/TypeExistsException.java | 27 -
.../exception/TypeNotFoundException.java | 46 -
.../persistence/AtlasSystemAttributes.java | 115 --
.../persistence/DownCastStructInstance.java | 80 -
.../apache/atlas/typesystem/persistence/Id.java | 307 ----
.../atlas/typesystem/persistence/MapIds.java | 70 -
.../persistence/ReferenceableInstance.java | 135 --
.../typesystem/persistence/StructInstance.java | 790 ----------
.../typesystem/types/AbstractDataType.java | 119 --
.../typesystem/types/AttributeDefinition.java | 81 -
.../atlas/typesystem/types/AttributeInfo.java | 134 --
.../atlas/typesystem/types/ClassType.java | 259 ----
.../atlas/typesystem/types/DataTypes.java | 655 ---------
.../typesystem/types/DownCastFieldMapping.java | 52 -
.../apache/atlas/typesystem/types/EnumType.java | 121 --
.../typesystem/types/EnumTypeDefinition.java | 64 -
.../atlas/typesystem/types/EnumValue.java | 65 -
.../atlas/typesystem/types/FieldMapping.java | 162 --
.../typesystem/types/HierarchicalType.java | 545 -------
.../types/HierarchicalTypeDefinition.java | 74 -
.../types/HierarchicalTypeDependencySorter.java | 75 -
.../typesystem/types/IConstructableType.java | 34 -
.../atlas/typesystem/types/IDataType.java | 61 -
.../atlas/typesystem/types/Multiplicity.java | 90 --
.../typesystem/types/ObjectGraphTraversal.java | 199 ---
.../typesystem/types/ObjectGraphWalker.java | 226 ---
.../atlas/typesystem/types/StructType.java | 280 ----
.../typesystem/types/StructTypeDefinition.java | 84 --
.../atlas/typesystem/types/TraitType.java | 104 --
.../atlas/typesystem/types/TypeSystem.java | 821 -----------
.../typesystem/types/TypeUpdateException.java | 39 -
.../atlas/typesystem/types/TypeUtils.java | 136 --
.../typesystem/types/TypedStructHandler.java | 115 --
.../types/ValueConversionException.java | 63 -
.../types/cache/DefaultTypeCache.java | 301 ----
.../atlas/typesystem/types/cache/TypeCache.java | 153 --
.../atlas/typesystem/types/package-info.java | 125 --
.../atlas/typesystem/types/utils/TypesUtil.java | 158 --
typesystem/src/main/resources/atlas-log4j.xml | 105 --
.../org/apache/atlas/typesystem/TypesDef.scala | 62 -
.../typesystem/builders/InstanceBuilder.scala | 217 ---
.../typesystem/builders/TypesBuilder.scala | 188 ---
.../typesystem/json/InstanceSerialization.scala | 374 -----
.../atlas/typesystem/json/Serialization.scala | 348 -----
.../typesystem/json/TypesSerialization.scala | 270 ----
.../apache/atlas/ApplicationPropertiesTest.java | 83 --
.../typesystem/json/SerializationJavaTest.java | 166 ---
.../apache/atlas/typesystem/types/BaseTest.java | 190 ---
.../atlas/typesystem/types/ClassTest.java | 124 --
.../apache/atlas/typesystem/types/EnumTest.java | 245 ----
.../typesystem/types/FieldMappingTest.java | 151 --
.../HierarchicalTypeDependencySorterTest.java | 81 -
.../typesystem/types/HierarchicalTypeTest.java | 92 --
.../atlas/typesystem/types/StructTest.java | 126 --
.../atlas/typesystem/types/TraitTest.java | 247 ----
.../typesystem/types/TypeInheritanceTest.java | 252 ----
.../atlas/typesystem/types/TypeSystemTest.java | 327 -----
.../typesystem/types/TypeUpdateBaseTest.java | 98 --
.../atlas/typesystem/types/ValidationTest.java | 102 --
.../types/cache/DefaultTypeCacheTest.java | 450 ------
.../test/resources/atlas-application.properties | 145 --
typesystem/src/test/resources/policy-store.txt | 9 -
.../src/test/resources/sampleInstance.json | 72 -
typesystem/src/test/resources/sampleTypes.json | 633 --------
.../test/resources/users-credentials.properties | 3 -
.../atlas/typesystem/builders/BuilderTest.scala | 81 -
.../builders/InstanceBuilderTest.scala | 105 --
.../typesystem/builders/MultiplicityTest.scala | 124 --
.../typesystem/builders/TypesBuilderTest.scala | 33 -
.../json/InstanceSerializationTest.scala | 164 ---
.../typesystem/json/SerializationTest.scala | 263 ----
.../json/TypesSerializationTest.scala | 342 -----
webapp/pom.xml | 24 +-
.../atlas/classification/InterfaceAudience.java | 48 +
.../org/apache/atlas/examples/QuickStart.java | 150 +-
.../org/apache/atlas/examples/QuickStartV2.java | 66 +-
.../NotificationEntityChangeListener.java | 133 +-
.../notification/NotificationHookConsumer.java | 211 +--
.../web/errors/NotFoundExceptionMapper.java | 2 +-
.../web/filters/AtlasAuthenticationFilter.java | 4 +-
.../apache/atlas/web/filters/AuditFilter.java | 7 +-
.../web/resources/DataSetLineageResource.java | 133 +-
.../atlas/web/resources/EntityResource.java | 172 +--
.../atlas/web/resources/LineageResource.java | 67 +-
.../resources/MetadataDiscoveryResource.java | 24 +-
.../atlas/web/resources/TypesResource.java | 6 +-
.../atlas/web/service/ActiveInstanceState.java | 2 +-
.../org/apache/atlas/web/util/LineageUtils.java | 32 +-
.../org/apache/atlas/examples/QuickStartIT.java | 4 +-
.../atlas/notification/AdaptiveWaiterTest.java | 10 +-
.../notification/EntityNotificationIT.java | 130 +-
.../NotificationEntityChangeListenerTest.java | 35 +-
.../NotificationHookConsumerIT.java | 84 +-
.../NotificationHookConsumerKafkaTest.java | 68 +-
.../NotificationHookConsumerTest.java | 155 +-
.../org/apache/atlas/util/RestUtilsTest.java | 139 +-
.../atlas/web/adapters/TestEntitiesREST.java | 2 -
.../atlas/web/adapters/TestEntityREST.java | 2 -
.../AtlasAuthenticationKerberosFilterTest.java | 4 +-
.../atlas/web/integration/BaseResourceIT.java | 366 +++--
.../atlas/web/integration/BasicSearchIT.java | 4 +-
.../DataSetLineageJerseyResourceIT.java | 55 +-
.../EntityDiscoveryJerseyResourceIT.java | 34 +-
.../web/integration/EntityJerseyResourceIT.java | 129 +-
.../EntityLineageJerseyResourceIT.java | 36 +-
.../integration/EntityV2JerseyResourceIT.java | 47 +-
.../MetadataDiscoveryJerseyResourceIT.java | 40 +-
.../integration/TypedefsJerseyResourceIT.java | 28 +-
.../web/integration/TypesJerseyResourceIT.java | 143 +-
.../web/service/ActiveInstanceStateTest.java | 2 +-
407 files changed, 8457 insertions(+), 52459 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/falcon-bridge/pom.xml
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/pom.xml b/addons/falcon-bridge/pom.xml
index a732e5d..f5b0b45 100644
--- a/addons/falcon-bridge/pom.xml
+++ b/addons/falcon-bridge/pom.xml
@@ -48,11 +48,6 @@
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
<artifactId>atlas-client-v1</artifactId>
</dependency>
@@ -100,13 +95,6 @@
<type>pom</type>
<scope>test</scope>
</dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <classifier>tests</classifier>
- <scope>test</scope>
- </dependency>
</dependencies>
<profiles>
@@ -141,21 +129,6 @@
<version>${project.version}</version>
</artifactItem>
<artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-native_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-core_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-ast_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>${project.groupId}</groupId>
<artifactId>atlas-client-common</artifactId>
<version>${project.version}</version>
@@ -172,11 +145,6 @@
</artifactItem>
<artifactItem>
<groupId>${project.groupId}</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <version>${project.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>${project.groupId}</groupId>
<artifactId>atlas-intg</artifactId>
<version>${project.version}</version>
</artifactItem>
@@ -196,28 +164,8 @@
<version>${project.version}</version>
</artifactItem>
<artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>org.apache.kafka</groupId>
- <artifactId>kafka_${scala.binary.version}</artifactId>
+ <artifactId>kafka_${kafka.scala.binary.version}</artifactId>
<version>${kafka.version}</version>
</artifactItem>
<artifactItem>
@@ -225,11 +173,6 @@
<artifactId>kafka-clients</artifactId>
<version>${kafka.version}</version>
</artifactItem>
- <artifactItem>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>${gson.version}</version>
- </artifactItem>
</artifactItems>
</configuration>
</execution>
@@ -302,7 +245,7 @@
</systemProperty>
<systemProperty>
<key>atlas.conf</key>
- <value>${project.build.directory}/../../../typesystem/target/test-classes</value>
+ <value>${project.build.directory}/../../../intg/target/test-classes</value>
</systemProperty>
<systemProperty>
<key>atlas.home</key>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
index ff74559..11d13b3 100644
--- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
+++ b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/bridge/FalconBridge.java
@@ -24,7 +24,7 @@ import org.apache.atlas.falcon.Util.EventUtil;
import org.apache.atlas.falcon.model.FalconDataTypes;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataTypes;
-import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.falcon.FalconException;
@@ -204,8 +204,6 @@ public class FalconBridge {
*
* @param process process entity
* @param falconStore config store
- * @param user falcon user
- * @param timestamp timestamp of entity
* @return process instance reference
*
* @throws FalconException if retrieving from the configuration store fail
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/hook/FalconHook.java
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/hook/FalconHook.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/hook/FalconHook.java
index 188b754..77177b4 100644
--- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/hook/FalconHook.java
+++ b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/hook/FalconHook.java
@@ -25,8 +25,9 @@ import org.apache.atlas.falcon.event.FalconEvent;
import org.apache.atlas.falcon.publisher.FalconEventPublisher;
import org.apache.atlas.hook.AtlasHook;
import org.apache.atlas.kafka.NotificationProvider;
-import org.apache.atlas.notification.hook.HookNotification;
-import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest;
import org.apache.falcon.FalconException;
import org.apache.falcon.entity.store.ConfigurationStore;
import org.apache.falcon.entity.v0.feed.Feed;
@@ -151,14 +152,14 @@ public class FalconHook extends AtlasHook implements FalconEventPublisher {
private void fireAndForget(FalconEvent event) throws FalconException, URISyntaxException {
LOG.info("Entered Atlas hook for Falcon hook operation {}", event.getOperation());
- List<HookNotification.HookNotificationMessage> messages = new ArrayList<>();
+ List<HookNotification> messages = new ArrayList<>();
Operation op = getOperation(event.getOperation());
String user = getUser(event.getUser());
LOG.info("fireAndForget user:{}", user);
switch (op) {
case ADD:
- messages.add(new HookNotification.EntityCreateRequest(user, createEntities(event, user)));
+ messages.add(new EntityCreateRequest(user, createEntities(event, user)));
break;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java b/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java
index 7212921..c073b3e 100644
--- a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java
+++ b/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/hook/FalconHookIT.java
@@ -26,9 +26,9 @@ import org.apache.atlas.falcon.bridge.FalconBridge;
import org.apache.atlas.falcon.model.FalconDataTypes;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataTypes;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.TypeUtils;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.atlas.utils.ParamChecker;
import org.apache.commons.configuration.Configuration;
@@ -162,7 +162,7 @@ public class FalconHookIT {
AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, cluster.getName());
}
- private TypeUtils.Pair<String, Feed> getHDFSFeed(String feedResource, String clusterName) throws Exception {
+ private TypesUtil.Pair<String, Feed> getHDFSFeed(String feedResource, String clusterName) throws Exception {
Feed feed = loadEntity(EntityType.FEED, feedResource, "feed" + random());
org.apache.falcon.entity.v0.feed.Cluster feedCluster = feed.getClusters().getClusters().get(0);
feedCluster.setName(clusterName);
@@ -174,9 +174,9 @@ public class FalconHookIT {
AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
FalconBridge.getFeedQualifiedName(feed.getName(), clusterName));
Referenceable processEntity = atlasClient.getEntity(processId);
- assertEquals(((List<Id>)processEntity.get("outputs")).get(0).getId()._getId(), feedId);
+ assertEquals(((List<Id>)processEntity.get("outputs")).get(0).getId(), feedId);
- String inputId = ((List<Id>) processEntity.get("inputs")).get(0).getId()._getId();
+ String inputId = ((List<Id>) processEntity.get("inputs")).get(0).getId();
Referenceable pathEntity = atlasClient.getEntity(inputId);
assertEquals(pathEntity.getTypeName(), HiveMetaStoreBridge.HDFS_PATH);
@@ -185,7 +185,7 @@ public class FalconHookIT {
assertEquals(pathEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
FalconBridge.normalize(dataLocation.getPath()));
- return TypeUtils.Pair.of(feedId, feed);
+ return TypesUtil.Pair.of(feedId, feed);
}
private Feed getTableFeed(String feedResource, String clusterName) throws Exception {
@@ -236,9 +236,9 @@ public class FalconHookIT {
AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
FalconBridge.getFeedQualifiedName(feedName, clusterName));
Referenceable processEntity = atlasClient.getEntity(processId);
- assertEquals(((List<Id>)processEntity.get("outputs")).get(0).getId()._getId(), feedId);
+ assertEquals(((List<Id>)processEntity.get("outputs")).get(0).getId(), feedId);
- String inputId = ((List<Id>) processEntity.get("inputs")).get(0).getId()._getId();
+ String inputId = ((List<Id>) processEntity.get("inputs")).get(0).getId();
Referenceable tableEntity = atlasClient.getEntity(inputId);
assertEquals(tableEntity.getTypeName(), HiveDataTypes.HIVE_TABLE.getName());
assertEquals(tableEntity.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME),
@@ -280,7 +280,7 @@ public class FalconHookIT {
Cluster cluster = loadEntity(EntityType.CLUSTER, CLUSTER_RESOURCE, "cluster" + random());
STORE.publish(EntityType.CLUSTER, cluster);
- TypeUtils.Pair<String, Feed> result = getHDFSFeed(FEED_HDFS_RESOURCE, cluster.getName());
+ TypesUtil.Pair<String, Feed> result = getHDFSFeed(FEED_HDFS_RESOURCE, cluster.getName());
Feed infeed = result.right;
String infeedId = result.left;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hbase-bridge/pom.xml
----------------------------------------------------------------------
diff --git a/addons/hbase-bridge/pom.xml b/addons/hbase-bridge/pom.xml
index ebdb361..3dc4f1b 100644
--- a/addons/hbase-bridge/pom.xml
+++ b/addons/hbase-bridge/pom.xml
@@ -124,13 +124,6 @@
</dependency>
<dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <classifier>tests</classifier>
- <scope>test</scope>
- </dependency>
-
- <dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
<version>${hbase.version}</version>
@@ -186,42 +179,6 @@
<id>dist</id>
<build>
<plugins>
- <plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- <version>3.2.0</version>
- <executions>
- <execution>
- <id>scala-compile-first</id>
- <phase>process-resources</phase>
- <goals>
- <goal>compile</goal>
- </goals>
- </execution>
- <execution>
- <id>scala-test-compile-first</id>
- <phase>process-test-resources</phase>
- <goals>
- <goal>testCompile</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <scalaVersion>${scala.version}</scalaVersion>
- <recompileMode>incremental</recompileMode>
- <useZincServer>true</useZincServer>
- <source>1.7</source>
- <target>1.7</target>
- <args>
- <arg>-unchecked</arg>
- <arg>-deprecation</arg>
- <arg>-feature</arg>
- </args>
- <jvmArgs>
- <jvmArg>-Xmx512m</jvmArg>
- </jvmArgs>
- </configuration>
- </plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
@@ -244,21 +201,6 @@
<version>${project.version}</version>
</artifactItem>
<artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-native_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-core_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-ast_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>${project.groupId}</groupId>
<artifactId>atlas-client-v1</artifactId>
<version>${project.version}</version>
@@ -275,11 +217,6 @@
</artifactItem>
<artifactItem>
<groupId>${project.groupId}</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <version>${project.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>${project.groupId}</groupId>
<artifactId>atlas-notification</artifactId>
<version>${project.version}</version>
</artifactItem>
@@ -294,33 +231,8 @@
<version>${project.version}</version>
</artifactItem>
<artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>com.google.inject.extensions</groupId>
- <artifactId>guice-multibindings</artifactId>
- <version>${guice.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>org.apache.kafka</groupId>
- <artifactId>kafka_${scala.binary.version}</artifactId>
+ <artifactId>kafka_${kafka.scala.binary.version}</artifactId>
<version>${kafka.version}</version>
</artifactItem>
<artifactItem>
@@ -328,11 +240,6 @@
<artifactId>kafka-clients</artifactId>
<version>${kafka.version}</version>
</artifactItem>
- <artifactItem>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>${gson.version}</version>
- </artifactItem>
</artifactItems>
</configuration>
</execution>
@@ -406,7 +313,7 @@
</systemProperty>
<systemProperty>
<key>atlas.conf</key>
- <value>${project.build.directory}/../../../typesystem/target/test-classes</value>
+ <value>${project.build.directory}/../../../intg/target/test-classes</value>
</systemProperty>
<systemProperty>
<key>atlas.home</key>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
----------------------------------------------------------------------
diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
index 8c7481a..03e340c 100644
--- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
+++ b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/bridge/HBaseAtlasHook.java
@@ -23,8 +23,11 @@ import org.apache.atlas.AtlasConstants;
import org.apache.atlas.hbase.model.HBaseOperationContext;
import org.apache.atlas.hbase.model.HBaseDataTypes;
import org.apache.atlas.hook.AtlasHook;
-import org.apache.atlas.notification.hook.HookNotification;
-import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityDeleteRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityUpdateRequest;
import org.apache.commons.configuration.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -219,13 +222,13 @@ public class HBaseAtlasHook extends AtlasHook {
case CREATE_NAMESPACE:
LOG.info("Create NameSpace {}", nameSpaceRef.get(REFERENCEABLE_ATTRIBUTE_NAME));
- hbaseOperationContext.addMessage(new HookNotification.EntityCreateRequest(hbaseOperationContext.getUser(), nameSpaceRef));
+ hbaseOperationContext.addMessage(new EntityCreateRequest(hbaseOperationContext.getUser(), nameSpaceRef));
break;
case ALTER_NAMESPACE:
LOG.info("Modify NameSpace {}", nameSpaceRef.get(REFERENCEABLE_ATTRIBUTE_NAME));
- hbaseOperationContext.addMessage(new HookNotification.EntityUpdateRequest(hbaseOperationContext.getUser(), nameSpaceRef));
+ hbaseOperationContext.addMessage(new EntityUpdateRequest(hbaseOperationContext.getUser(), nameSpaceRef));
break;
}
}
@@ -235,10 +238,10 @@ public class HBaseAtlasHook extends AtlasHook {
LOG.info("Delete NameSpace {}", nameSpaceQualifiedName);
- hbaseOperationContext.addMessage(new HookNotification.EntityDeleteRequest(hbaseOperationContext.getUser(),
- HBaseDataTypes.HBASE_NAMESPACE.getName(),
- REFERENCEABLE_ATTRIBUTE_NAME,
- nameSpaceQualifiedName));
+ hbaseOperationContext.addMessage(new EntityDeleteRequest(hbaseOperationContext.getUser(),
+ HBaseDataTypes.HBASE_NAMESPACE.getName(),
+ REFERENCEABLE_ATTRIBUTE_NAME,
+ nameSpaceQualifiedName));
}
private void createOrUpdateTableInstance(HBaseOperationContext hbaseOperationContext) {
@@ -252,13 +255,13 @@ public class HBaseAtlasHook extends AtlasHook {
case CREATE_TABLE:
LOG.info("Create Table {}", tableRef.get(REFERENCEABLE_ATTRIBUTE_NAME));
- hbaseOperationContext.addMessage(new HookNotification.EntityCreateRequest(hbaseOperationContext.getUser(), nameSpaceRef, tableRef));
+ hbaseOperationContext.addMessage(new EntityCreateRequest(hbaseOperationContext.getUser(), nameSpaceRef, tableRef));
break;
case ALTER_TABLE:
LOG.info("Modify Table {}", tableRef.get(REFERENCEABLE_ATTRIBUTE_NAME));
- hbaseOperationContext.addMessage(new HookNotification.EntityUpdateRequest(hbaseOperationContext.getUser(), nameSpaceRef, tableRef));
+ hbaseOperationContext.addMessage(new EntityUpdateRequest(hbaseOperationContext.getUser(), nameSpaceRef, tableRef));
break;
}
}
@@ -276,10 +279,10 @@ public class HBaseAtlasHook extends AtlasHook {
LOG.info("Delete Table {}", tableQualifiedName);
- hbaseOperationContext.addMessage(new HookNotification.EntityDeleteRequest(hbaseOperationContext.getUser(),
- HBaseDataTypes.HBASE_TABLE.getName(),
- REFERENCEABLE_ATTRIBUTE_NAME,
- tableQualifiedName));
+ hbaseOperationContext.addMessage(new EntityDeleteRequest(hbaseOperationContext.getUser(),
+ HBaseDataTypes.HBASE_TABLE.getName(),
+ REFERENCEABLE_ATTRIBUTE_NAME,
+ tableQualifiedName));
}
private void createOrUpdateColumnFamilyInstance(HBaseOperationContext hbaseOperationContext) {
@@ -291,13 +294,13 @@ public class HBaseAtlasHook extends AtlasHook {
case CREATE_COLUMN_FAMILY:
LOG.info("Create ColumnFamily {}", columnFamilyRef.get(REFERENCEABLE_ATTRIBUTE_NAME));
- hbaseOperationContext.addMessage(new HookNotification.EntityCreateRequest(hbaseOperationContext.getUser(), nameSpaceRef, tableRef, columnFamilyRef));
+ hbaseOperationContext.addMessage(new EntityCreateRequest(hbaseOperationContext.getUser(), nameSpaceRef, tableRef, columnFamilyRef));
break;
case ALTER_COLUMN_FAMILY:
LOG.info("Alter ColumnFamily {}", columnFamilyRef.get(REFERENCEABLE_ATTRIBUTE_NAME));
- hbaseOperationContext.addMessage(new HookNotification.EntityUpdateRequest(hbaseOperationContext.getUser(), nameSpaceRef, tableRef, columnFamilyRef));
+ hbaseOperationContext.addMessage(new EntityUpdateRequest(hbaseOperationContext.getUser(), nameSpaceRef, tableRef, columnFamilyRef));
break;
}
}
@@ -316,10 +319,10 @@ public class HBaseAtlasHook extends AtlasHook {
LOG.info("Delete ColumnFamily {}", columnFamilyQualifiedName);
- hbaseOperationContext.addMessage(new HookNotification.EntityDeleteRequest(hbaseOperationContext.getUser(),
- HBaseDataTypes.HBASE_COLUMN_FAMILY.getName(),
- REFERENCEABLE_ATTRIBUTE_NAME,
- columnFamilyQualifiedName));
+ hbaseOperationContext.addMessage(new EntityDeleteRequest(hbaseOperationContext.getUser(),
+ HBaseDataTypes.HBASE_COLUMN_FAMILY.getName(),
+ REFERENCEABLE_ATTRIBUTE_NAME,
+ columnFamilyQualifiedName));
}
@@ -491,7 +494,7 @@ public class HBaseAtlasHook extends AtlasHook {
LOG.debug("==> HBaseAtlasHook.notifyAsPrivilegedAction({})", hbaseOperationContext);
}
- final List<HookNotification.HookNotificationMessage> messages = hbaseOperationContext.getMessages();
+ final List<HookNotification> messages = hbaseOperationContext.getMessages();
try {
@@ -534,7 +537,7 @@ public class HBaseAtlasHook extends AtlasHook {
*
* @param messages hook notification messages
*/
- protected void notifyEntities(List<HookNotification.HookNotificationMessage> messages) {
+ protected void notifyEntities(List<HookNotification> messages) {
final int maxRetries = atlasProperties.getInt(HOOK_NUM_RETRIES, 3);
notifyEntities(messages, maxRetries);
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java
----------------------------------------------------------------------
diff --git a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java
index ce0f212..bc8485b 100644
--- a/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java
+++ b/addons/hbase-bridge/src/main/java/org/apache/atlas/hbase/model/HBaseOperationContext.java
@@ -19,7 +19,7 @@
package org.apache.atlas.hbase.model;
import org.apache.atlas.hbase.bridge.HBaseAtlasHook;
-import org.apache.atlas.notification.hook.HookNotification;
+import org.apache.atlas.model.notification.HookNotification;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -74,7 +74,7 @@ public class HBaseOperationContext {
this(null, nameSpace, null, tableName, null, hColumnDescriptor, columnFamily, operation, ugi, user, owner, hbaseConf);
}
- private List<HookNotification.HookNotificationMessage> messages = new ArrayList<>();
+ private List<HookNotification> messages = new ArrayList<>();
public UserGroupInformation getUgi() {
return ugi;
@@ -120,7 +120,7 @@ public class HBaseOperationContext {
return columnFamily;
}
- public void addMessage(HookNotification.HookNotificationMessage message) {
+ public void addMessage(HookNotification message) {
messages.add(message);
}
@@ -128,7 +128,7 @@ public class HBaseOperationContext {
return owner;
}
- public List<HookNotification.HookNotificationMessage> getMessages() {
+ public List<HookNotification> getMessages() {
return messages;
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java b/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java
index 032cb51..0d2e8df 100644
--- a/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java
+++ b/addons/hbase-bridge/src/test/java/org/apache/atlas/hbase/HBaseAtlasHookIT.java
@@ -22,7 +22,7 @@ import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.hbase.bridge.HBaseAtlasHook;
import org.apache.atlas.hbase.model.HBaseDataTypes;
-import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.atlas.utils.ParamChecker;
import org.apache.hadoop.conf.Configuration;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hdfs-model/pom.xml
----------------------------------------------------------------------
diff --git a/addons/hdfs-model/pom.xml b/addons/hdfs-model/pom.xml
index 4fd738a..b3f06b7 100644
--- a/addons/hdfs-model/pom.xml
+++ b/addons/hdfs-model/pom.xml
@@ -45,11 +45,6 @@
<!-- apache atlas core dependencies -->
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
<artifactId>atlas-client-v1</artifactId>
</dependency>
@@ -83,72 +78,11 @@
<artifactId>atlas-repository</artifactId>
<scope>test</scope>
</dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <classifier>tests</classifier>
- <scope>test</scope>
- </dependency>
</dependencies>
<build>
<plugins>
<plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- <version>3.2.0</version>
- <executions>
- <execution>
- <id>scala-compile-first</id>
- <phase>process-resources</phase>
- <goals>
- <goal>compile</goal>
- </goals>
- </execution>
- <execution>
- <id>scala-test-compile-first</id>
- <phase>process-test-resources</phase>
- <goals>
- <goal>testCompile</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <scalaVersion>${scala.version}</scalaVersion>
- <recompileMode>incremental</recompileMode>
- <useZincServer>true</useZincServer>
- <source>1.7</source>
- <target>1.7</target>
- <args>
- <arg>-unchecked</arg>
- <arg>-deprecation</arg>
- <arg>-feature</arg>
- </args>
- <jvmArgs>
- <jvmArg>-Xmx512m</jvmArg>
- </jvmArgs>
- <!--
- <javacArgs>
- <javacArg>-source</javacArg>
- <javacArg>${java.version}</javacArg>
- <javacArg>-target</javacArg>
- <javacArg>${java.version}</javacArg>
- </javacArgs>
- -->
- <!-- The following plugin is required to use quasiquotes in Scala 2.10 and is used
- by Spark SQL for code generation. -->
- <!--<compilerPlugins>
- <compilerPlugin>
- <groupId>org.scalamacros</groupId>
- <artifactId>paradise_${scala.version}</artifactId>
- <version>${scala.macros.version}</version>
- </compilerPlugin>
- </compilerPlugins>-->
- </configuration>
- </plugin>
-
- <plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-site-plugin</artifactId>
<dependencies>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hive-bridge/pom.xml
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/pom.xml b/addons/hive-bridge/pom.xml
index faf14d3..b16d1c3 100755
--- a/addons/hive-bridge/pom.xml
+++ b/addons/hive-bridge/pom.xml
@@ -95,11 +95,6 @@
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.apache.atlas</groupId>
<artifactId>atlas-client-v1</artifactId>
</dependency>
@@ -156,7 +151,7 @@
<dependency>
<groupId>org.apache.atlas</groupId>
- <artifactId>atlas-typesystem</artifactId>
+ <artifactId>atlas-intg</artifactId>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
@@ -189,21 +184,6 @@
<version>${project.version}</version>
</artifactItem>
<artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-native_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-core_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.json4s</groupId>
- <artifactId>json4s-ast_${scala.binary.version}</artifactId>
- <version>${json.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>${project.groupId}</groupId>
<artifactId>atlas-client-common</artifactId>
<version>${project.version}</version>
@@ -220,11 +200,6 @@
</artifactItem>
<artifactItem>
<groupId>${project.groupId}</groupId>
- <artifactId>atlas-typesystem</artifactId>
- <version>${project.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>${project.groupId}</groupId>
<artifactId>atlas-intg</artifactId>
<version>${project.version}</version>
</artifactItem>
@@ -244,28 +219,8 @@
<version>${project.version}</version>
</artifactItem>
<artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- <version>${scala.version}</version>
- </artifactItem>
- <artifactItem>
<groupId>org.apache.kafka</groupId>
- <artifactId>kafka_${scala.binary.version}</artifactId>
+ <artifactId>kafka_${kafka.scala.binary.version}</artifactId>
<version>${kafka.version}</version>
</artifactItem>
<artifactItem>
@@ -273,11 +228,6 @@
<artifactId>kafka-clients</artifactId>
<version>${kafka.version}</version>
</artifactItem>
- <artifactItem>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>${gson.version}</version>
- </artifactItem>
</artifactItems>
</configuration>
</execution>
@@ -351,7 +301,7 @@
</systemProperty>
<systemProperty>
<key>atlas.conf</key>
- <value>${project.build.directory}/../../../typesystem/target/test-classes</value>
+ <value>${project.build.directory}/../../../intg/target/test-classes</value>
</systemProperty>
<systemProperty>
<key>atlas.home</key>
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/ColumnLineageUtils.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/ColumnLineageUtils.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/ColumnLineageUtils.java
index 663fcdc..ba10008 100644
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/ColumnLineageUtils.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/ColumnLineageUtils.java
@@ -20,7 +20,7 @@ package org.apache.atlas.hive.bridge;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.hive.model.HiveDataTypes;
-import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.hadoop.hive.ql.hooks.LineageInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
index d2d2fb5..ab0094b 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
@@ -27,10 +27,10 @@ import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.hive.hook.HiveHook;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.hook.AtlasHookException;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.type.AtlasType;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
@@ -170,7 +170,7 @@ public class HiveMetaStoreBridge {
dbRef = createDBInstance(db);
dbRef = registerInstance(dbRef);
} else {
- LOG.info("Database {} is already registered with id {}. Updating it.", databaseName, dbRef.getId().id);
+ LOG.info("Database {} is already registered with id {}. Updating it.", databaseName, dbRef.getId().getId());
dbRef = createOrUpdateDBInstance(db, dbRef);
updateInstance(dbRef);
}
@@ -208,7 +208,7 @@ public class HiveMetaStoreBridge {
String typeName = referenceable.getTypeName();
LOG.debug("creating instance of type {}", typeName);
- String entityJSON = InstanceSerialization.toJson(referenceable, true);
+ String entityJSON = AtlasType.toV1Json(referenceable);
LOG.debug("Submitting new entity {} = {}", referenceable.getTypeName(), entityJSON);
List<String> guids = getAtlasClient().createEntity(entityJSON);
LOG.debug("created instance for type {}, guid: {}", typeName, guids);
@@ -506,7 +506,7 @@ public class HiveMetaStoreBridge {
tableReference = registerInstance(tableReference);
} else {
LOG.info("Table {}.{} is already registered with id {}. Updating entity.", dbName, tableName,
- tableReference.getId().id);
+ tableReference.getId().getId());
tableReference = createOrUpdateTableInstance(dbReference, tableReference, table);
updateInstance(tableReference);
}
@@ -520,10 +520,10 @@ public class HiveMetaStoreBridge {
String typeName = referenceable.getTypeName();
LOG.debug("updating instance of type {}", typeName);
- String entityJSON = InstanceSerialization.toJson(referenceable, true);
+ String entityJSON = AtlasType.toV1Json(referenceable);
LOG.debug("Updating entity {} = {}", referenceable.getTypeName(), entityJSON);
- atlasClient.updateEntity(referenceable.getId().id, referenceable);
+ atlasClient.updateEntity(referenceable.getId().getId(), referenceable);
}
public Referenceable fillStorageDesc(StorageDescriptor storageDesc, String tableQualifiedName,
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
index 0e8284d..57f5efb 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
@@ -28,8 +28,11 @@ import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.model.HiveDataTypes;
import org.apache.atlas.hook.AtlasHook;
import org.apache.atlas.hook.AtlasHookException;
-import org.apache.atlas.notification.hook.HookNotification;
-import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityDeleteRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityPartialUpdateRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityUpdateRequest;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.fs.Path;
@@ -331,7 +334,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
final String tblQualifiedName = HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(), output.getTable());
LOG.info("Deleting table {} ", tblQualifiedName);
event.addMessage(
- new HookNotification.EntityDeleteRequest(event.getUser(),
+ new EntityDeleteRequest(event.getUser(),
HiveDataTypes.HIVE_TABLE.getName(),
AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
tblQualifiedName));
@@ -350,7 +353,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
} else if (Type.DATABASE.equals(output.getType())) {
final String dbQualifiedName = HiveMetaStoreBridge.getDBQualifiedName(dgiBridge.getClusterName(), output.getDatabase().getName());
event.addMessage(
- new HookNotification.EntityDeleteRequest(event.getUser(),
+ new EntityDeleteRequest(event.getUser(),
HiveDataTypes.HIVE_DB.getName(),
AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
dbQualifiedName));
@@ -412,7 +415,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
Referenceable newColEntity = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
newColEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newColumnQFName);
- event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+ event.addMessage(new EntityPartialUpdateRequest(event.getUser(),
HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
oldColumnQFName, newColEntity));
}
@@ -481,7 +484,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
ArrayList<String> alias_list = new ArrayList<>();
alias_list.add(oldTable.getTableName().toLowerCase());
newEntity.set(HiveMetaStoreBridge.TABLE_ALIAS_LIST, alias_list);
- event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+ event.addMessage(new EntityPartialUpdateRequest(event.getUser(),
HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
oldTableQFName, newEntity));
@@ -499,7 +502,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
Referenceable newColEntity = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
///Only QF Name changes
newColEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newColumnQFName);
- event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+ event.addMessage(new EntityPartialUpdateRequest(event.getUser(),
HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
oldColumnQFName, newColEntity));
newColEntities.add(newColEntity);
@@ -518,7 +521,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
final Referenceable newSDEntity = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.getName());
newSDEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newSDQFName);
- event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(),
+ event.addMessage(new EntityPartialUpdateRequest(event.getUser(),
HiveDataTypes.HIVE_STORAGEDESC.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
oldSDQFName, newSDEntity));
@@ -593,7 +596,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
}
if (!entities.isEmpty()) {
- event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
+ event.addMessage(new EntityUpdateRequest(event.getUser(), entities));
}
return result;
@@ -719,7 +722,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
private void addEntityUpdateNotificationMessagess(final HiveEventContext event, final Collection<Referenceable> entities) {
// process each entity as separate message to avoid running into OOM errors
for (Referenceable entity : entities) {
- event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entity));
+ event.addMessage(new EntityUpdateRequest(event.getUser(), entity));
}
}
@@ -1089,7 +1092,7 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
public Map<String, List<ColumnLineageUtils.HiveColumnLineageInfo>> lineageInfo;
- private List<HookNotification.HookNotificationMessage> messages = new ArrayList<>();
+ private List<HookNotification> messages = new ArrayList<>();
public void setInputs(Set<ReadEntity> inputs) {
this.inputs = inputs;
@@ -1172,11 +1175,11 @@ public class HiveHook extends AtlasHook implements ExecuteWithHookContext {
return queryStartTime;
}
- public void addMessage(HookNotification.HookNotificationMessage message) {
+ public void addMessage(HookNotification message) {
messages.add(message);
}
- public List<HookNotification.HookNotificationMessage> getMessages() {
+ public List<HookNotification> getMessages() {
return messages;
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java
index 0d163ee..d9fb46e 100644
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/HiveITBase.java
@@ -23,8 +23,8 @@ import org.apache.atlas.AtlasClient;
import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
import org.apache.atlas.hive.hook.HiveHookIT;
import org.apache.atlas.hive.model.HiveDataTypes;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Id;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.atlas.utils.ParamChecker;
import org.apache.commons.configuration.Configuration;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeTest.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeTest.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeTest.java
index 0256cf3..819d734 100644
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeTest.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridgeTest.java
@@ -21,7 +21,7 @@ package org.apache.atlas.hive.bridge;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.hive.model.HiveDataTypes;
-import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.Database;
@@ -38,8 +38,8 @@ import org.mockito.MockitoAnnotations;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
-import scala.actors.threadpool.Arrays;
+import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetastoreBridgeIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetastoreBridgeIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetastoreBridgeIT.java
index d09db1b..22d3c59 100644
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetastoreBridgeIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/bridge/HiveMetastoreBridgeIT.java
@@ -21,8 +21,8 @@ package org.apache.atlas.hive.bridge;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.hive.HiveITBase;
import org.apache.atlas.hive.model.HiveDataTypes;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Id;
import org.testng.annotations.Test;
import java.util.List;
@@ -50,7 +50,7 @@ public class HiveMetastoreBridgeIT extends HiveITBase {
List<Id> outputs = (List<Id>) processReference.get(OUTPUTS);
assertEquals(outputs.size(), 1);
- assertEquals(outputs.get(0).getId()._getId(), tableId);
+ assertEquals(outputs.get(0).getId(), tableId);
int tableCount = atlasClient.listEntities(HiveDataTypes.HIVE_TABLE.getName()).size();
@@ -84,6 +84,6 @@ public class HiveMetastoreBridgeIT extends HiveITBase {
getTableProcessQualifiedName(DEFAULT_DB, tableName), null);
List<Id> outputs = (List<Id>) atlasClient.getEntity(processId).get(OUTPUTS);
assertEquals(outputs.size(), 1);
- assertEquals(outputs.get(0).getId()._getId(), tableId);
+ assertEquals(outputs.get(0).getId(), tableId);
}
}
[22/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java b/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
deleted file mode 100755
index bd730e4..0000000
--- a/repository/src/test/java/org/apache/atlas/discovery/GraphBackedDiscoveryServiceTest.java
+++ /dev/null
@@ -1,1334 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.discovery;
-
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.BaseRepositoryTest;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
-import org.apache.atlas.query.QueryParams;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
-import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
-import org.apache.atlas.repository.graphdb.GremlinVersion;
-import org.apache.atlas.util.AtlasGremlinQueryProvider;
-import org.apache.atlas.util.AtlasGremlinQueryProvider.AtlasGremlinQuery;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.text.SimpleDateFormat;
-import java.util.*;
-
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class GraphBackedDiscoveryServiceTest extends BaseRepositoryTest {
-
- @Inject
- private MetadataRepository repositoryService;
-
- @Inject
- private GraphBackedDiscoveryService discoveryService;
- private QueryParams queryParams = new QueryParams(40, 0);
- private static final String idType = "idType";
- @Override
- @BeforeClass
- public void setUp() throws Exception {
- super.setUp();
-
- repositoryService = TestUtils.addTransactionWrapper(repositoryService);
- final TypeSystem typeSystem = TypeSystem.getInstance();
- Collection<String> oldTypeNames = new HashSet<>();
- oldTypeNames.addAll(typeSystem.getTypeNames());
-
- TestUtils.defineDeptEmployeeTypes(typeSystem);
-
- addIndexesForNewTypes(oldTypeNames, typeSystem);
-
- ITypedReferenceableInstance hrDept = TestUtils.createDeptEg1(typeSystem);
- repositoryService.createEntities(hrDept);
-
- ITypedReferenceableInstance jane = repositoryService.getEntityDefinition("Manager", "name", "Jane");
- Id janeGuid = jane.getId();
- ClassType personType = typeSystem.getDataType(ClassType.class, "Person");
- ITypedReferenceableInstance instance = personType.createInstance(janeGuid);
- instance.set("orgLevel", "L1");
- repositoryService.updatePartial(instance);
- }
-
- private void addIndexesForNewTypes(Collection<String> oldTypeNames, final TypeSystem typeSystem) throws AtlasException {
- Set<String> newTypeNames = new HashSet<>();
- newTypeNames.addAll(typeSystem.getTypeNames());
- newTypeNames.removeAll(oldTypeNames);
-
- Collection<IDataType> newTypes = new ArrayList<>();
- for(String name : newTypeNames) {
- try {
- newTypes.add(typeSystem.getDataType(IDataType.class, name));
- } catch (AtlasException e) {
- e.printStackTrace();
- }
-
- }
-
- //We need to commit the transaction before creating the indices to release the locks held by the transaction.
- //otherwise, the index commit will fail while waiting for the those locks to be released.
- AtlasGraphProvider.getGraphInstance().commit();
- GraphBackedSearchIndexer idx = new GraphBackedSearchIndexer(new AtlasTypeRegistry());
- idx.onAdd(newTypes);
- }
-
- @BeforeMethod
- public void setupContext() {
- RequestContext.createContext();
- }
-
- @AfterClass
- public void tearDown() throws Exception {
- super.tearDown();
- }
-
- private String searchByDSL(String dslQuery) throws Exception {
- return discoveryService.searchByDSL(dslQuery, queryParams);
- }
-
- @Test
- public void testSearchBySystemProperties() throws Exception {
- //system property in select
- String dslQuery = "from Department select __guid";
-
- String jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- assertEquals(results.length(), 3);
-
- JSONArray rows = results.getJSONArray("rows");
- assertNotNull(rows);
- assertEquals(rows.length(), 1);
- assertNotNull(rows.getJSONObject(0).getString("__guid"));
-
- //system property in where clause
- String guid = rows.getJSONObject(0).getString("__guid");
- dslQuery = "Department where __guid = '" + guid + "' and __state = 'ACTIVE'";
- jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
-
- results = new JSONObject(jsonResults);
- assertEquals(results.length(), 3);
-
- rows = results.getJSONArray("rows");
- assertNotNull(rows);
- assertEquals(rows.length(), 1);
-
- //Assert system attributes are not null
- JSONObject sys_attributes = (JSONObject)rows.getJSONObject(0).get("$systemAttributes$");
- assertNotNull(sys_attributes.get("createdBy"));
- assertNotNull(sys_attributes.get("modifiedBy"));
- assertNotNull(sys_attributes.get("createdTime"));
- assertNotNull(sys_attributes.get("modifiedTime"));
-
-
- //Assert that createdTime and modifiedTime are valid dates
- String createdTime = (String) sys_attributes.get("createdTime");
- String modifiedTime = (String) sys_attributes.get("modifiedTime");
- final String outputFormat = "EEE MMM dd HH:mm:ss z yyyy";
- SimpleDateFormat df = new SimpleDateFormat(outputFormat);
- Date createdDate = df.parse(createdTime);
- Date modifiedDate = df.parse(modifiedTime);
- assertNotNull(createdDate);
- assertNotNull(modifiedDate);
-
- final String testTs = "\"2011-11-01T02:35:58.440Z\"";
- dslQuery = "Department where " + Constants.TIMESTAMP_PROPERTY_KEY + " > " + testTs;
- jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
-
- results = new JSONObject(jsonResults);
- assertEquals(results.length(), 3);
-
- rows = results.getJSONArray("rows");
- assertNotNull(rows);
- assertEquals(rows.length(), 1);
-
-
- dslQuery = "Department where " + Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY + " > " + testTs;
- jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
-
- results = new JSONObject(jsonResults);
- assertEquals(results.length(), 3);
-
- rows = results.getJSONArray("rows");
- assertNotNull(rows);
- assertEquals(rows.length(), 1);
-
- dslQuery = "from Department select " + Constants.CREATED_BY_KEY;
- jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
-
- results = new JSONObject(jsonResults);
- assertEquals(results.length(), 3);
-
- rows = results.getJSONArray("rows");
- assertNotNull(rows);
- assertEquals(rows.length(), 1);
-
- dslQuery = "from Department select " + Constants.MODIFIED_BY_KEY;
- jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
-
- results = new JSONObject(jsonResults);
- assertEquals(results.length(), 3);
-
- rows = results.getJSONArray("rows");
- assertNotNull(rows);
- assertEquals(rows.length(), 1);
- }
-
-
- /*
- * https://issues.apache.org/jira/browse/ATLAS-1875
- */
- @Test
- public void testGremlinSearchReturnVertexId() throws Exception {
- final AtlasGremlinQueryProvider gremlinQueryProvider;
- gremlinQueryProvider = AtlasGremlinQueryProvider.INSTANCE;
-
- // For tinkerpop2 this query was g.V.range(0,0).collect()
- // For tinkerpop3 it should be g.V().range(0,1).toList()
- final String query = gremlinQueryProvider.getQuery(AtlasGremlinQuery.GREMLIN_SEARCH_RETURNS_VERTEX_ID);
-
- List<Map<String,String>> gremlinResults = discoveryService.searchByGremlin(query);
- assertEquals(gremlinResults.size(), 1);
- Map<String, String> properties = gremlinResults.get(0);
- Assert.assertTrue(properties.containsKey(GraphBackedDiscoveryService.GREMLIN_ID_KEY));
-
- }
-
- /*
- * https://issues.apache.org/jira/browse/ATLAS-1875
- */
- @Test
- public void testGremlinSearchReturnEdgeIds() throws Exception {
- final AtlasGremlinQueryProvider gremlinQueryProvider;
- gremlinQueryProvider = AtlasGremlinQueryProvider.INSTANCE;
-
- // For tinkerpop2 this query was g.E.range(0,0).collect()
- // For tinkerpop3 it should be g.E().range(0,1).toList()
- final String query = gremlinQueryProvider.getQuery(AtlasGremlinQuery.GREMLIN_SEARCH_RETURNS_EDGE_ID);
-
- List<Map<String,String>> gremlinResults = discoveryService.searchByGremlin(query);
- assertEquals(gremlinResults.size(), 1);
- Map<String, String> properties = gremlinResults.get(0);
- Assert.assertTrue(properties.containsKey(GraphBackedDiscoveryService.GREMLIN_ID_KEY));
- Assert.assertTrue(properties.containsKey(GraphBackedDiscoveryService.GREMLIN_LABEL_KEY));
- Assert.assertTrue(properties.containsKey(GraphBackedDiscoveryService.GREMLIN_INVERTEX_KEY));
- Assert.assertTrue(properties.containsKey(GraphBackedDiscoveryService.GREMLIN_OUTVERTEX_KEY));
- }
-
-
- @Test
- public void testSearchByDSLReturnsEntity() throws Exception {
- String dslQuery = "from Department";
-
- String jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- assertEquals(results.length(), 3);
- System.out.println("results = " + results);
-
- Object query = results.get("query");
- assertNotNull(query);
-
- JSONObject dataType = results.getJSONObject("dataType");
- assertNotNull(dataType);
- String typeName = dataType.getString("typeName");
- assertNotNull(typeName);
- assertEquals(typeName, "Department");
-
- JSONArray rows = results.getJSONArray("rows");
- assertNotNull(rows);
- assertEquals(rows.length(), 1);
-
- //Assert that entity state is set in the result entities
- String entityState = rows.getJSONObject(0).getJSONObject("$id$").getString("state");
- assertEquals(entityState, Id.EntityState.ACTIVE.name());
- }
-
- @DataProvider(name = "dslLikeQueriesProvider")
- private Object[][] createDslLikeQueries() {
- return new Object[][]{
- {"hive_table where name like \"sa?es*\"", 3},
- {"hive_db where name like \"R*\"", 1},
- {"hive_db where hive_db.name like \"R???rt?*\" or hive_db.name like \"S?l?s\" or hive_db.name like\"Log*\"", 3},
- {"hive_db where hive_db.name like \"R???rt?*\" and hive_db.name like \"S?l?s\" and hive_db.name like\"Log*\"", 0},
- {"hive_table where name like 'sales*', db where name like 'Sa?es'", 1},
- {"hive_table where name like 'sales*' and db.name like 'Sa?es'", 1},
- {"hive_table where db.name like \"Sa*\"", 4},
- {"hive_table where db.name like \"Sa*\" and name like \"*dim\"", 3},
- };
- }
-
- @Test(dataProvider = "dslLikeQueriesProvider")
- public void testDslSearchUsingLikeOperator(String dslQuery, Integer expectedNumRows) throws Exception {
- runQuery(dslQuery, expectedNumRows, 50, 0);
- }
-
- @Test(expectedExceptions = Throwable.class)
- public void testSearchByDSLBadQuery() throws Exception {
- String dslQuery = "from blah";
- searchByDSL(dslQuery);
- Assert.fail();
- }
-
- @Test
- public void testRawSearch1() throws Exception {
- TestUtils.skipForGremlin3EnabledGraphDb();
- // Query for all Vertices in Graph
- Object r = discoveryService.searchByGremlin("g.V.toList()");
- Assert.assertTrue(r instanceof List);
- List<Map<String, Object>> resultList = (List<Map<String, Object>>) r;
- Assert.assertTrue(resultList.size() > 0);
- System.out.println("search result = " + r);
-
- // Query for all Vertices of a Type
- r = discoveryService.searchByGremlin("g.V.filter{it." + Constants.ENTITY_TYPE_PROPERTY_KEY + " == 'Department'}.toList()");
- Assert.assertTrue(r instanceof List);
- resultList = (List<Map<String, Object>>) r;
- Assert.assertTrue(resultList.size() > 0);
- System.out.println("search result = " + r);
-
- // Property Query: list all Person names
- r = discoveryService.searchByGremlin("g.V.filter{it." + Constants.ENTITY_TYPE_PROPERTY_KEY + " == 'Person'}.'Person.name'.toList()");
- Assert.assertTrue(r instanceof List);
- resultList = (List<Map<String, Object>>) r;
- Assert.assertTrue(resultList.size() > 0);
- System.out.println("search result = " + r);
- List<Object> names = new ArrayList<>(resultList.size());
- for (Map<String, Object> vertexProps : resultList) {
- names.addAll(vertexProps.values());
- }
- for (String name : Arrays.asList("John", "Max")) {
- Assert.assertTrue(names.contains(name));
- }
-
- // Query for all Vertices modified after 01/01/2015 00:00:00 GMT
- r = discoveryService.searchByGremlin("g.V.filter{it." + Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY + " > 1420070400000}.toList()");
- Assert.assertTrue(r instanceof List);
- resultList = (List<Map<String, Object>>) r;
- Assert.assertTrue(resultList.size() > 0);
- for (Map<String, Object> vertexProps : resultList) {
- Object object = vertexProps.get(Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY);
- assertNotNull(object);
- Long timestampAsLong = Long.valueOf((String)object);
- Assert.assertTrue(timestampAsLong > 1420070400000L);
- object = vertexProps.get(Constants.TIMESTAMP_PROPERTY_KEY);
- assertNotNull(object);
- }
- }
-
- @DataProvider(name = "comparisonQueriesProvider")
- private Object[][] createComparisonQueries() {
- //create queries the exercise the comparison logic for
- //all of the different supported data types
- return new Object[][] {
- {"Person where (birthday < \"1950-01-01T02:35:58.440Z\" )", 0},
- {"Person where (birthday > \"1975-01-01T02:35:58.440Z\" )", 2},
- {"Person where (birthday >= \"1975-01-01T02:35:58.440Z\" )", 2},
- {"Person where (birthday <= \"1950-01-01T02:35:58.440Z\" )", 0},
- {"Person where (birthday = \"1975-01-01T02:35:58.440Z\" )", 0},
- {"Person where (birthday != \"1975-01-01T02:35:58.440Z\" )", 4},
-
- {"Person where (hasPets = true)", 2},
- {"Person where (hasPets = false)", 2},
- {"Person where (hasPets != false)", 2},
- {"Person where (hasPets != true)", 2},
-
- {"Person where (numberOfCars > 0)", 2},
- {"Person where (numberOfCars > 1)", 1},
- {"Person where (numberOfCars >= 1)", 2},
- {"Person where (numberOfCars < 2)", 3},
- {"Person where (numberOfCars <= 2)", 4},
- {"Person where (numberOfCars = 2)", 1},
- {"Person where (numberOfCars != 2)", 3},
-
- {"Person where (houseNumber > 0)", 2},
- {"Person where (houseNumber > 17)", 1},
- {"Person where (houseNumber >= 17)", 2},
- {"Person where (houseNumber < 153)", 3},
- {"Person where (houseNumber <= 153)", 4},
- {"Person where (houseNumber = 17)", 1},
- {"Person where (houseNumber != 17)", 3},
-
- {"Person where (carMileage > 0)", 2},
- {"Person where (carMileage > 13)", 1},
- {"Person where (carMileage >= 13)", 2},
- {"Person where (carMileage < 13364)", 3},
- {"Person where (carMileage <= 13364)", 4},
- {"Person where (carMileage = 13)", 1},
- {"Person where (carMileage != 13)", 3},
-
- {"Person where (shares > 0)", 2},
- {"Person where (shares > 13)", 2},
- {"Person where (shares >= 16000)", 1},
- {"Person where (shares < 13364)", 2},
- {"Person where (shares <= 15000)", 3},
- {"Person where (shares = 15000)", 1},
- {"Person where (shares != 1)", 4},
-
- {"Person where (salary > 0)", 2},
- {"Person where (salary > 100000)", 2},
- {"Person where (salary >= 200000)", 1},
- {"Person where (salary < 13364)", 2},
- {"Person where (salary <= 150000)", 3},
- {"Person where (salary = 12334)", 0},
- {"Person where (salary != 12344)", 4},
-
- {"Person where (age > 36)", 1},
- {"Person where (age > 49)", 1},
- {"Person where (age >= 49)", 1},
- {"Person where (age < 50)", 3},
- {"Person where (age <= 35)", 2},
- {"Person where (age = 35)", 0},
- {"Person where (age != 35)", 4}
- };
- }
-
- @DataProvider(name = "dslQueriesProvider")
- private Object[][] createDSLQueries() {
- return new Object[][]{
- {"hive_db as inst where inst.name=\"Reporting\" select inst as id, inst.name", 1},
- {"from hive_db as h select h as id", 3},
- {"from hive_db", 3},
- {"hive_db", 3},
- {"hive_db where hive_db.name=\"Reporting\"", 1},
- {"hive_db hive_db.name = \"Reporting\"", 1},
- {"hive_db where hive_db.name=\"Reporting\" select name, owner", 1},
- {"hive_db has name", 3},
- {"hive_db, hive_table", 10},
- {"View is JdbcAccess", 2},
- {"hive_db as db1, hive_table where db1.name = \"Reporting\"", isGremlin3() ? 4 : 0}, //Not working in with Titan 0 - ATLAS-145
- // - Final working query -> discoveryService.searchByGremlin("L:{_var_0 = [] as Set;g.V().has(\"__typeName\", \"hive_db\").fill(_var_0);g.V().has(\"__superTypeNames\", \"hive_db\").fill(_var_0);_var_0._().as(\"db1\").in(\"__hive_table.db\").back(\"db1\").and(_().has(\"hive_db.name\", T.eq, \"Reporting\")).toList()}")
- /*
- {"hive_db, hive_process has name"}, //Invalid query
- {"hive_db where hive_db.name=\"Reporting\" and hive_db.createTime < " + System.currentTimeMillis()}
- */
- {"from hive_table", 10},
- {"hive_table", 10},
- {"hive_table isa Dimension", 3},
- {"hive_column where hive_column isa PII", 8},
- {"View is Dimension" , 2},
-// {"hive_column where hive_column isa PII select hive_column.name", 6}, //Not working - ATLAS-175
- {"hive_column select hive_column.name", 37},
- {"hive_column select name",37},
- {"hive_column where hive_column.name=\"customer_id\"", 6},
- {"from hive_table select hive_table.name", 10},
- {"hive_db where (name = \"Reporting\")", 1},
- {"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1", 1},
- {"hive_db where hive_db is JdbcAccess", 0}, //Not supposed to work
- {"hive_db hive_table", 10},
- {"hive_db where hive_db has name", 3},
- {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", isGremlin3() ? 4 : 0}, //Not working in Titan 0 -> ATLAS-145
- {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ", 1},
- {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
- {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
-
- /*
- todo: does not work - ATLAS-146
- {"hive_db where (name = \"Reporting\") and ((createTime + 1) > 0)"},
- {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name
- as dbName, tab.name as tabName"},
- {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name
- as dbName, tab.name as tabName"},
- {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
- select db1.name as dbName, tab.name as tabName"},
- {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
- select db1.name as dbName, tab.name as tabName"},
- */
- // trait searches
- {"Dimension", 5},
- {"JdbcAccess", 2},
- {"ETL", 5},
- {"Metric", 9},
- {"PII", 8},
- {"`Log Data`", 4},
- // Not sure what the expected rows should be, but since we didn't assign or do anything with the created
- // I assume it'll be zero
- {"`isa`", 0},
-
- /* Lineage queries are fired through ClosureQuery and are tested through HiveLineageJerseyResourceIt in webapp module.
- Commenting out the below queries since DSL to Gremlin parsing/translation fails with lineage queries when there are array types
- used within loop expressions which is the case with DataSet.inputs and outputs.`
- // Lineage
- {"Table LoadProcess outputTable"}, {"Table loop (LoadProcess outputTable)"},
- {"Table as _loop0 loop (LoadProcess outputTable) withPath"},
- {"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as "
- + "destTable withPath"},
- */
-// {"hive_table as t, sd, hive_column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as "
-// + "colType", 0}, //Not working - ATLAS-145 and ATLAS-166
-
- {"hive_table where name='sales_fact', db where name='Sales'", 1},
- {"hive_table where name='sales_fact', db where name='Reporting'", 0},
- {"hive_partition as p where values = ['2015-01-01']", 1},
-// {"StorageDesc select cols", 6} //Not working since loading of lists needs to be fixed yet
-
- //check supertypeNames
- {"DataSet where name='sales_fact'", 1},
- {"Asset where name='sales_fact'", 1}
- };
- }
-
- @DataProvider(name = "dslExplicitLimitQueriesProvider")
- private Object[][] createDSLQueriesWithExplicitLimit() {
- return new Object[][]{
- {"hive_column", 37, 40, 0},//with higher limit all rows returned
- {"hive_column limit 10", 10, 50, 0},//lower limit in query
- {"hive_column select hive_column.name limit 10", 5, 5, 0},//lower limit in query param
- {"hive_column select hive_column.name withPath", 20, 20, 0},//limit only in params
- //with offset, only remaining rows returned
- {"hive_column select hive_column.name limit 40 withPath", 17, 40, 20},
- //with higher offset, no rows returned
- {"hive_column select hive_column.name limit 40 withPath", 0, 40, 40},
- //offset used from query
- {"hive_column select hive_column.name limit 40 offset 10", 27, 40, 0},
- //offsets in query and parameter added up
- {"hive_column select hive_column.name limit 40 offset 10", 17, 40, 10},
- //works with where clause
- {"hive_db where name = 'Reporting' limit 10 offset 0", 1, 40, 0},
- //works with joins
- {"hive_db, hive_table where db.name = 'Reporting' limit 10", 1, 1, 0},
- {"hive_column limit 25", 5, 10, 20}, //last page should return records limited by limit in query
- {"hive_column limit 25", 0, 10, 30}, //offset > limit returns 0 rows
- };
- }
-
- @DataProvider(name = "dslLimitQueriesProvider")
- private Object[][] createDSLQueriesWithLimit() {
- return new Object[][]{
- {"hive_column limit 10 ", 10},
- {"hive_column select hive_column.name limit 10 ", 10},
- {"hive_column select hive_column.name withPath", 37},
- {"hive_column select hive_column.name limit 10 withPath", 10},
-
- {"from hive_db", 3},
- {"from hive_db limit 2", 2},
- {"from hive_db limit 2 offset 0", 2},
- {"from hive_db limit 2 offset 1", 2},
- {"from hive_db limit 3 offset 1", 2},
- {"hive_db", 3},
- {"hive_db where hive_db.name=\"Reporting\"", 1},
- {"hive_db where hive_db.name=\"Reporting\" or hive_db.name=\"Sales\" or hive_db.name=\"Logging\" limit 1 offset 1", 1},
- {"hive_db where hive_db.name=\"Reporting\" or hive_db.name=\"Sales\" or hive_db.name=\"Logging\" limit 1 offset 2", 1},
- {"hive_db where hive_db.name=\"Reporting\" or hive_db.name=\"Sales\" or hive_db.name=\"Logging\" limit 2 offset 1", 2},
- {"hive_db where hive_db.name=\"Reporting\" limit 10 ", 1},
- {"hive_db hive_db.name = \"Reporting\"", 1},
- {"hive_db where hive_db.name=\"Reporting\" select name, owner", 1},
- {"hive_db has name", 3},
- {"hive_db has name limit 2 offset 0", 2},
- {"hive_db has name limit 2 offset 1", 2},
- {"hive_db has name limit 10 offset 1", 2},
- {"hive_db has name limit 10 offset 0", 3},
- {"hive_db, hive_table", 10},
- {"hive_db, hive_table limit 5", 5},
- {"hive_db, hive_table limit 5 offset 0", 5},
- {"hive_db, hive_table limit 5 offset 5", 5},
-
- {"View is JdbcAccess", 2},
- {"View is JdbcAccess limit 1", 1},
- {"View is JdbcAccess limit 2 offset 1", 1},
- {"hive_db as db1, hive_table where db1.name = \"Reporting\"", isGremlin3() ? 4 : 0}, //Not working in Titan 0 - ATLAS-145
-
-
- {"from hive_table", 10},
- {"from hive_table limit 5", 5},
- {"from hive_table limit 5 offset 5", 5},
-
- {"hive_table", 10},
- {"hive_table limit 5", 5},
- {"hive_table limit 5 offset 5", 5},
-
- {"hive_table isa Dimension", 3},
- {"hive_table isa Dimension limit 2", 2},
- {"hive_table isa Dimension limit 2 offset 0", 2},
- {"hive_table isa Dimension limit 2 offset 1", 2},
- {"hive_table isa Dimension limit 3 offset 1", 2},
-
- {"hive_column where hive_column isa PII", 8},
- {"hive_column where hive_column isa PII limit 5", 5},
- {"hive_column where hive_column isa PII limit 5 offset 1", 5},
- {"hive_column where hive_column isa PII limit 5 offset 5", 3},
-
-
- {"View is Dimension" , 2},
- {"View is Dimension limit 1" , 1},
- {"View is Dimension limit 1 offset 1" , 1},
- {"View is Dimension limit 10 offset 1" , 1},
-
- {"hive_column select hive_column.name", 37},
- {"hive_column select hive_column.name limit 5", 5},
- {"hive_column select hive_column.name limit 5 offset 36", 1},
-
- {"hive_column select name", 37},
- {"hive_column select name limit 5", 5},
- {"hive_column select name limit 5 offset 36 ", 1},
-
- {"hive_column where hive_column.name=\"customer_id\"", 6},
- {"hive_column where hive_column.name=\"customer_id\" limit 2", 2},
- {"hive_column where hive_column.name=\"customer_id\" limit 2 offset 1", 2},
- {"hive_column where hive_column.name=\"customer_id\" limit 10 offset 3", 3},
-
- {"from hive_table select hive_table.name", 10},
- {"from hive_table select hive_table.name limit 5", 5},
- {"from hive_table select hive_table.name limit 5 offset 5", 5},
-
- {"hive_db where (name = \"Reporting\")", 1},
- {"hive_db where (name = \"Reporting\") limit 10", 1},
- {"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1", 1},
- {"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1 limit 10", 1},
- {"hive_db where hive_db is JdbcAccess", 0}, //Not supposed to work
- {"hive_db hive_table", 10},
- {"hive_db hive_table limit 5", 5},
- {"hive_db hive_table limit 5 offset 5", 5},
- {"hive_db where hive_db has name", 3},
- {"hive_db where hive_db has name limit 5", 3},
- {"hive_db where hive_db has name limit 2 offset 0", 2},
- {"hive_db where hive_db has name limit 2 offset 1", 2},
-
- {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", isGremlin3() ? 4 : 0}, //Not working in Titan 0 -> ATLAS-145
- {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ", 1},
- {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 limit 10", 1},
- {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 limit 10 offset 1", 0},
- {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 limit 10 offset 0", 1},
-
- {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
- {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 ", 1},
- {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
- {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 5", 0},
-
- {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
- {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
- {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 1", 0},
- {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
- {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 0 offset 1", 0},
-
- // trait searches
- {"Dimension", 5},
- {"Dimension limit 2", 2},
- {"Dimension limit 2 offset 1", 2},
- {"Dimension limit 5 offset 4", 1},
-
- {"JdbcAccess", 2},
- {"JdbcAccess limit 5 offset 0", 2},
- {"JdbcAccess limit 2 offset 1", 1},
- {"JdbcAccess limit 1", 1},
-
- {"ETL", 5},
- {"ETL limit 2", 2},
- {"ETL limit 1", 1},
- {"ETL limit 1 offset 0", 1},
- {"ETL limit 2 offset 1", 2},
-
- {"Metric", 9},
- {"Metric limit 10", 9},
- {"Metric limit 2", 2},
- {"Metric limit 10 offset 1", 8},
-
-
-
- {"PII", 8},
- {"PII limit 10", 8},
- {"PII limit 2", 2},
- {"PII limit 10 offset 1", 7},
-
- {"`Log Data`", 4},
- {"`Log Data` limit 3", 3},
- {"`Log Data` limit 10 offset 2", 2},
-
-
- {"hive_table where name='sales_fact', db where name='Sales'", 1},
- {"hive_table where name='sales_fact', db where name='Sales' limit 10", 1},
- {"hive_table where name='sales_fact', db where name='Sales' limit 10 offset 1", 0},
- {"hive_table where name='sales_fact', db where name='Reporting'", 0},
- {"hive_table where name='sales_fact', db where name='Reporting' limit 10", 0},
- {"hive_table where name='sales_fact', db where name='Reporting' limit 10 offset 1", 0},
- {"hive_partition as p where values = ['2015-01-01']", 1},
- {"hive_partition as p where values = ['2015-01-01'] limit 10", 1},
- {"hive_partition as p where values = ['2015-01-01'] limit 10 offset 1", 0},
-
- };
- }
-
- @DataProvider(name = "dslOrderByQueriesProvider")
- private Object[][] createDSLQueriesWithOrderBy() {
- Boolean isAscending = Boolean.TRUE;
- return new Object[][]{
- //test with alias
- // {"from hive_db select hive_db.name as 'o' orderby o limit 3", 3, "name", isAscending},
- {"from hive_db as h orderby h.owner limit 3", 3, "owner", isAscending},
- {"hive_column as c select c.name orderby hive_column.name ", 37, "c.name", isAscending},
- {"hive_column as c select c.name orderby hive_column.name limit 5", 5, "c.name", isAscending},
- {"hive_column as c select c.name orderby hive_column.name desc limit 5", 5, "c.name", !isAscending},
-
- {"from hive_db orderby hive_db.owner limit 3", 3, "owner", isAscending},
- {"hive_column select hive_column.name orderby hive_column.name ", 37, "hive_column.name", isAscending},
- {"hive_column select hive_column.name orderby hive_column.name limit 5", 5, "hive_column.name", isAscending},
- {"hive_column select hive_column.name orderby hive_column.name desc limit 5", 5, "hive_column.name", !isAscending},
-
- {"from hive_db orderby owner limit 3", 3, "owner", isAscending},
- {"hive_column select hive_column.name orderby name ", 37, "hive_column.name", isAscending},
- {"hive_column select hive_column.name orderby name limit 5", 5, "hive_column.name", isAscending},
- {"hive_column select hive_column.name orderby name desc limit 5", 5, "hive_column.name", !isAscending},
-
- //Not working, the problem is in server code not figuring out how to sort. not sure if it is valid use case.
-// {"hive_db hive_table orderby 'hive_db.owner'", 10, "owner", isAscending},
-// {"hive_db hive_table orderby 'hive_db.owner' limit 5", 5, "owner", isAscending},
-// {"hive_db hive_table orderby 'hive_db.owner' limit 5 offset 5", 3, "owner", isAscending},
-
- {"hive_db select hive_db.description orderby hive_db.description limit 10 withPath", 3, "hive_db.description", isAscending},
- {"hive_db select hive_db.description orderby hive_db.description desc limit 10 withPath", 3, "hive_db.description", !isAscending},
-
- {"hive_column select hive_column.name orderby hive_column.name limit 10 withPath", 10, "hive_column.name", isAscending},
- {"hive_column select hive_column.name orderby hive_column.name asc limit 10 withPath", 10, "hive_column.name", isAscending},
- {"hive_column select hive_column.name orderby hive_column.name desc limit 10 withPath", 10, "hive_column.name", !isAscending},
- {"from hive_db orderby hive_db.owner limit 3", 3, "owner", isAscending},
- {"hive_db where hive_db.name=\"Reporting\" orderby 'owner'", 1, "owner", isAscending},
-
- {"hive_db where hive_db.name=\"Reporting\" orderby hive_db.owner limit 10 ", 1, "owner", isAscending},
- {"hive_db where hive_db.name=\"Reporting\" select name, owner orderby hive_db.name ", 1, "name", isAscending},
- {"hive_db has name orderby hive_db.owner limit 10 offset 0", 3, "owner", isAscending},
-
- {"from hive_table select hive_table.owner orderby hive_table.owner", 10, "hive_table.owner", isAscending},
- {"from hive_table select hive_table.owner orderby hive_table.owner limit 8", 8, "hive_table.owner", isAscending},
-
- {"hive_table orderby hive_table.name", 10, "name", isAscending},
-
- {"hive_table orderby hive_table.owner", 10, "owner", isAscending},
- {"hive_table orderby hive_table.owner limit 8", 8, "owner", isAscending},
- {"hive_table orderby hive_table.owner limit 8 offset 0", 8, "owner", isAscending},
- {"hive_table orderby hive_table.owner desc limit 8 offset 0", 8, "owner", !isAscending},
-
- //Not working because of existing bug Atlas-175
-// {"hive_table isa Dimension orderby hive_table.owner", 3, "hive_table.owner", isAscending},//order not working
-// {"hive_table isa Dimension orderby hive_table.owner limit 3", 3, "hive_table.owner", isAscending},
-// {"hive_table isa Dimension orderby hive_table.owner limit 3 offset 0", 3, "hive_table.owner", isAscending},
-// {"hive_table isa Dimension orderby hive_table.owner desc limit 3 offset 0", 3, "hive_table.owner", !isAscending},
-//
-// {"hive_column where hive_column isa PII orderby hive_column.name", 6, "hive_column.name", isAscending},
-// {"hive_column where hive_column isa PII orderby hive_column.name limit 5", 5, "hive_column.name", isAscending},
-// {"hive_column where hive_column isa PII orderby hive_column.name limit 5 offset 1", 5, "hive_column.name", isAscending},
-// {"hive_column where hive_column isa PII orderby hive_column.name desc limit 5 offset 1", 5, "hive_column.name", !isAscending},
-
- {"hive_column select hive_column.name orderby hive_column.name ", 37, "hive_column.name", isAscending},
- {"hive_column select hive_column.name orderby hive_column.name limit 5", 5, "hive_column.name", isAscending},
- {"hive_column select hive_column.name orderby hive_column.name desc limit 5", 5, "hive_column.name", !isAscending},
-
- {"hive_column select hive_column.name orderby hive_column.name limit 5 offset 28", 5, "hive_column.name", isAscending},
-
- {"hive_column select name orderby hive_column.name", 37, "name", isAscending},
- {"hive_column select name orderby hive_column.name limit 5", 5, "name", isAscending},
- {"hive_column select name orderby hive_column.name desc", 37, "name", !isAscending},
-
- {"hive_column where hive_column.name=\"customer_id\" orderby hive_column.name", 6, "name", isAscending},
- {"hive_column where hive_column.name=\"customer_id\" orderby hive_column.name limit 2", 2, "name", isAscending},
- {"hive_column where hive_column.name=\"customer_id\" orderby hive_column.name limit 2 offset 1", 2, "name", isAscending},
-
- {"from hive_table select owner orderby hive_table.owner",10, "owner", isAscending},
- {"from hive_table select owner orderby hive_table.owner limit 5", 5, "owner", isAscending},
- {"from hive_table select owner orderby hive_table.owner desc limit 5", 5, "owner", !isAscending},
- {"from hive_table select owner orderby hive_table.owner limit 5 offset 5", 5, "owner", isAscending},
-
- {"hive_db where (name = \"Reporting\") orderby hive_db.name", 1, "name", isAscending},
- {"hive_db where (name = \"Reporting\") orderby hive_db.name limit 10", 1, "name", isAscending},
- {"hive_db where hive_db has name orderby hive_db.owner", 3, "owner", isAscending},
- {"hive_db where hive_db has name orderby hive_db.owner limit 5", 3, "owner", isAscending},
- {"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 0", 2, "owner", isAscending},
- {"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 1", 2, "owner", isAscending},
-
-
- {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1'", 1, "_col_1", isAscending},
- {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1' limit 10", 1, "_col_1", isAscending},
- {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1' limit 10 offset 1", 0, "_col_1", isAscending},
- {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1' limit 10 offset 0", 1, "_col_1", isAscending},
-
- {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' ", 1, "_col_1", isAscending},
- {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 ", 1, "_col_1", isAscending},
- {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 0", 1, "_col_1", isAscending},
- {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 5", 0, "_col_1", isAscending},
-
- {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' ", 1, "_col_0", isAscending},
- {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 0", 1, "_col_0", isAscending},
- {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 1", 0, "_col_0", isAscending},
- {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10", 1, "_col_0", isAscending},
- {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 0 offset 1", 0, "_col_0", isAscending},
-
- {"hive_column select hive_column.name orderby hive_column.name limit 10 withPath", 10, "hive_column.name", isAscending},
- {"hive_column select hive_column.name orderby hive_column.name limit 10 withPath", 10, "hive_column.name", isAscending},
- {"hive_table orderby 'hive_table.owner_notdefined'", 10, null, isAscending},
- };
- }
-
- @DataProvider(name = "dslGroupByQueriesProvider")
- private Object[][] createDSLGroupByQueries() {
- return new Object[][]{
- { "from Person as p, mentor as m groupby(m.name) select m.name, count()",
- new FieldValueValidator().withFieldNames("m.name", "count()").withExpectedValues("Max", 1)
- .withExpectedValues("Julius", 1) },
-
- // This variant of this query is currently failing. See OMS-335 for details.
- { "from Person as p, mentor groupby(mentor.name) select mentor.name, count()",
- new FieldValueValidator().withFieldNames("mentor.name", "count()").withExpectedValues("Max", 1)
- .withExpectedValues("Julius", 1) },
-
- { "from Person, mentor groupby(mentor.name) select mentor.name, count()",
- new FieldValueValidator().withFieldNames("mentor.name", "count()").withExpectedValues("Max", 1)
- .withExpectedValues("Julius", 1) },
-
- { "from Person, mentor as m groupby(m.name) select m.name, count()",
- new FieldValueValidator().withFieldNames("m.name", "count()").withExpectedValues("Max", 1)
- .withExpectedValues("Julius", 1) },
-
- { "from Person groupby (isOrganDonor) select count()",
- new FieldValueValidator().withFieldNames("count()").withExpectedValues(2)
- .withExpectedValues(2) },
- { "from Person groupby (isOrganDonor) select Person.isOrganDonor, count()",
- new FieldValueValidator().withFieldNames("Person.isOrganDonor", "count()")
- .withExpectedValues(true, 2).withExpectedValues(false, 2) },
-
- { "from Person groupby (isOrganDonor) select Person.isOrganDonor as 'organDonor', count() as 'count', max(Person.age) as 'max', min(Person.age) as 'min'",
- new FieldValueValidator().withFieldNames("organDonor", "max", "min", "count")
- .withExpectedValues(true, 50, 36, 2).withExpectedValues(false, 0, 0, 2) },
-
- { "from hive_db groupby (owner, name) select count() ", new FieldValueValidator()
- .withFieldNames("count()").withExpectedValues(1).withExpectedValues(1).withExpectedValues(1) },
-
- { "from hive_db groupby (owner, name) select hive_db.owner, hive_db.name, count() ",
- new FieldValueValidator().withFieldNames("hive_db.owner", "hive_db.name", "count()")
- .withExpectedValues("Jane BI", "Reporting", 1)
- .withExpectedValues("Tim ETL", "Logging", 1)
- .withExpectedValues("John ETL", "Sales", 1) },
-
- { "from hive_db groupby (owner) select count() ",
- new FieldValueValidator().withFieldNames("count()").withExpectedValues(1).withExpectedValues(1)
- .withExpectedValues(1) },
-
- { "from hive_db groupby (owner) select hive_db.owner, count() ",
- new FieldValueValidator().withFieldNames("hive_db.owner", "count()")
- .withExpectedValues("Jane BI", 1).withExpectedValues("Tim ETL", 1)
- .withExpectedValues("John ETL", 1) },
-
- { "from hive_db groupby (owner) select hive_db.owner, max(hive_db.name) ",
- new FieldValueValidator().withFieldNames("hive_db.owner", "max(hive_db.name)")
- .withExpectedValues("Tim ETL", "Logging").withExpectedValues("Jane BI", "Reporting")
- .withExpectedValues("John ETL", "Sales") },
-
- { "from hive_db groupby (owner) select max(hive_db.name) ",
- new FieldValueValidator().withFieldNames("max(hive_db.name)").withExpectedValues("Logging")
- .withExpectedValues("Reporting").withExpectedValues("Sales") },
-
- { "from hive_db groupby (owner) select owner, hive_db.name, min(hive_db.name) ",
- new FieldValueValidator().withFieldNames("owner", "hive_db.name", "min(hive_db.name)")
- .withExpectedValues("Tim ETL", "Logging", "Logging")
- .withExpectedValues("Jane BI", "Reporting", "Reporting")
- .withExpectedValues("John ETL", "Sales", "Sales") },
-
- { "from hive_db groupby (owner) select owner, min(hive_db.name) ",
- new FieldValueValidator().withFieldNames("owner", "min(hive_db.name)")
- .withExpectedValues("Tim ETL", "Logging").withExpectedValues("Jane BI", "Reporting")
- .withExpectedValues("John ETL", "Sales") },
-
- { "from hive_db groupby (owner) select min(name) ",
- new FieldValueValidator().withFieldNames("min(name)")
- .withExpectedValues("Reporting").withExpectedValues("Logging")
- .withExpectedValues("Sales") },
-
- { "from hive_db groupby (owner) select min('name') ",
- new FieldValueValidator().withFieldNames("min(\"name\")").withExpectedValues("name")
- .withExpectedValues("name").withExpectedValues("name") }, //finding the minimum of a constant literal expression...
-
- { "from hive_db groupby (owner) select name ",
- new FieldValueValidator().withFieldNames("name").withExpectedValues("Reporting")
- .withExpectedValues("Sales").withExpectedValues("Logging") },
-
- //implied group by
- { "from hive_db select count() ",
- new FieldValueValidator().withFieldNames("count()").withExpectedValues(3) },
- //implied group by
- { "from Person select count() as 'count', max(Person.age) as 'max', min(Person.age) as 'min'",
- new FieldValueValidator().withFieldNames("max", "min", "count").withExpectedValues(50, 0, 4) },
- //Sum
- { "from Person groupby (isOrganDonor) select count() as 'count', sum(Person.age) as 'sum'",
- new FieldValueValidator().withFieldNames("count", "sum").withExpectedValues(2, 0)
- .withExpectedValues(2, 86) },
- { "from Person groupby (isOrganDonor) select Person.isOrganDonor as 'organDonor', count() as 'count', sum(Person.age) as 'sum'",
- new FieldValueValidator().withFieldNames("organDonor", "count", "sum").withExpectedValues(false, 2, 0)
- .withExpectedValues(true, 2, 86) },
- { "from Person select count() as 'count', sum(Person.age) as 'sum'",
- new FieldValueValidator().withFieldNames("count", "sum").withExpectedValues(4, 86) },
- // tests to ensure that group by works with order by and limit
- { "from hive_db groupby (owner) select min(name) orderby name limit 2 ",
- new FieldValueValidator().withFieldNames("min(name)")
- .withExpectedValues("Logging").withExpectedValues("Reporting")
- },
-
- { "from hive_db groupby (owner) select min(name) orderby name desc limit 2 ",
- new FieldValueValidator().withFieldNames("min(name)")
- .withExpectedValues("Reporting").withExpectedValues("Sales")
- },
- };
- }
-
- @DataProvider(name = "dslObjectQueriesReturnIdProvider")
- private Object[][] createDSLObjectIdQueries() {
- return new Object[][] { {
- "from hive_db as h select h as id",
- new FieldValueValidator().withFieldNames("id")
- .withExpectedValues(idType).withExpectedValues(idType)
- .withExpectedValues(idType) }
- };
- }
-
- @Test(dataProvider = "dslOrderByQueriesProvider")
- public void testSearchByDSLQueriesWithOrderBy(String dslQuery, Integer expectedNumRows, String orderBy, boolean ascending) throws Exception {
- System.out.println("Executing dslQuery = " + dslQuery);
- String jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- assertEquals(results.length(), 3);
-
- Object query = results.get("query");
- assertNotNull(query);
-
- JSONObject dataType = results.getJSONObject("dataType");
- assertNotNull(dataType);
- String typeName = dataType.getString("typeName");
- assertNotNull(typeName);
-
- JSONArray rows = results.getJSONArray("rows");
-
- assertNotNull(rows);
- assertEquals(rows.length(), expectedNumRows.intValue()); // some queries may not have any results
- List<String> returnedList = new ArrayList<>();
- for (int i = 0; i < rows.length(); i++) {
- JSONObject row = rows.getJSONObject(i);
- try
- {
- returnedList.add(row.get(orderBy).toString());
- }
- catch(Exception ex)
- {
- System.out.println( " Exception occured " + ex.getMessage() + " found row: "+row);
- }
- }
- Iterator<String> iter = returnedList.iterator();
- String _current = null, _prev = null;
- if (orderBy != null) {
- // Following code compares the results in rows and makes sure data
- // is sorted as expected
- while (iter.hasNext()) {
- _prev = _current;
- _current = iter.next().toLowerCase();
- if (_prev != null && _prev.compareTo(_current) != 0) {
- if(ascending) {
- Assert.assertTrue(_prev.compareTo(_current) < 0, _prev + " is greater than " + _current);
- }
- else {
- Assert.assertTrue(_prev.compareTo(_current) > 0, _prev + " is less than " + _current);
- }
- }
- }
- }
-
- System.out.println("query [" + dslQuery + "] returned [" + rows.length() + "] rows");
- }
-
- @Test(dataProvider = "dslQueriesProvider")
- public void testSearchByDSLQueries(String dslQuery, Integer expectedNumRows) throws Exception {
- runQuery(dslQuery, expectedNumRows, 40, 0);
- }
-
- @Test(dataProvider = "comparisonQueriesProvider")
- public void testDataTypeComparisonQueries(String dslQuery, Integer expectedNumRows) throws Exception {
- runQuery(dslQuery, expectedNumRows, 40, 0);
- }
-
- @Test(dataProvider = "dslExplicitLimitQueriesProvider")
- public void testSearchByDSLQueriesWithExplicitLimit(String dslQuery, Integer expectedNumRows, int limit, int offset)
- throws Exception {
- runQuery(dslQuery, expectedNumRows, limit, offset);
- }
-
- public void runQuery(String dslQuery, Integer expectedNumRows, int limitParam, int offsetParam) throws Exception {
- System.out.println("Executing dslQuery = " + dslQuery);
- String jsonResults = discoveryService.searchByDSL(dslQuery, new QueryParams(limitParam, offsetParam));
- assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- assertEquals(results.length(), 3);
- System.out.println("results = " + results);
-
- Object query = results.get("query");
- assertNotNull(query);
-
- JSONObject dataType = results.getJSONObject("dataType");
- assertNotNull(dataType);
- String typeName = dataType.getString("typeName");
- assertNotNull(typeName);
-
- JSONArray rows = results.getJSONArray("rows");
- assertNotNull(rows);
- assertEquals( rows.length(), expectedNumRows.intValue(), "query [" + dslQuery + "] returned [" + rows.length() + "] rows. Expected " + expectedNumRows + " rows."); // some queries may not have any results
- System.out.println("query [" + dslQuery + "] returned [" + rows.length() + "] rows");
- }
-
- @Test(dataProvider = "dslLimitQueriesProvider")
- public void testSearchByDSLQueriesWithLimit(String dslQuery, Integer expectedNumRows) throws Exception {
- runQuery(dslQuery, expectedNumRows, 40, 0);
- }
-
- @DataProvider(name = "invalidDslQueriesProvider")
- private Object[][] createInvalidDSLQueries() {
- return new String[][]{{"from Unknown"}, {"Unknown"}, {"Unknown is Blah"},};
- }
-
- @Test(dataProvider = "invalidDslQueriesProvider", expectedExceptions = DiscoveryException.class)
- public void testSearchByDSLInvalidQueries(String dslQuery) throws Exception {
- System.out.println("Executing dslQuery = " + dslQuery);
- searchByDSL(dslQuery);
- Assert.fail();
- }
-
- @Test
- public void testSearchForTypeInheritance() throws Exception {
- createTypesWithMultiLevelInheritance();
- createInstances();
-
- String dslQuery = "from D where a = 1";
- String jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- System.out.println("results = " + results);
- }
-
- @Test
- public void testSearchForTypeWithReservedKeywordAttributes() throws Exception {
- createTypesWithReservedKeywordAttributes();
-
- String dslQuery = "from OrderType where `order` = 1";
- String jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- System.out.println("results = " + results);
- }
-
- /*
- * Type Hierarchy is:
- * A(a)
- * B(b) extends A
- * C(c) extends B
- * D(d) extends C
- */
- private void createTypesWithMultiLevelInheritance() throws Exception {
- HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE));
-
- HierarchicalTypeDefinition B =
- createClassTypeDef("B", ImmutableSet.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
-
- HierarchicalTypeDefinition C =
- createClassTypeDef("C", ImmutableSet.of("B"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
-
- HierarchicalTypeDefinition D =
- createClassTypeDef("D", ImmutableSet.of("C"), createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
-
- TypeSystem.getInstance().defineClassTypes(A, B, C, D);
- }
-
- private void createTypesWithReservedKeywordAttributes() throws Exception {
- HierarchicalTypeDefinition orderType = createClassTypeDef("OrderType", null, createRequiredAttrDef("order", DataTypes.INT_TYPE));
-
- HierarchicalTypeDefinition limitType =
- createClassTypeDef("LimitType", null, createOptionalAttrDef("limit", DataTypes.BOOLEAN_TYPE));
-
- TypeSystem.getInstance().defineClassTypes(orderType, limitType);
- }
-
- private void createInstances() throws Exception {
- Referenceable instance = new Referenceable("D");
- instance.set("d", 1);
- instance.set("c", 1);
- instance.set("b", true);
- instance.set("a", 1);
-
- ClassType deptType = TypeSystem.getInstance().getDataType(ClassType.class, "D");
- ITypedReferenceableInstance typedInstance = deptType.convert(instance, Multiplicity.REQUIRED);
-
- repositoryService.createEntities(typedInstance);
- }
-
- private void runCountGroupByQuery(String dslQuery, ResultChecker checker) throws Exception {
- runAndValidateQuery(dslQuery, checker);
- }
-
- private void runAndValidateQuery(String dslQuery, ResultChecker checker) throws Exception {
- System.out.println("Executing dslQuery = " + dslQuery);
- String jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- assertEquals(results.length(), 3);
- Object query = results.get("query");
- assertNotNull(query);
-
- JSONArray rows = results.getJSONArray("rows");
- assertNotNull(rows);
- if (checker != null) {
- checker.validateResult(dslQuery, rows);
- }
-
- System.out.println("query [" + dslQuery + "] returned [" + rows.length() + "] rows");
- }
-
- @Test(dataProvider = "dslGroupByQueriesProvider")
- public void testSearchGroupByDSLQueries(String dslQuery, ResultChecker checker) throws Exception {
- runCountGroupByQuery(dslQuery, checker);
- }
-
- @Test(dataProvider = "dslObjectQueriesReturnIdProvider")
- public void testSearchObjectQueriesReturnId(String dslQuery,
- ResultChecker checker) throws Exception {
- runAndValidateQuery(dslQuery, checker);
- }
-
- private interface ResultChecker {
- void validateResult(String dslQuery, JSONArray foundRows) throws JSONException;
- }
-
- static class FieldValueValidator implements ResultChecker {
- static class ResultObject {
-
- private static String[] idTypeAttributes = { "id", "$typeName$",
- "state", "version" };
-
- @Override
- public String toString() {
- return "ResultObject [fieldValues_=" + fieldValues_ + "]";
- }
-
- Map<String, Object> fieldValues_ = new HashMap<>();
-
- public void setFieldValue(String string, Object object) {
- fieldValues_.put(string, object);
-
- }
-
- public boolean matches(JSONObject object) throws JSONException {
- for (Map.Entry<String, Object> requiredFieldsEntry : fieldValues_.entrySet()) {
- String fieldName = requiredFieldsEntry.getKey();
- Object expectedValue = requiredFieldsEntry.getValue();
- Object foundValue = null;
- if (expectedValue.getClass() == Integer.class) {
- foundValue = object.getInt(fieldName);
- } else if (expectedValue == idType) {
- return validateObjectIdType(object, fieldName);
- } else {
- foundValue = object.get(fieldName);
- }
- if (foundValue == null || !expectedValue.equals(foundValue)) {
- return false;
- }
- }
- return true;
- }
- // validates that returned object id contains all the required attributes.
- private boolean validateObjectIdType(JSONObject object,
- String fieldName) throws JSONException {
- JSONObject foundJson = object.getJSONObject(fieldName);
- for (String idAttr : idTypeAttributes) {
- if (foundJson.get(idAttr) == null) {
- return false;
- }
- }
- return true;
- }
- }
-
- private String[] fieldNames_;
- private List<ResultObject> expectedObjects_ = new ArrayList<>();
- public FieldValueValidator() {
-
- }
-
-
- public FieldValueValidator withFieldNames(String... fields) {
- fieldNames_ = fields;
- return this;
- }
-
- public FieldValueValidator withExpectedValues(Object... values) {
- ResultObject obj = new ResultObject();
- for (int i = 0; i < fieldNames_.length; i++) {
- obj.setFieldValue(fieldNames_[i], values[i]);
- }
- expectedObjects_.add(obj);
- return this;
- }
-
- @Override
- public void validateResult(String dslQuery, JSONArray foundRows) throws JSONException {
-
- //make sure that all required rows are found
- Assert.assertEquals(foundRows.length(), expectedObjects_.size(),
- "The wrong number of objects was returned for query " + dslQuery + ". Expected "
- + expectedObjects_.size() + ", found " + foundRows.length());
-
- for (ResultObject required : expectedObjects_) {
- //not exactly efficient, but this is test code
- boolean found = false;
- for (int i = 0; i < foundRows.length(); i++) {
- JSONObject row = foundRows.getJSONObject(i);
- System.out.println(" found row "+ row);
- if (required.matches(row)) {
- found = true;
- break;
- }
- }
- if (!found) {
- Assert.fail("The result for " + dslQuery + " is wrong. The required row " + required
- + " was not found in " + foundRows);
- }
- }
- }
-
- }
-
- static class CountOnlyValidator implements ResultChecker {
- private List<Integer> expectedCounts = new ArrayList<Integer>();
- private int countColumn = 0;
-
- public CountOnlyValidator() {
-
- }
-
-
- public CountOnlyValidator withCountColumn(int col) {
- countColumn = col;
- return this;
- }
-
- public CountOnlyValidator withExpectedCounts(Integer... counts) {
- expectedCounts.addAll(Arrays.asList(counts));
- return this;
- }
-
- @Override
- public void validateResult(String dslQuery, JSONArray foundRows) throws JSONException {
- assertEquals(foundRows.length(), expectedCounts.size());
- for (int i = 0; i < foundRows.length(); i++) {
-
- JSONArray row = foundRows.getJSONArray(i);
- assertEquals(row.length(), 1);
- int foundCount = row.getInt(countColumn);
- // assertTrue(expectedCounts.contains(foundCount));
- }
- }
-
- }
-
- @Test
- public void testSearchForTypeWithNoInstances() throws Exception {
-
- HierarchicalTypeDefinition EMPTY = createClassTypeDef("EmptyType", null,
- createRequiredAttrDef("a", DataTypes.INT_TYPE));
- TypeSystem.getInstance().defineClassTypes(EMPTY);
-
- String dslQuery = "EmptyType";
- String jsonResults = searchByDSL(dslQuery);
- assertNotNull(jsonResults);
- JSONObject results = new JSONObject(jsonResults);
-
- assertEquals(results.length(), 3);
-
- JSONArray rows = results.getJSONArray("rows");
- assertNotNull(rows);
-
- // query should not return any rows
- assertEquals(rows.length(), 0);
- }
-
- @Test
- public void testTypePreservedWhenFilterTraversesEdges() throws DiscoveryException, JSONException {
-
- String dsl = "hive_table db.name=\"Reporting\" limit 10";
- ImmutableSet<String> expectedTableNames = ImmutableSet.of("table1", "table2", "sales_fact_monthly_mv", "sales_fact_daily_mv");
- String jsonResults = discoveryService.searchByDSL(dsl, null);
- assertNotNull(jsonResults);
-
- JSONObject results = new JSONObject(jsonResults);
- JSONArray rows = results.getJSONArray("rows");
- assertEquals(rows.length(), expectedTableNames.size());
- for(int i = 0; i < rows.length(); i++) {
- JSONObject row = rows.getJSONObject(i);
- Assert.assertTrue(expectedTableNames.contains(row.get("name")));
- }
- }
-
- private FieldValueValidator makeCountValidator(int count) {
- return new FieldValueValidator().withFieldNames("count()").withExpectedValues(count);
- }
-
- private FieldValueValidator makeNoResultsValidator() {
- return new FieldValueValidator();
- }
-
- private boolean isGremlin3() {
- return TestUtils.getGraph().getSupportedGremlinVersion() == GremlinVersion.THREE;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/lineage/EntityLineageServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/lineage/EntityLineageServiceTest.java b/repository/src/test/java/org/apache/atlas/lineage/EntityLineageServiceTest.java
deleted file mode 100644
index 202f20c..0000000
--- a/repository/src/test/java/org/apache/atlas/lineage/EntityLineageServiceTest.java
+++ /dev/null
@@ -1,356 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.lineage;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.atlas.AtlasErrorCode;
-import org.apache.atlas.BaseRepositoryTest;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.discovery.EntityLineageService;
-import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.model.instance.AtlasEntity.Status;
-import org.apache.atlas.model.instance.AtlasEntityHeader;
-import org.apache.atlas.model.legacy.EntityResult;
-import org.apache.atlas.model.lineage.AtlasLineageInfo;
-import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageDirection;
-import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageRelation;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.commons.collections.ArrayStack;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-
-/**
- * Unit tests for the new v2 Instance LineageService.
- */
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class EntityLineageServiceTest extends BaseRepositoryTest {
-
- @Inject
- private EntityLineageService lineageService;
-
- @BeforeClass
- public void setUp() throws Exception {
- super.setUp();
- }
-
- @AfterClass
- public void tearDown() throws Exception {
- super.tearDown();
- }
-
- /**
- * Circular Lineage Test.
- */
- @Test
- public void testCircularLineage() throws Exception{
- TestUtils.skipForGremlin3EnabledGraphDb();
- String entityGuid = getEntityId(HIVE_TABLE_TYPE, "name", "table2");
- AtlasLineageInfo circularLineage = getInputLineageInfo(entityGuid, 5);
-
- assertNotNull(circularLineage);
- System.out.println("circular lineage = " + circularLineage);
-
- Map<String, AtlasEntityHeader> entities = circularLineage.getGuidEntityMap();
- assertNotNull(entities);
-
- Set<LineageRelation> relations = circularLineage.getRelations();
- assertNotNull(relations);
-
- Assert.assertEquals(entities.size(), 4);
- Assert.assertEquals(relations.size(), 4);
- Assert.assertEquals(circularLineage.getLineageDepth(), 5);
- Assert.assertEquals(circularLineage.getLineageDirection(), LineageDirection.INPUT);
-
- assertTrue(entities.containsKey(circularLineage.getBaseEntityGuid()));
- }
-
- /**
- * Input Lineage Tests.
- */
- @Test(dataProvider = "invalidQueryParamsProvider")
- public void testGetInputLineageInfoInvalidParams(final String guid, final AtlasLineageInfo.LineageDirection direction, final int depth, AtlasErrorCode errorCode) throws Exception {
- testInvalidQueryParams(errorCode, new Invoker() {
- @Override
- void run() throws AtlasBaseException {
- lineageService.getAtlasLineageInfo(guid, direction, depth);
- }
- });
- }
-
- @Test
- public void testGetInputLineageInfo() throws Exception {
- TestUtils.skipForGremlin3EnabledGraphDb();
- String entityGuid = getEntityId(HIVE_TABLE_TYPE, "name", "sales_fact_monthly_mv");
- AtlasLineageInfo inputLineage = getInputLineageInfo(entityGuid, 4);
-
- assertNotNull(inputLineage);
- System.out.println("input lineage = " + inputLineage);
-
- Map<String, AtlasEntityHeader> entities = inputLineage.getGuidEntityMap();
- assertNotNull(entities);
-
- Set<LineageRelation> relations = inputLineage.getRelations();
- assertNotNull(relations);
-
- Assert.assertEquals(entities.size(), 6);
- Assert.assertEquals(relations.size(), 5);
- Assert.assertEquals(inputLineage.getLineageDepth(), 4);
- Assert.assertEquals(inputLineage.getLineageDirection(), LineageDirection.INPUT);
-
- assertTrue(entities.containsKey(inputLineage.getBaseEntityGuid()));
- }
-
- /**
- * Output Lineage Tests.
- */
- @Test(dataProvider = "invalidQueryParamsProvider")
- public void testGetOutputLineageInvalidParams(final String guid, final LineageDirection direction, final int depth, AtlasErrorCode errorCode) throws Exception {
- testInvalidQueryParams(errorCode, new Invoker() {
- @Override
- void run() throws AtlasBaseException {
- lineageService.getAtlasLineageInfo(guid, direction, depth);
- }
- });
- }
-
- @Test
- public void testGetOutputLineageInfo() throws Exception {
- TestUtils.skipForGremlin3EnabledGraphDb();
- String entityGuid = getEntityId(HIVE_TABLE_TYPE, "name", "sales_fact");
- AtlasLineageInfo outputLineage = getOutputLineageInfo(entityGuid, 4);
-
- assertNotNull(outputLineage);
- System.out.println("output lineage = " + outputLineage);
-
- Map<String, AtlasEntityHeader> entities = outputLineage.getGuidEntityMap();
- assertNotNull(entities);
-
- Set<LineageRelation> relations = outputLineage.getRelations();
- assertNotNull(relations);
-
- Assert.assertEquals(entities.size(), 5);
- Assert.assertEquals(relations.size(), 4);
- Assert.assertEquals(outputLineage.getLineageDepth(), 4);
- Assert.assertEquals(outputLineage.getLineageDirection(), LineageDirection.OUTPUT);
-
- assertTrue(entities.containsKey(outputLineage.getBaseEntityGuid()));
- }
-
- /**
- * Both Lineage Tests.
- */
- @Test(dataProvider = "invalidQueryParamsProvider")
- public void testGetLineageInfoInvalidParams(final String guid, final LineageDirection direction, final int depth, AtlasErrorCode errorCode) throws Exception {
- testInvalidQueryParams(errorCode, new Invoker() {
- @Override
- void run() throws AtlasBaseException {
- lineageService.getAtlasLineageInfo(guid, direction, depth);
- }
- });
- }
-
- @Test
- public void testGetLineageInfo() throws Exception {
- TestUtils.skipForGremlin3EnabledGraphDb();
- String entityGuid = getEntityId(HIVE_TABLE_TYPE, "name", "sales_fact_monthly_mv");
- AtlasLineageInfo bothLineage = getBothLineageInfo(entityGuid, 5);
-
- assertNotNull(bothLineage);
- System.out.println("both lineage = " + bothLineage);
-
- Map<String, AtlasEntityHeader> entities = bothLineage.getGuidEntityMap();
- assertNotNull(entities);
-
- Set<LineageRelation> relations = bothLineage.getRelations();
- assertNotNull(relations);
-
- Assert.assertEquals(entities.size(), 6);
- Assert.assertEquals(relations.size(), 5);
- Assert.assertEquals(bothLineage.getLineageDepth(), 5);
- Assert.assertEquals(bothLineage.getLineageDirection(), AtlasLineageInfo.LineageDirection.BOTH);
-
- assertTrue(entities.containsKey(bothLineage.getBaseEntityGuid()));
- }
-
- @DataProvider(name = "invalidQueryParamsProvider")
- private Object[][] params() throws Exception {
- String entityGuid = getEntityId(HIVE_TABLE_TYPE, "name", "sales_fact_monthly_mv");
-
- // String guid, LineageDirection direction, int depth, AtlasErrorCode errorCode
-
- return new Object[][]{
- {"", null, 0, AtlasErrorCode.INSTANCE_GUID_NOT_FOUND},
- {" ", null, 0, AtlasErrorCode.INSTANCE_GUID_NOT_FOUND},
- {null, null, 0, AtlasErrorCode.INSTANCE_GUID_NOT_FOUND},
- {"invalidGuid", LineageDirection.OUTPUT, 6, AtlasErrorCode.INSTANCE_GUID_NOT_FOUND},
- {entityGuid, null, -10, AtlasErrorCode.INSTANCE_LINEAGE_INVALID_PARAMS},
- {entityGuid, null, 5, AtlasErrorCode.INSTANCE_LINEAGE_INVALID_PARAMS}
- };
- }
-
- abstract class Invoker {
- abstract void run() throws AtlasBaseException;
- }
-
- public void testInvalidQueryParams(AtlasErrorCode expectedErrorCode, Invoker Invoker) throws Exception {
- try {
- Invoker.run();
- fail("Expected " + expectedErrorCode.toString());
- } catch(AtlasBaseException e) {
- assertEquals(e.getAtlasErrorCode(), expectedErrorCode);
- }
- }
-
- private AtlasLineageInfo getInputLineageInfo(String guid, int depth) throws Exception {
- return lineageService.getAtlasLineageInfo(guid, LineageDirection.INPUT, depth);
- }
-
- private AtlasLineageInfo getOutputLineageInfo(String guid, int depth) throws Exception {
- return lineageService.getAtlasLineageInfo(guid, AtlasLineageInfo.LineageDirection.OUTPUT, depth);
- }
-
- private AtlasLineageInfo getBothLineageInfo(String guid, int depth) throws Exception {
- return lineageService.getAtlasLineageInfo(guid, AtlasLineageInfo.LineageDirection.BOTH, depth);
- }
-
- @Test
- public void testNewLineageWithDelete() throws Exception {
- TestUtils.skipForGremlin3EnabledGraphDb();
- String tableName = "table" + random();
- createTable(tableName, 3, true);
- String entityGuid = getEntityId(HIVE_TABLE_TYPE, "name", tableName);
-
- AtlasLineageInfo inputLineage = getInputLineageInfo(entityGuid, 5);
- assertNotNull(inputLineage);
- System.out.println("input lineage = " + inputLineage);
-
- Map<String, AtlasEntityHeader> entitiesInput = inputLineage.getGuidEntityMap();
- assertNotNull(entitiesInput);
- assertEquals(entitiesInput.size(), 3);
-
- Set<LineageRelation> relationsInput = inputLineage.getRelations();
- assertNotNull(relationsInput);
- assertEquals(relationsInput.size(), 2);
-
- AtlasEntityHeader tableEntityInput = entitiesInput.get(entityGuid);
- assertEquals(tableEntityInput.getStatus(), Status.ACTIVE);
-
- AtlasLineageInfo outputLineage = getOutputLineageInfo(entityGuid, 5);
- assertNotNull(outputLineage);
- System.out.println("output lineage = " + outputLineage);
-
- Map<String, AtlasEntityHeader> entitiesOutput = outputLineage.getGuidEntityMap();
- assertNotNull(entitiesOutput);
- assertEquals(entitiesOutput.size(), 3);
-
- Set<LineageRelation> relationsOutput = outputLineage.getRelations();
- assertNotNull(relationsOutput);
- assertEquals(relationsOutput.size(), 2);
-
- AtlasEntityHeader tableEntityOutput = entitiesOutput.get(entityGuid);
- assertEquals(tableEntityOutput.getStatus(), Status.ACTIVE);
-
- AtlasLineageInfo bothLineage = getBothLineageInfo(entityGuid, 5);
- assertNotNull(bothLineage);
- System.out.println("both lineage = " + bothLineage);
-
- Map<String, AtlasEntityHeader> entitiesBoth = bothLineage.getGuidEntityMap();
- assertNotNull(entitiesBoth);
- assertEquals(entitiesBoth.size(), 5);
-
- Set<LineageRelation> relationsBoth = bothLineage.getRelations();
- assertNotNull(relationsBoth);
- assertEquals(relationsBoth.size(), 4);
-
- AtlasEntityHeader tableEntityBoth = entitiesBoth.get(entityGuid);
- assertEquals(tableEntityBoth.getStatus(), Status.ACTIVE);
-
- //Delete the table entity. Lineage for entity returns the same results as before.
- //Lineage for table name throws EntityNotFoundException
- EntityResult deleteResult = repository.deleteEntities(Arrays.asList(entityGuid));
- assertTrue(deleteResult.getDeletedEntities().contains(entityGuid));
-
- inputLineage = getInputLineageInfo(entityGuid, 5);
- tableEntityInput = inputLineage.getGuidEntityMap().get(entityGuid);
- assertEquals(tableEntityInput.getStatus(), Status.DELETED);
- assertEquals(inputLineage.getGuidEntityMap().size(), 3);
-
- outputLineage = getOutputLineageInfo(entityGuid, 5);
- tableEntityOutput = outputLineage.getGuidEntityMap().get(entityGuid);
- assertEquals(tableEntityOutput.getStatus(), Status.DELETED);
- assertEquals(outputLineage.getGuidEntityMap().size(), 3);
-
- bothLineage = getBothLineageInfo(entityGuid, 5);
- tableEntityBoth = bothLineage.getGuidEntityMap().get(entityGuid);
- assertEquals(tableEntityBoth.getStatus(), Status.DELETED);
- assertEquals(bothLineage.getGuidEntityMap().size(), 5);
-
- }
-
- private void createTable(String tableName, int numCols, boolean createLineage) throws Exception {
- String dbId = getEntityId(DATABASE_TYPE, "name", "Sales");
- Id salesDB = new Id(dbId, 0, DATABASE_TYPE);
-
- //Create the entity again and schema should return the new schema
- List<Referenceable> columns = new ArrayStack();
- for (int i = 0; i < numCols; i++) {
- columns.add(column("col" + random(), "int", "column descr"));
- }
-
- Referenceable sd =
- storageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat", true,
- ImmutableList.of(column("time_id", "int", "time id")));
-
- Id table = table(tableName, "test table", salesDB, sd, "fetl", "External", columns);
- if (createLineage) {
- Id inTable = table("table" + random(), "test table", salesDB, sd, "fetl", "External", columns);
- Id outTable = table("table" + random(), "test table", salesDB, sd, "fetl", "External", columns);
- loadProcess("process" + random(), "hive query for monthly summary", "Tim ETL", ImmutableList.of(inTable),
- ImmutableList.of(table), "create table as select ", "plan", "id", "graph", "ETL");
- loadProcess("process" + random(), "hive query for monthly summary", "Tim ETL", ImmutableList.of(table),
- ImmutableList.of(outTable), "create table as select ", "plan", "id", "graph", "ETL");
- }
- }
-
- private String random() {
- return TestUtils.randomString(5);
- }
-
- private String getEntityId(String typeName, String attributeName, String attributeValue) throws Exception {
- return repository.getEntityDefinition(typeName, attributeName, attributeValue).getId()._getId();
- }
-
-}
[06/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeInheritanceTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeInheritanceTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeInheritanceTest.java
deleted file mode 100644
index c13ef3a..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeInheritanceTest.java
+++ /dev/null
@@ -1,252 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableSet;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Struct;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createTraitTypeDef;
-
-/**
- * Unit tests for type inheritance.
- */
-public class TypeInheritanceTest extends BaseTest {
-
- @BeforeMethod
- public void setup() throws Exception {
- TypeSystem.getInstance().reset();
- super.setup();
- }
-
- /*
- * Type Hierarchy is:
- * A(a)
- * B(b) extends A
- */
- @Test
- public void testSimpleInheritance() throws AtlasException {
- HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE));
-
- HierarchicalTypeDefinition B =
- createClassTypeDef("B", ImmutableSet.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
-
- defineClasses(A, B);
-
- ClassType BType = getTypeSystem().getDataType(ClassType.class, "B");
-
- Struct s1 = new Struct("B");
- s1.set("b", true);
- s1.set("a", 1);
-
- ITypedInstance ts = BType.convert(s1, Multiplicity.REQUIRED);
- Assert.assertEquals(ts.toString(), "{\n" +
- "\tid : (type: B, id: <unassigned>)\n" +
- "\tb : \ttrue\n" +
- "\ta : \t1\n" +
- "}");
- }
-
- /*
- * Type Hierarchy is:
- * A(a, b)
- * B(b) extends A
- */
- @Test
- public void testSimpleInheritanceWithOverrides() throws AtlasException {
- HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
- createRequiredAttrDef("b", DataTypes.BOOLEAN_TYPE));
-
- HierarchicalTypeDefinition B =
- createClassTypeDef("B", ImmutableSet.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
-
- defineClasses(A, B);
-
- ClassType BType = getTypeSystem().getDataType(ClassType.class, "B");
-
- Struct s1 = new Struct("B");
- s1.set("b", true);
- s1.set("a", 1);
- s1.set("A.B.b", false);
-
- ITypedInstance ts = BType.convert(s1, Multiplicity.REQUIRED);
- Assert.assertEquals(ts.toString(), "{\n" +
- "\tid : (type: B, id: <unassigned>)\n" +
- "\tb : \ttrue\n" +
- "\ta : \t1\n" +
- "\tA.B.b : \tfalse\n" +
- "}");
- }
-
- /*
- * Type Hierarchy is:
- * A(a)
- * B(b) extends A
- * C(c) extends B
- * D(d) extends C
- */
- @Test
- public void testMultiLevelInheritance() throws AtlasException {
- HierarchicalTypeDefinition A = createClassTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE));
-
- HierarchicalTypeDefinition B =
- createClassTypeDef("B", ImmutableSet.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
-
- HierarchicalTypeDefinition C =
- createClassTypeDef("C", ImmutableSet.of("B"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
-
- HierarchicalTypeDefinition D =
- createClassTypeDef("D", ImmutableSet.of("C"), createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
-
- defineClasses(A, B, C, D);
-
- ClassType DType = getTypeSystem().getDataType(ClassType.class, "D");
-
- Struct s1 = new Struct("D");
- s1.set("d", 1);
- s1.set("c", 1);
- s1.set("b", true);
- s1.set("a", 1);
-
- ITypedInstance ts = DType.convert(s1, Multiplicity.REQUIRED);
- Assert.assertEquals(ts.toString(), "{\n" +
- "\tid : (type: D, id: <unassigned>)\n" +
- "\td : \t1\n" +
- "\tc : \t1\n" +
- "\tb : \ttrue\n" +
- "\ta : \t1\n" +
- "}");
- }
-
- /*
- * Type Hierarchy is:
- * A(a,b,c,d)
- * B(b) extends A
- * C(c) extends A
- * D(d) extends B,C
- *
- * - There are a total of 11 fields in an instance of D
- * - an attribute that is hidden by a SubType can referenced by prefixing it with the
- * complete Path.
- * For e.g. the 'b' attribute in A (that is a superType for B) is hidden the 'b' attribute
- * in B.
- * So it is availabel by the name 'A.B.D.b'
- *
- * - Another way to set attributes is to cast. Casting a 'D' instance of 'B' makes the 'A.B.D
- * .b' attribute
- * available as 'A.B.b'. Casting one more time to an 'A' makes the 'A.B.b' attribute
- * available as 'b'.
- */
- @Test
- public void testDiamondInheritance() throws AtlasException {
- HierarchicalTypeDefinition A = createTraitTypeDef("A", null, createRequiredAttrDef("a", DataTypes.INT_TYPE),
- createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
- createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
- HierarchicalTypeDefinition B =
- createTraitTypeDef("B", ImmutableSet.of("A"), createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
- HierarchicalTypeDefinition C =
- createTraitTypeDef("C", ImmutableSet.of("A"), createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
- HierarchicalTypeDefinition D =
- createTraitTypeDef("D", ImmutableSet.of("B", "C"), createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
-
- defineTraits(A, B, C, D);
-
- TraitType DType = getTypeSystem().getDataType(TraitType.class, "D");
-
- Struct s1 = new Struct("D");
- s1.set("d", 1);
- s1.set("c", 1);
- s1.set("b", true);
- s1.set("a", 1);
- s1.set("A.B.D.b", true);
- s1.set("A.B.D.c", 2);
- s1.set("A.B.D.d", 2);
-
- s1.set("A.C.D.a", 3);
- s1.set("A.C.D.b", false);
- s1.set("A.C.D.c", 3);
- s1.set("A.C.D.d", 3);
-
-
- ITypedStruct ts = DType.convert(s1, Multiplicity.REQUIRED);
- Assert.assertEquals(ts.toString(), "{\n" +
- "\td : \t1\n" +
- "\tb : \ttrue\n" +
- "\tc : \t1\n" +
- "\ta : \t1\n" +
- "\tA.B.D.b : \ttrue\n" +
- "\tA.B.D.c : \t2\n" +
- "\tA.B.D.d : \t2\n" +
- "\tA.C.D.a : \t3\n" +
- "\tA.C.D.b : \tfalse\n" +
- "\tA.C.D.c : \t3\n" +
- "\tA.C.D.d : \t3\n" +
- "}");
-
- /*
- * cast to B and set the 'b' attribute on A.
- */
- TraitType BType = getTypeSystem().getDataType(TraitType.class, "B");
- IStruct s2 = DType.castAs(ts, "B");
- s2.set("A.B.b", false);
-
- Assert.assertEquals(ts.toString(), "{\n" +
- "\td : \t1\n" +
- "\tb : \ttrue\n" +
- "\tc : \t1\n" +
- "\ta : \t1\n" +
- "\tA.B.D.b : \tfalse\n" +
- "\tA.B.D.c : \t2\n" +
- "\tA.B.D.d : \t2\n" +
- "\tA.C.D.a : \t3\n" +
- "\tA.C.D.b : \tfalse\n" +
- "\tA.C.D.c : \t3\n" +
- "\tA.C.D.d : \t3\n" +
- "}");
-
- /*
- * cast again to A and set the 'b' attribute on A.
- */
- IStruct s3 = BType.castAs(s2, "A");
- s3.set("b", true);
- Assert.assertEquals(ts.toString(), "{\n" +
- "\td : \t1\n" +
- "\tb : \ttrue\n" +
- "\tc : \t1\n" +
- "\ta : \t1\n" +
- "\tA.B.D.b : \ttrue\n" +
- "\tA.B.D.c : \t2\n" +
- "\tA.B.D.d : \t2\n" +
- "\tA.C.D.a : \t3\n" +
- "\tA.C.D.b : \tfalse\n" +
- "\tA.C.D.c : \t3\n" +
- "\tA.C.D.d : \t3\n" +
- "}");
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeSystemTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeSystemTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeSystemTest.java
deleted file mode 100755
index 0ef5d10..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeSystemTest.java
+++ /dev/null
@@ -1,327 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.exception.TypeExistsException;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.apache.commons.lang3.RandomStringUtils;
-import org.testng.Assert;
-import org.testng.annotations.AfterMethod;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import scala.actors.threadpool.Arrays;
-
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createClassTypeDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createStructTypeDef;
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.createTraitTypeDef;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-
-public class TypeSystemTest extends BaseTest {
-
- public static final long TEST_DATE_IN_LONG = 1418265358440L;
- public static final String TEST_DATE_STRING = "2014-12-11T02:35:58.440Z";
-
- @BeforeClass
- public void setUp() throws Exception {
- super.setup();
- }
-
- @AfterMethod
- public void tearDown() throws Exception {
- getTypeSystem().reset();
- }
-
- @Test
- public void testGetTypeNames() throws Exception {
- getTypeSystem().defineEnumType("enum_test", new EnumValue("0", 0), new EnumValue("1", 1), new EnumValue("2", 2),
- new EnumValue("3", 3));
- assertTrue(getTypeSystem().getTypeNames().contains("enum_test"));
- }
-
- @Test
- public void testGetTypeDescription() throws Exception {
- String typeName = "enum_type";
- String description = "_description";
- String typeDescription = typeName + description;
- getTypeSystem().defineEnumType(typeName, typeDescription, new EnumValue("0", 0), new EnumValue("1", 1), new EnumValue("2", 2),
- new EnumValue("3", 3));
- assertTrue(getTypeSystem().getTypeNames().contains(typeName));
- IDataType type = getTypeSystem().getDataType(EnumType.class, typeName);
- Assert.assertNotNull(type);
- Assert.assertEquals(type.getDescription(), typeDescription);
-
- typeName = "trait_type";
- typeDescription = typeName + description;
- HierarchicalTypeDefinition<TraitType> trait = TypesUtil
- .createTraitTypeDef(typeName, typeDescription, ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
- getTypeSystem().defineTraitType(trait);
- assertTrue(getTypeSystem().getTypeNames().contains(typeName));
- type = getTypeSystem().getDataType(TraitType.class, typeName);
- Assert.assertNotNull(type);
- Assert.assertEquals(type.getDescription(), typeDescription);
-
- typeName = "class_type";
- typeDescription = typeName + description;
- HierarchicalTypeDefinition<ClassType> classType = TypesUtil
- .createClassTypeDef(typeName, typeDescription, ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
- getTypeSystem().defineClassType(classType);
- assertTrue(getTypeSystem().getTypeNames().contains(typeName));
- type = getTypeSystem().getDataType(ClassType.class, typeName);
- Assert.assertNotNull(type);
- Assert.assertEquals(type.getDescription(), typeDescription);
-
- typeName = "struct_type";
- typeDescription = typeName + description;
- getTypeSystem().defineStructType(typeName, typeDescription, true, createRequiredAttrDef("a", DataTypes.INT_TYPE));
- assertTrue(getTypeSystem().getTypeNames().contains(typeName));
- type = getTypeSystem().getDataType(StructType.class, typeName);
- Assert.assertNotNull(type);
- Assert.assertEquals(type.getDescription(), typeDescription);
-
- }
-
- @Test
- public void testIsRegistered() throws Exception {
- getTypeSystem().defineEnumType("enum_test", new EnumValue("0", 0), new EnumValue("1", 1), new EnumValue("2", 2),
- new EnumValue("3", 3));
- assertTrue(getTypeSystem().isRegistered("enum_test"));
- }
-
- @Test
- public void testGetTraitsNames() throws Exception {
- HierarchicalTypeDefinition<TraitType> classificationTraitDefinition = TypesUtil
- .createTraitTypeDef("Classification", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<TraitType> piiTrait =
- TypesUtil.createTraitTypeDef("PII", ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> phiTrait =
- TypesUtil.createTraitTypeDef("PHI", ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> pciTrait =
- TypesUtil.createTraitTypeDef("PCI", ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> soxTrait =
- TypesUtil.createTraitTypeDef("SOX", ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> secTrait =
- TypesUtil.createTraitTypeDef("SEC", ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> financeTrait =
- TypesUtil.createTraitTypeDef("Finance", ImmutableSet.<String>of());
-
- getTypeSystem().defineTypes(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.of(classificationTraitDefinition, piiTrait, phiTrait, pciTrait, soxTrait, secTrait,
- financeTrait), ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
-
- final ImmutableList<String> traitsNames = getTypeSystem().getTypeNamesByCategory(DataTypes.TypeCategory.TRAIT);
- Assert.assertEquals(traitsNames.size(), 7);
- List traits = Arrays.asList(new String[]{"Classification", "PII", "PHI", "PCI", "SOX", "SEC", "Finance",});
-
- Assert.assertFalse(Collections.disjoint(traitsNames, traits));
- }
-
- private String random() {
- return RandomStringUtils.random(10);
- }
-
- @Test
- public void testUTFNames() throws Exception {
- TypeSystem ts = getTypeSystem();
-
- String enumType = random();
- EnumTypeDefinition orgLevelEnum =
- new EnumTypeDefinition(enumType, new EnumValue(random(), 1), new EnumValue(random(), 2));
-
- String structName = random();
- String attrType = random();
- StructTypeDefinition structType =
- createStructTypeDef(structName, createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
-
- String className = random();
- HierarchicalTypeDefinition<ClassType> classType = createClassTypeDef(className, ImmutableSet.<String>of(),
- createRequiredAttrDef(attrType, DataTypes.STRING_TYPE));
-
- String traitName = random();
- HierarchicalTypeDefinition<TraitType> traitType = createTraitTypeDef(traitName, ImmutableSet.<String>of(),
- createRequiredAttrDef(attrType, DataTypes.INT_TYPE));
-
- ts.defineTypes(ImmutableList.of(orgLevelEnum), ImmutableList.of(structType),
- ImmutableList.of(traitType), ImmutableList.of(classType));
- }
-
- @Test
- public void testTypeCategory() throws AtlasException {
- TypeSystem ts = getTypeSystem();
- ts.reset();
-
- StructTypeDefinition struct_A = createStructTypeDef("struct_A", createRequiredAttrDef("s_A", DataTypes.STRING_TYPE));
- StructTypeDefinition struct_B = createStructTypeDef("struct_B", createRequiredAttrDef("s_B", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<TraitType> trait_A = createTraitTypeDef("trait_A", null,
- createRequiredAttrDef("t_A", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<TraitType> trait_B = createTraitTypeDef("trait_B", ImmutableSet.of("trait_A"),
- createRequiredAttrDef("t_B", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<TraitType> trait_C = createTraitTypeDef("trait_C", ImmutableSet.of("trait_A"),
- createRequiredAttrDef("t_C", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<TraitType> trait_D = createTraitTypeDef("trait_D", ImmutableSet.of("trait_B", "trait_C"),
- createRequiredAttrDef("t_D", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<ClassType> class_A = createClassTypeDef("class_A", null,
- createRequiredAttrDef("c_A", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> class_B = createClassTypeDef("class_B", ImmutableSet.of("class_A"),
- createRequiredAttrDef("c_B", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> class_C = createClassTypeDef("class_C", ImmutableSet.of("class_B"),
- createRequiredAttrDef("c_C", DataTypes.STRING_TYPE));
-
- ts.defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.of(struct_A, struct_B),
- ImmutableList.of(trait_A, trait_B, trait_C, trait_D),
- ImmutableList.of(class_A, class_B, class_C));
-
- final ImmutableList<String> structNames = ts.getTypeNamesByCategory(DataTypes.TypeCategory.STRUCT);
- final ImmutableList<String> traitNames = ts.getTypeNamesByCategory(DataTypes.TypeCategory.TRAIT);
- final ImmutableList<String> classNames = ts.getTypeNamesByCategory(DataTypes.TypeCategory.CLASS);
-
- Assert.assertEquals(structNames.size(), 2);
- Assert.assertEquals(traitNames.size(), 4);
- Assert.assertEquals(classNames.size(), 3);
- }
-
- @Test
- public void testTypeNamesAreNotDuplicated() throws Exception {
- TypeSystem typeSystem = getTypeSystem();
- ImmutableList<String> traitNames = typeSystem.getTypeNamesByCategory(DataTypes.TypeCategory.TRAIT);
- int numTraits = traitNames.size();
-
- HashMap<String, IDataType> typesAdded = new HashMap<>();
- String traitName = "dup_type_test" + random();
- TraitType traitType = new TraitType(typeSystem, traitName, null, null, 0);
- typesAdded.put(traitName, traitType);
- typeSystem.commitTypes(typesAdded);
-
- traitNames = typeSystem.getTypeNamesByCategory(DataTypes.TypeCategory.TRAIT);
- Assert.assertEquals(traitNames.size(), numTraits+1);
-
- // add again with another trait this time
- traitName = "dup_type_test" + random();
- TraitType traitTypeNew = new TraitType(typeSystem, traitName, null, null, 0);
- typesAdded.put(traitName, traitTypeNew);
-
- typeSystem.commitTypes(typesAdded);
- traitNames = typeSystem.getTypeNamesByCategory(DataTypes.TypeCategory.TRAIT);
- Assert.assertEquals(traitNames.size(), numTraits+2);
- }
-
- @Test
- public void testHierarchy() throws Exception {
- HierarchicalTypeDefinition<ClassType> testObjectDef = TypesUtil.createClassTypeDef("TestObject", ImmutableSet.<String>of(),
- createOptionalAttrDef("name", DataTypes.STRING_TYPE),
- createOptionalAttrDef("description", DataTypes.STRING_TYPE),
- createOptionalAttrDef("topAttribute", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> testDataSetDef = TypesUtil.createClassTypeDef("TestDataSet", ImmutableSet.of("TestObject"));
- HierarchicalTypeDefinition<ClassType> testColumnDef = TypesUtil.createClassTypeDef("TestColumn", ImmutableSet.of("TestObject"),
- createRequiredAttrDef("name", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> testRelationalDataSetDef =
- TypesUtil.createClassTypeDef("TestRelationalDataSet", ImmutableSet.of("TestDataSet"),
- new AttributeDefinition("columns", DataTypes.arrayTypeName("TestColumn"),
- Multiplicity.OPTIONAL, true, null));
- HierarchicalTypeDefinition<ClassType> testTableDef = TypesUtil.createClassTypeDef("TestTable", ImmutableSet.of("TestRelationalDataSet"),
- createOptionalAttrDef("schema", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> testDataFileDef = TypesUtil.createClassTypeDef("TestDataFile", ImmutableSet.of("TestRelationalDataSet"),
- createOptionalAttrDef("urlString", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> testDocumentDef = TypesUtil.createClassTypeDef("TestDocument", ImmutableSet.of("TestDataSet"),
- createOptionalAttrDef("urlString", DataTypes.STRING_TYPE),
- createOptionalAttrDef("encoding", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> testAnnotationDef =TypesUtil.createClassTypeDef("TestAnnotation", ImmutableSet.<String>of(),
- createOptionalAttrDef("inheritedAttribute", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> myNewAnnotationDef = TypesUtil.createClassTypeDef("MyNewAnnotation", ImmutableSet.of("TestAnnotation"),
- createRequiredAttrDef("myNewAnnotationAttribute", DataTypes.STRING_TYPE));
- getTypeSystem().defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(testObjectDef, testDataSetDef, testColumnDef, testRelationalDataSetDef, testTableDef, testDataFileDef, testDocumentDef, testAnnotationDef, myNewAnnotationDef));
-
- // Verify that field mappings for MyNewAnnotation contains the attribute inherited from the TestAnnotation superclass.
- // Prior to fix for ATLAS-573, the inherited attribute was missing.
- ClassType dataType = getTypeSystem().getDataType(ClassType.class, "MyNewAnnotation");
- Assert.assertTrue(dataType.fieldMapping.fields.containsKey("inheritedAttribute"));
- }
-
- @Test
- public void testRedefineExistingType() throws Exception {
- TypeSystem typeSystem = getTypeSystem();
- HierarchicalTypeDefinition<TraitType> trait = TypesUtil
- .createTraitTypeDef(random(), "description", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
- typeSystem.defineTraitType(trait);
-
- try {
- typeSystem.defineTraitType(trait);
- fail("Expected TypeExistsException");
- } catch(TypeExistsException e) {
- //expected
- }
- }
-
- @Test
- public void testDuplicateNewTypenames() throws Exception {
- TypeSystem typeSystem = getTypeSystem();
- HierarchicalTypeDefinition<TraitType> trait1 = TypesUtil
- .createTraitTypeDef(random(), "description", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
- // create another trait with the same name
- HierarchicalTypeDefinition<TraitType> trait2 = TypesUtil
- .createTraitTypeDef(trait1.typeName, "description", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
-
- try {
- typeSystem.defineTypes(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.of(trait1, trait2), ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
- } catch(AtlasException e) {
- fail("Exception unexpected");
- }
- }
-
- @Test(expectedExceptions = ValueConversionException.class)
- public void testConvertInvalidDate() throws Exception {
- DataTypes.DATE_TYPE.convert("", Multiplicity.OPTIONAL);
- }
-
- @Test()
- public void testConvertValidDate() throws Exception {
- Date date = DataTypes.DATE_TYPE.convert(TEST_DATE_STRING, Multiplicity.OPTIONAL);
- Assert.assertEquals(date, new Date(TEST_DATE_IN_LONG));
-
-
- StringBuilder buf = new StringBuilder();
- DataTypes.DATE_TYPE.output(new Date(TEST_DATE_IN_LONG), buf, "", new HashSet<Date>());
- Assert.assertEquals(buf.toString(), TEST_DATE_STRING);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeUpdateBaseTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeUpdateBaseTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeUpdateBaseTest.java
deleted file mode 100644
index 4a6ed2d..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/TypeUpdateBaseTest.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.Assert;
-
-public abstract class TypeUpdateBaseTest extends BaseTest {
- protected void testTypeUpdateForAttributes() throws Exception {
- StructTypeDefinition typeDefinition =
- getTypeDefinition(newName(), TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE));
- TypeSystem ts = getTypeSystem();
- TypesDef typesDef = getTypesDef(typeDefinition);
- ts.defineTypes(typesDef);
- String typeName = typeDefinition.typeName;
-
- //Allow modifying required to optional attribute
- typeDefinition = getTypeDefinition(typeName, TypesUtil.createOptionalAttrDef("a", DataTypes.INT_TYPE));
- ts.updateTypes(getTypesDef(typeDefinition));
-
- //Allow adding new optional attribute
- typeDefinition = getTypeDefinition(typeName, TypesUtil.createOptionalAttrDef("a", DataTypes.INT_TYPE),
- TypesUtil.createOptionalAttrDef("b", DataTypes.INT_TYPE));
- ts.updateTypes(getTypesDef(typeDefinition));
-
- //Don't allow adding required attribute
- typeDefinition = getTypeDefinition(typeName, TypesUtil.createOptionalAttrDef("a", DataTypes.INT_TYPE),
- TypesUtil.createOptionalAttrDef("b", DataTypes.INT_TYPE),
- TypesUtil.createRequiredAttrDef("c", DataTypes.INT_TYPE));
- try {
- ts.updateTypes(getTypesDef(typeDefinition));
- Assert.fail("Expected TypeUpdateException");
- } catch (TypeUpdateException e) {
- //assert that type is not updated when validation fails
- Assert.assertEquals(getNumberOfFields(ts, typeDefinition.typeName), 2);
- }
-
- //Don't allow removing attribute
- typeDefinition = getTypeDefinition(typeName, TypesUtil.createOptionalAttrDef("a", DataTypes.INT_TYPE));
- try {
- ts.updateTypes(getTypesDef(typeDefinition));
- } catch (TypeUpdateException e) {
- //expected
- }
-
- //Don't allow modifying other fields of attribute definition - optional to required
- typeDefinition = getTypeDefinition(typeName, TypesUtil.createOptionalAttrDef("a", DataTypes.INT_TYPE),
- TypesUtil.createRequiredAttrDef("b", DataTypes.INT_TYPE));
- try {
- ts.updateTypes(getTypesDef(typeDefinition));
- } catch (TypeUpdateException e) {
- //expected
- }
-
- //Don't allow modifying other fields of attribute definition - attribute type change
- typeDefinition = getTypeDefinition(typeName, TypesUtil.createOptionalAttrDef("a", DataTypes.INT_TYPE),
- TypesUtil.createOptionalAttrDef("b", DataTypes.STRING_TYPE));
- try {
- ts.updateTypes(getTypesDef(typeDefinition));
- } catch (TypeUpdateException e) {
- //expected
- }
-
- //Don't allow modifying other fields of attribute definition - attribute type change
- typeDefinition = getTypeDefinition(typeName, TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
- new AttributeDefinition("b", DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
- Multiplicity.COLLECTION, false, null));
- try {
- ts.updateTypes(getTypesDef(typeDefinition));
- } catch (TypeUpdateException e) {
- //expected
- }
- }
-
- protected abstract int getNumberOfFields(TypeSystem ts, String typeName) throws Exception;
-
- protected abstract TypesDef getTypesDef(StructTypeDefinition typeDefinition);
-
- protected abstract StructTypeDefinition getTypeDefinition(String typeName,
- AttributeDefinition... attributeDefinitions);
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/ValidationTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/ValidationTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/ValidationTest.java
deleted file mode 100644
index 1a86cf3..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/ValidationTest.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableSet;
-
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-public class ValidationTest {
- @DataProvider(name = "attributeData")
- private Object[][] createAttributeData() {
- return new String[][]{{null, "type"}, {"", "type"}, {"name", null}, {"name", ""}};
- }
-
- @Test(dataProvider = "attributeData", expectedExceptions = {IllegalArgumentException.class})
- public void testAttributes(String name, String type) {
- TypesUtil.createRequiredAttrDef(name, type);
- }
-
- @DataProvider(name = "enumValueData")
- private Object[][] createEnumValueData() {
- return new String[][]{{null}, {""}};
- }
-
- @Test(dataProvider = "enumValueData", expectedExceptions = {IllegalArgumentException.class})
- public void testEnumValue(String name) {
- new EnumValue(name, 1);
- }
-
- @DataProvider(name = "enumTypeData")
- private Object[][] createEnumTypeData() {
- EnumValue value = new EnumValue("name", 1);
- return new Object[][]{{null, value}, {"", value}, {"name"}};
- }
-
- @Test(dataProvider = "enumTypeData", expectedExceptions = {IllegalArgumentException.class})
- public void testEnumType(String name, EnumValue... values) {
- new EnumTypeDefinition(name, values);
- }
-
- @DataProvider(name = "structTypeData")
- private Object[][] createStructTypeData() {
- AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
- return new Object[][]{{null, value}, {"", value}, {"name"}};
- }
-
- @Test(dataProvider = "structTypeData", expectedExceptions = {IllegalArgumentException.class})
- public void testStructType(String name, AttributeDefinition... values) {
- new StructTypeDefinition(name, values);
- }
-
- @DataProvider(name = "classTypeData")
- private Object[][] createClassTypeData() {
- return new Object[][]{{null}, {""}};
- }
-
- @Test(dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
- public void testClassType(String name) {
- AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
- TypesUtil.createClassTypeDef(name, ImmutableSet.of("super"), value);
- }
-
- @Test(dataProvider = "classTypeData", expectedExceptions = {IllegalArgumentException.class})
- public void testTraitType(String name) {
- AttributeDefinition value = TypesUtil.createRequiredAttrDef("name", "type");
- TypesUtil.createTraitTypeDef(name, ImmutableSet.of("super"), value);
- }
-
- @Test
- public void testValidTypes() {
- AttributeDefinition attribute = TypesUtil.createRequiredAttrDef("name", "type");
-
- //class with no attributes
- TypesUtil.createClassTypeDef("name", ImmutableSet.of("super"));
-
- //class with no super types
- TypesUtil.createClassTypeDef("name", ImmutableSet.<String>of(), attribute);
-
- //trait with no attributes
- TypesUtil.createTraitTypeDef("name", ImmutableSet.of("super"));
-
- //trait with no super types
- TypesUtil.createTraitTypeDef("name", ImmutableSet.<String>of(), attribute);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCacheTest.java
----------------------------------------------------------------------
diff --git a/typesystem/src/test/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCacheTest.java b/typesystem/src/test/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCacheTest.java
deleted file mode 100644
index 5c397dd..0000000
--- a/typesystem/src/test/java/org/apache/atlas/typesystem/types/cache/DefaultTypeCacheTest.java
+++ /dev/null
@@ -1,450 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.typesystem.types.cache;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.EnumType;
-import org.apache.atlas.typesystem.types.EnumValue;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.StructType;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertFalse;
-import static org.testng.Assert.assertNotNull;
-import static org.testng.Assert.assertNull;
-import static org.testng.Assert.assertTrue;
-import static org.testng.Assert.fail;
-
-/**
- * Tests functional behavior of {@link DefaultTypeCache}
- */
-@SuppressWarnings("rawtypes")
-public class DefaultTypeCacheTest {
-
- private String CLASSTYPE_CUSTOMER = "Customer";
- private String STRUCTTYPE_ADDRESS = "Address";
- private String TRAITTYPE_PRIVILEGED = "Privileged";
- private String ENUMTYPE_SHIPPING = "Shipping";
-
- private String UNKNOWN_TYPE = "UndefinedType";
-
- private ClassType customerType;
- private StructType addressType;
- private TraitType privilegedTrait;
- private EnumType shippingEnum;
-
- private DefaultTypeCache cache;
-
- @BeforeClass
- public void onetimeSetup() throws Exception {
-
- // init TypeSystem
- TypeSystem ts = TypeSystem.getInstance().reset();
-
- // Customer ClassType
- customerType = ts.defineClassType(TypesUtil
- .createClassTypeDef(CLASSTYPE_CUSTOMER, ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef("id", DataTypes.LONG_TYPE)));
-
- // Address StructType
- addressType = ts.defineStructType(STRUCTTYPE_ADDRESS, true,
- TypesUtil.createRequiredAttrDef("first line", DataTypes.STRING_TYPE),
- TypesUtil.createOptionalAttrDef("second line", DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef("city", DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef("pincode", DataTypes.INT_TYPE));
-
- // Privileged TraitType
- privilegedTrait = ts.defineTraitType(TypesUtil
- .createTraitTypeDef(TRAITTYPE_PRIVILEGED, ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("category", DataTypes.INT_TYPE)));
-
- // Shipping EnumType
- shippingEnum = ts.defineEnumType(TypesUtil.createEnumTypeDef(ENUMTYPE_SHIPPING,
- new EnumValue("Domestic", 1), new EnumValue("International", 2)));
- }
-
- @BeforeMethod
- public void eachTestSetup() throws Exception {
-
- cache = new DefaultTypeCache();
-
- cache.put(customerType);
- cache.put(addressType);
- cache.put(privilegedTrait);
- cache.put(shippingEnum);
- }
-
- @Test
- public void testCacheGetType() throws Exception {
-
- IDataType custType = cache.get(CLASSTYPE_CUSTOMER);
- verifyType(custType, CLASSTYPE_CUSTOMER, ClassType.class);
-
- IDataType addrType = cache.get(STRUCTTYPE_ADDRESS);
- verifyType(addrType, STRUCTTYPE_ADDRESS, StructType.class);
-
- IDataType privTrait = cache.get(TRAITTYPE_PRIVILEGED);
- verifyType(privTrait, TRAITTYPE_PRIVILEGED, TraitType.class);
-
- IDataType shippingEnum = cache.get(ENUMTYPE_SHIPPING);
- verifyType(shippingEnum, ENUMTYPE_SHIPPING, EnumType.class);
-
- assertNull(cache.get(UNKNOWN_TYPE));
- }
-
- @Test
- public void testCacheGetTypeByCategory() throws Exception {
-
- IDataType custType = cache.get(TypeCategory.CLASS, CLASSTYPE_CUSTOMER);
- verifyType(custType, CLASSTYPE_CUSTOMER, ClassType.class);
-
- IDataType addrType = cache.get(TypeCategory.STRUCT, STRUCTTYPE_ADDRESS);
- verifyType(addrType, STRUCTTYPE_ADDRESS, StructType.class);
-
- IDataType privTrait = cache.get(TypeCategory.TRAIT, TRAITTYPE_PRIVILEGED);
- verifyType(privTrait, TRAITTYPE_PRIVILEGED, TraitType.class);
-
- IDataType shippingEnum = cache.get(TypeCategory.ENUM, ENUMTYPE_SHIPPING);
- verifyType(shippingEnum, ENUMTYPE_SHIPPING, EnumType.class);
-
- assertNull(cache.get(UNKNOWN_TYPE));
- }
-
- private void verifyType(IDataType actualType, String expectedName, Class<? extends IDataType> typeClass) {
-
- assertNotNull(actualType, "The " + expectedName + " type not in cache");
- assertTrue(typeClass.isInstance(actualType));
- assertEquals(actualType.getName(), expectedName, "The type name does not match");
- }
-
- @Test
- public void testCacheHasType() throws Exception {
-
- assertTrue(cache.has(CLASSTYPE_CUSTOMER));
- assertTrue(cache.has(STRUCTTYPE_ADDRESS));
- assertTrue(cache.has(TRAITTYPE_PRIVILEGED));
- assertTrue(cache.has(ENUMTYPE_SHIPPING));
-
- assertFalse(cache.has(UNKNOWN_TYPE));
- }
-
- @Test
- public void testCacheHasTypeByCategory() throws Exception {
-
- assertTrue(cache.has(TypeCategory.CLASS, CLASSTYPE_CUSTOMER));
- assertTrue(cache.has(TypeCategory.STRUCT, STRUCTTYPE_ADDRESS));
- assertTrue(cache.has(TypeCategory.TRAIT, TRAITTYPE_PRIVILEGED));
- assertTrue(cache.has(TypeCategory.ENUM, ENUMTYPE_SHIPPING));
-
- assertFalse(cache.has(UNKNOWN_TYPE));
- }
-
- @Test
- public void testCacheGetAllTypeNames() throws Exception {
-
- List<String> allTypeNames = new ArrayList<>(cache.getAllTypeNames());
- Collections.sort(allTypeNames);
-
- final int EXPECTED_TYPE_COUNT = 4;
- assertEquals(allTypeNames.size(), EXPECTED_TYPE_COUNT, "Total number of types does not match.");
-
- assertEquals(STRUCTTYPE_ADDRESS, allTypeNames.get(0));
- assertEquals(CLASSTYPE_CUSTOMER, allTypeNames.get(1));
- assertEquals(TRAITTYPE_PRIVILEGED, allTypeNames.get(2));
- assertEquals(ENUMTYPE_SHIPPING, allTypeNames.get(3));
- }
-
- private Collection<String> getTypeNamesByCategory(final TypeCategory category)
- throws AtlasException {
- return cache.getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>() {{
- put(TypeCache.TYPE_FILTER.CATEGORY, category.name());
- }});
- }
-
- @Test
- public void testCacheGetTypeNamesByCategory() throws Exception {
- List<String> classTypes = new ArrayList(getTypeNamesByCategory(TypeCategory.CLASS));
- final int EXPECTED_CLASSTYPE_COUNT = 1;
- assertEquals(classTypes.size(), EXPECTED_CLASSTYPE_COUNT);
- assertEquals(CLASSTYPE_CUSTOMER, classTypes.get(0));
-
- List<String> structTypes = new ArrayList(getTypeNamesByCategory(TypeCategory.STRUCT));
- final int EXPECTED_STRUCTTYPE_COUNT = 1;
- assertEquals(structTypes.size(), EXPECTED_STRUCTTYPE_COUNT);
- assertEquals(STRUCTTYPE_ADDRESS, structTypes.get(0));
-
- List<String> traitTypes = new ArrayList(getTypeNamesByCategory(TypeCategory.TRAIT));
- final int EXPECTED_TRAITTYPE_COUNT = 1;
- assertEquals(traitTypes.size(), EXPECTED_TRAITTYPE_COUNT);
- assertEquals(TRAITTYPE_PRIVILEGED, traitTypes.get(0));
-
- List<String> enumTypes = new ArrayList(getTypeNamesByCategory(TypeCategory.ENUM));
- final int EXPECTED_ENUMTYPE_COUNT = 1;
- assertEquals(enumTypes.size(), EXPECTED_ENUMTYPE_COUNT);
- assertEquals(ENUMTYPE_SHIPPING, enumTypes.get(0));
- }
-
- @Test
- public void testCacheBulkInsert() throws Exception {
-
- List<IDataType> allTypes = new ArrayList<>();
- allTypes.add(customerType);
- allTypes.add(addressType);
- allTypes.add(privilegedTrait);
- allTypes.add(shippingEnum);
-
- // create a new cache instead of using the one setup for every method call
- cache = new DefaultTypeCache();
- cache.putAll(allTypes);
-
- IDataType custType = cache.get(CLASSTYPE_CUSTOMER);
- verifyType(custType, CLASSTYPE_CUSTOMER, ClassType.class);
-
- IDataType addrType = cache.get(STRUCTTYPE_ADDRESS);
- verifyType(addrType, STRUCTTYPE_ADDRESS, StructType.class);
-
- IDataType privTrait = cache.get(TRAITTYPE_PRIVILEGED);
- verifyType(privTrait, TRAITTYPE_PRIVILEGED, TraitType.class);
-
- IDataType shippingEnum = cache.get(ENUMTYPE_SHIPPING);
- verifyType(shippingEnum, ENUMTYPE_SHIPPING, EnumType.class);
- }
-
- @Test
- public void testCacheRemove() throws Exception {
- cache.remove(CLASSTYPE_CUSTOMER);
- assertNull(cache.get(CLASSTYPE_CUSTOMER));
- assertFalse(cache.has(CLASSTYPE_CUSTOMER));
- assertTrue(getTypeNamesByCategory(TypeCategory.CLASS).isEmpty());
-
- final int EXPECTED_TYPE_COUNT = 3;
- assertEquals(cache.getAllTypeNames().size(), EXPECTED_TYPE_COUNT);
- }
-
- @Test
- public void testCacheRemoveByCategory() throws Exception {
-
- cache.remove(TypeCategory.CLASS, CLASSTYPE_CUSTOMER);
- assertNull(cache.get(CLASSTYPE_CUSTOMER));
- assertFalse(cache.has(CLASSTYPE_CUSTOMER));
- assertTrue(getTypeNamesByCategory(TypeCategory.CLASS).isEmpty());
-
- final int EXPECTED_TYPE_COUNT = 3;
- assertEquals(cache.getAllTypeNames().size(), EXPECTED_TYPE_COUNT);
- }
-
- @Test
- public void testCacheClear() throws Exception {
-
- cache.clear();
-
- assertNull(cache.get(CLASSTYPE_CUSTOMER));
- assertFalse(cache.has(CLASSTYPE_CUSTOMER));
-
- assertNull(cache.get(STRUCTTYPE_ADDRESS));
- assertFalse(cache.has(STRUCTTYPE_ADDRESS));
-
- assertNull(cache.get(TRAITTYPE_PRIVILEGED));
- assertFalse(cache.has(TRAITTYPE_PRIVILEGED));
-
- assertNull(cache.get(ENUMTYPE_SHIPPING));
- assertFalse(cache.has(ENUMTYPE_SHIPPING));
-
- assertTrue(getTypeNamesByCategory(TypeCategory.CLASS).isEmpty());
- assertTrue(getTypeNamesByCategory(TypeCategory.STRUCT).isEmpty());
- assertTrue(getTypeNamesByCategory(TypeCategory.TRAIT).isEmpty());
- assertTrue(getTypeNamesByCategory(TypeCategory.ENUM).isEmpty());
-
- assertTrue(cache.getAllTypeNames().isEmpty());
- }
-
- @Test(expectedExceptions = AtlasException.class)
- public void testPutTypeWithNullType() throws Exception {
-
- cache.put(null);
- fail("Null type should be not allowed in 'put'");
- }
-
- @Test(expectedExceptions = AtlasException.class)
- public void testPutTypeWithInvalidType() throws Exception {
-
- cache.put(DataTypes.BOOLEAN_TYPE);
- fail("type should only be an instance of ClassType | EnumType | StructType | TraitType in 'put'");
- }
-
- @Test(expectedExceptions = IllegalArgumentException.class)
- public void testGetTypeWithNullCategory() throws Exception {
-
- cache.get(null, CLASSTYPE_CUSTOMER);
- fail("Null TypeCategory should be not allowed in 'get'");
- }
-
- @Test(expectedExceptions = IllegalArgumentException.class)
- public void testGetTypeWithInvalidCategory() throws Exception {
-
- cache.get(TypeCategory.PRIMITIVE, DataTypes.BOOLEAN_TYPE.getName());
- fail("TypeCategory should only be one of TypeCategory.CLASS | ENUM | STRUCT | TRAIT in 'get'");
- }
-
- @Test(expectedExceptions = IllegalArgumentException.class)
- public void testCacheHasTypeWithNullCategory() throws Exception {
-
- cache.has(null, CLASSTYPE_CUSTOMER);
- fail("Null TypeCategory should be not allowed in 'has'");
- }
-
- @Test(expectedExceptions = IllegalArgumentException.class)
- public void testCacheHasTypeWithInvalidCategory() throws Exception {
-
- cache.has(TypeCategory.PRIMITIVE, DataTypes.BOOLEAN_TYPE.getName());
- fail("TypeCategory should only be one of TypeCategory.CLASS | ENUM | STRUCT | TRAIT in 'has'");
- }
-
- @Test(expectedExceptions = IllegalArgumentException.class)
- public void testCacheGetTypeNamesByInvalidCategory() throws Exception {
- getTypeNamesByCategory(TypeCategory.PRIMITIVE);
- fail("TypeCategory should only be one of TypeCategory.CLASS | ENUM | STRUCT | TRAIT in 'getNames'");
- }
-
- @Test(expectedExceptions = AtlasException.class)
- public void testCacheBulkInsertWithNullType() throws Exception {
-
- List<IDataType> allTypes = new ArrayList<>();
- allTypes.add(null);
-
- // create a new cache instead of using the one setup for every method call
- cache = new DefaultTypeCache();
- cache.putAll(allTypes);
-
- fail("Null type should be not allowed in 'putAll'");
- }
-
- @Test(expectedExceptions = AtlasException.class)
- public void testCacheBulkInsertWithInvalidType() throws Exception {
-
- List<IDataType> allTypes = new ArrayList<>();
- allTypes.add(DataTypes.BOOLEAN_TYPE);
-
- // create a new cache instead of using the one setup for every method call
- cache = new DefaultTypeCache();
- cache.putAll(allTypes);
-
- fail("type should only one of ClassType | EnumType | StructType | TraitType in 'putAll'");
- }
-
- @Test(expectedExceptions = IllegalArgumentException.class)
- public void testCacheRemoveByNullCategory() throws Exception {
-
- cache.remove(null, CLASSTYPE_CUSTOMER);
- fail("Null type should be not allowed in 'remove'");
- }
-
- @Test(expectedExceptions = IllegalArgumentException.class)
- public void testCacheRemoveByInvalidCategory() throws Exception {
-
- cache.remove(TypeCategory.PRIMITIVE, DataTypes.BOOLEAN_TYPE.getName());
- fail("TypeCategory should only be one of TypeCategory.CLASS | ENUM | STRUCT | TRAIT in 'remove'");
- }
-
- @Test
- public void testGetTypesByFilter() throws Exception {
- // init TypeSystem
- TypeSystem ts = TypeSystem.getInstance().reset();
-
- ts.defineClassType(TypesUtil.createClassTypeDef("A", ImmutableSet.<String>of()));
- ts.defineClassType(TypesUtil.createClassTypeDef("A1", ImmutableSet.of("A")));
-
- ts.defineClassType(TypesUtil.createClassTypeDef("B", ImmutableSet.<String>of()));
-
- ts.defineClassType(TypesUtil.createClassTypeDef("C", ImmutableSet.of("B", "A")));
-
- //supertype ~ A
- ImmutableList<String> results = ts.getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>() {{
- put(TypeCache.TYPE_FILTER.SUPERTYPE, "A");
- }});
- assertTrue(results.containsAll(Arrays.asList("A1", "C")), "Results: " + results);
-
- //!supertype doesn't return the type itself
- results = ts.getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>() {{
- put(TypeCache.TYPE_FILTER.NOT_SUPERTYPE, "A");
- }});
- assertTrue(results.containsAll(Arrays.asList("B")), "Results: " + results);
-
- //supertype ~ A && supertype !~ B
- results = ts.getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>() {{
- put(TypeCache.TYPE_FILTER.SUPERTYPE, "A");
- put(TypeCache.TYPE_FILTER.NOT_SUPERTYPE, "B");
- }});
- assertTrue(results.containsAll(Arrays.asList("A1")), "Results: " + results);
-
- //none of category trait
- results = ts.getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>() {{
- put(TypeCache.TYPE_FILTER.CATEGORY, TypeCategory.TRAIT.name());
- put(TypeCache.TYPE_FILTER.SUPERTYPE, "A");
- }});
- assertTrue(results.isEmpty(), "Results: " + results);
-
- //no filter returns all types
- results = ts.getTypeNames(null);
- assertTrue(results.containsAll(Arrays.asList("A", "A1", "B", "C")), "Results: " + results);
-
- results = ts.getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>());
- assertTrue(results.containsAll(Arrays.asList("A", "A1", "B", "C")), "Results: " + results);
-
- //invalid category
- try {
- ts.getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>() {{
- put(TypeCache.TYPE_FILTER.CATEGORY, "A");
- }});
- fail("Expected IllegalArgumentException");
- } catch (IllegalArgumentException e) {
- //expected
- }
-
- //invalid supertype
- results = ts.getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>() {{
- put(TypeCache.TYPE_FILTER.SUPERTYPE, "X");
- }});
- assertTrue(results.isEmpty(), "Expected empty result for non-existent type 'X'. Found: " + results);
-
- //invalid supertype
- results = ts.getTypeNames(new HashMap<TypeCache.TYPE_FILTER, String>() {{
- put(TypeCache.TYPE_FILTER.NOT_SUPERTYPE, "X");
- }});
- assertTrue(results.containsAll(Arrays.asList("A", "A1", "B", "C")), "Results: " + results);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/resources/atlas-application.properties
----------------------------------------------------------------------
diff --git a/typesystem/src/test/resources/atlas-application.properties b/typesystem/src/test/resources/atlas-application.properties
deleted file mode 100644
index b937c33..0000000
--- a/typesystem/src/test/resources/atlas-application.properties
+++ /dev/null
@@ -1,145 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#system property
-atlas.data=${sys:user.dir}/target/data
-
-
-
-#re-use existing property
-atlas.graph.data=${atlas.data}/graph
-
-#plain property
-atlas.service=atlas
-
-#invalid system property
-atlas.db=${atlasdb}
-
-atlas.TypeSystem.impl=org.apache.atlas.typesystem.types.TypeSystem
-
-
-
-######### Atlas Server Configs #########
-atlas.rest.address=http://localhost:31000
-
-######### Graph Database Configs #########
-
-
-# Graph database implementation. Value inserted by maven.
-atlas.graphdb.backend=${graphdb.backend.impl}
-
-# Graph Storage
-atlas.graph.storage.backend=${graph.storage.backend}
-
-# Entity repository implementation
-atlas.EntityAuditRepository.impl=${entity.repository.impl}
-
-# Graph Search Index Backend
-atlas.graph.index.search.backend=${graph.index.backend}
-
-#Berkeley storage directory
-atlas.graph.storage.directory=${sys:atlas.data}/berkley
-
-#hbase
-#For standalone mode , specify localhost
-#for distributed mode, specify zookeeper quorum here - For more information refer http://s3.thinkaurelius.com/docs/titan/current/hbase.html#_remote_server_mode_2
-
-atlas.graph.storage.hostname=${graph.storage.hostname}
-atlas.graph.storage.hbase.regions-per-server=1
-atlas.graph.storage.lock.wait-time=10000
-
-#ElasticSearch
-atlas.graph.index.search.directory=${sys:atlas.data}/es
-atlas.graph.index.search.elasticsearch.client-only=false
-atlas.graph.index.search.elasticsearch.local-mode=true
-atlas.graph.index.search.elasticsearch.create.sleep=2000
-
-# Solr cloud mode properties
-atlas.graph.index.search.solr.mode=cloud
-atlas.graph.index.search.solr.zookeeper-url=${solr.zk.address}
-atlas.graph.index.search.max-result-set-size=150
-
-######### Hive Lineage Configs #########
-## Schema
-atlas.lineage.schema.query.hive_table=hive_table where __guid='%s'\, columns
-atlas.lineage.schema.query.hive_table_v1=hive_table_v1 where __guid='%s'\, columns
-
-######### Notification Configs #########
-atlas.notification.embedded=true
-
-atlas.kafka.zookeeper.connect=localhost:19026
-atlas.kafka.bootstrap.servers=localhost:19027
-atlas.kafka.data=${sys:atlas.data}/kafka
-atlas.kafka.zookeeper.session.timeout.ms=4000
-atlas.kafka.zookeeper.sync.time.ms=20
-atlas.kafka.consumer.timeout.ms=4000
-atlas.kafka.auto.commit.interval.ms=100
-atlas.kafka.hook.group.id=atlas
-atlas.kafka.entities.group.id=atlas_entities
-#atlas.kafka.auto.commit.enable=false
-
-atlas.kafka.enable.auto.commit=false
-atlas.kafka.auto.offset.reset=earliest
-atlas.kafka.session.timeout.ms=30000
-
-
-
-######### Entity Audit Configs #########
-atlas.audit.hbase.tablename=ATLAS_ENTITY_AUDIT_EVENTS
-atlas.audit.zookeeper.session.timeout.ms=1000
-atlas.audit.hbase.zookeeper.quorum=localhost
-atlas.audit.hbase.zookeeper.property.clientPort=19026
-
-######### Security Properties #########
-
-# SSL config
-atlas.enableTLS=false
-atlas.server.https.port=31443
-
-######### Security Properties #########
-
-hbase.security.authentication=simple
-
-atlas.hook.falcon.synchronous=true
-
-######### JAAS Configuration ########
-
-atlas.jaas.KafkaClient.loginModuleName = com.sun.security.auth.module.Krb5LoginModule
-atlas.jaas.KafkaClient.loginModuleControlFlag = required
-atlas.jaas.KafkaClient.option.useKeyTab = true
-atlas.jaas.KafkaClient.option.storeKey = true
-atlas.jaas.KafkaClient.option.serviceName = kafka
-atlas.jaas.KafkaClient.option.keyTab = /etc/security/keytabs/atlas.service.keytab
-atlas.jaas.KafkaClient.option.principal = atlas/_HOST@EXAMPLE.COM
-
-######### High Availability Configuration ########
-atlas.server.ha.enabled=false
-#atlas.server.ids=id1
-#atlas.server.address.id1=localhost:21000
-
-#########POLICY FILE PATH #########
-# atlas.auth.policy.file=policy-store.txt
-
-atlas.authentication.method.file=true
-atlas.authentication.method.ldap.type=none
-# atlas.authentication.method.file.filename=users-credentials.properties
-atlas.authentication.method.kerberos=false
-
-######### Gremlin Search Configuration #########
-# Set to false to disable gremlin search.
-atlas.search.gremlin.enable=true
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/resources/policy-store.txt
----------------------------------------------------------------------
diff --git a/typesystem/src/test/resources/policy-store.txt b/typesystem/src/test/resources/policy-store.txt
deleted file mode 100644
index 048affe..0000000
--- a/typesystem/src/test/resources/policy-store.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-##Policy Format
-##r-READ, w-WRITE, u-UPDATE, d-DELETE
-##Policy_Name;;User_Name1:Operations_Allowed,User_Name2:Operations_Allowed;;Group_Name1:Operations_Allowed,Group_Name2:Operations_Allowed;;Resource_Type1:Resource_Name,Resource_Type2:Resource_Name
-##
-adminPolicy;;admin:rwud;;ROLE_ADMIN:rwud;;type:*,entity:*,operation:*
-dataScientistPolicy;;;;DATA_SCIENTIST:r;;type:*,entity:*
-dataStewardPolicy;;;;DATA_STEWARD:rwu;;type:*,entity:*
-hadoopPolicy;;;;hadoop:rwud;;type:*,entity:*,operation:*
-rangerTagSyncPolicy;;;;RANGER_TAG_SYNC:r;;type:*,entity:*
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/resources/sampleInstance.json
----------------------------------------------------------------------
diff --git a/typesystem/src/test/resources/sampleInstance.json b/typesystem/src/test/resources/sampleInstance.json
deleted file mode 100755
index efcc48d..0000000
--- a/typesystem/src/test/resources/sampleInstance.json
+++ /dev/null
@@ -1,72 +0,0 @@
-{
- "$typeName$": "Department",
- "$id$": {
- "id": -1420494283853484000,
- "$typeName$": "Department",
- "version": 0
- },
- "employees": [
- {
- "$typeName$": "Person",
- "$id$": {
- "id": -1420494283853508000,
- "$typeName$": "Person",
- "version": 0
- },
- "manager": {
- "id": -1420494283853511000,
- "$typeName$": "Manager",
- "version": 0
- },
- "department": {
- "id": -1420494283853484000,
- "$typeName$": "Department",
- "version": 0
- },
- "name": "John"
- },
- {
- "$typeName$": "Manager",
- "$id$": {
- "id": -1420494283853511000,
- "$typeName$": "Manager",
- "version": 0
- },
- "manager": null,
- "subordinates": [
- {
- "$typeName$": "Person",
- "$id$": {
- "id": -1420494283853508000,
- "$typeName$": "Person",
- "version": 0
- },
- "manager": {
- "id": -1420494283853511000,
- "$typeName$": "Manager",
- "version": 0
- },
- "department": {
- "id": -1420494283853484000,
- "$typeName$": "Department",
- "version": 0
- },
- "name": "John"
- }
- ],
- "department": {
- "id": -1420494283853484000,
- "$typeName$": "Department",
- "version": 0
- },
- "name": "Jane",
- "$traits$": {
- "SecurityClearance": {
- "$typeName$": "SecurityClearance",
- "level": 1
- }
- }
- }
- ],
- "name": "hr"
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/resources/sampleTypes.json
----------------------------------------------------------------------
diff --git a/typesystem/src/test/resources/sampleTypes.json b/typesystem/src/test/resources/sampleTypes.json
deleted file mode 100755
index d0ac311..0000000
--- a/typesystem/src/test/resources/sampleTypes.json
+++ /dev/null
@@ -1,633 +0,0 @@
-{
- "enumTypes": [
- {
- "name": "HiveObjectType",
- "enumValues": [
- {
- "value": "GLOBAL",
- "ordinal": 1
- },
- {
- "value": "DATABASE",
- "ordinal": 2
- },
- {
- "value": "TABLE",
- "ordinal": 3
- },
- {
- "value": "PARTITION",
- "ordinal": 4
- },
- {
- "value": "COLUMN",
- "ordinal": 5
- }
- ]
- },
- {
- "name": "LockLevel",
- "enumValues": [
- {
- "value": "DB",
- "ordinal": 1
- },
- {
- "value": "TABLE",
- "ordinal": 2
- },
- {
- "value": "PARTITION",
- "ordinal": 3
- }
- ]
- },
- {
- "name": "TxnState",
- "enumValues": [
- {
- "value": "COMMITTED",
- "ordinal": 1
- },
- {
- "value": "ABORTED",
- "ordinal": 2
- },
- {
- "value": "OPEN",
- "ordinal": 3
- }
- ]
- },
- {
- "name": "PrincipalType",
- "enumValues": [
- {
- "value": "USER",
- "ordinal": 1
- },
- {
- "value": "ROLE",
- "ordinal": 2
- },
- {
- "value": "GROUP",
- "ordinal": 3
- }
- ]
- }
- ],
- "structTypes": [
- {
- "typeName": "t2",
- "attributeDefinitions": [
- {
- "name": "a",
- "dataTypeName": "int",
- "multiplicity": "required",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "s",
- "dataTypeName": "t2",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- }
- ]
- },
- {
- "typeName": "t1",
- "attributeDefinitions": [
- {
- "name": "a",
- "dataTypeName": "int",
- "multiplicity": "required",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "b",
- "dataTypeName": "boolean",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "c",
- "dataTypeName": "byte",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "d",
- "dataTypeName": "short",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "e",
- "dataTypeName": "int",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "f",
- "dataTypeName": "int",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "g",
- "dataTypeName": "long",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "h",
- "dataTypeName": "float",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "i",
- "dataTypeName": "double",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "j",
- "dataTypeName": "biginteger",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "k",
- "dataTypeName": "bigdecimal",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "l",
- "dataTypeName": "date",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "m",
- "dataTypeName": "array<int>",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "n",
- "dataTypeName": "array<bigdecimal>",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "o",
- "dataTypeName": "map<string,double>",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- }
- ]
- },
- {
- "typeName": "ts1",
- "attributeDefinitions": [
- {
- "name": "a",
- "dataTypeName": "int",
- "multiplicity": "required",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "b",
- "dataTypeName": "boolean",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "c",
- "dataTypeName": "byte",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "d",
- "dataTypeName": "short",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "e",
- "dataTypeName": "int",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "f",
- "dataTypeName": "int",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "g",
- "dataTypeName": "long",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "h",
- "dataTypeName": "float",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "i",
- "dataTypeName": "double",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "j",
- "dataTypeName": "biginteger",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "k",
- "dataTypeName": "bigdecimal",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "l",
- "dataTypeName": "date",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "m",
- "dataTypeName": "array<int>",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "n",
- "dataTypeName": "array<bigdecimal>",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "o",
- "dataTypeName": "map<string,double>",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- }
- ]
- }
- ],
- "traitTypes": [
- {
- "superTypes": [
- "B",
- "C"
- ],
- "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
- "typeName": "D",
- "attributeDefinitions": [
- {
- "name": "d",
- "dataTypeName": "short",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- }
- ]
- },
- {
- "superTypes": [
- ],
- "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
- "typeName": "A",
- "attributeDefinitions": [
- {
- "name": "a",
- "dataTypeName": "int",
- "multiplicity": "required",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "b",
- "dataTypeName": "boolean",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "c",
- "dataTypeName": "byte",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "d",
- "dataTypeName": "short",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- }
- ]
- },
- {
- "superTypes": [
- "A"
- ],
- "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
- "typeName": "B",
- "attributeDefinitions": [
- {
- "name": "b",
- "dataTypeName": "boolean",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- }
- ]
- },
- {
- "superTypes": [
- "A"
- ],
- "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
- "typeName": "C",
- "attributeDefinitions": [
- {
- "name": "c",
- "dataTypeName": "byte",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- }
- ]
- },
- {
- "superTypes": [
- ],
- "hierarchicalMetaTypeName": "org.apache.atlas.types.TraitType",
- "typeName": "SecurityClearance",
- "attributeDefinitions": [
- {
- "name": "level",
- "dataTypeName": "int",
- "multiplicity": "required",
- "isComposite": false,
- "reverseAttributeName": null
- }
- ]
- }
- ],
- "classTypes": [
- {
- "superTypes": [
- "Person"
- ],
- "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
- "typeName": "Manager",
- "attributeDefinitions": [
- {
- "name": "subordinates",
- "dataTypeName": "array<Person>",
- "multiplicity": "collection",
- "isComposite": false,
- "reverseAttributeName": "manager"
- }
- ]
- },
- {
- "superTypes": [
- ],
- "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
- "typeName": "Department",
- "attributeDefinitions": [
- {
- "name": "name",
- "dataTypeName": "string",
- "multiplicity": "required",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "employees",
- "dataTypeName": "array<Person>",
- "multiplicity": "collection",
- "isComposite": true,
- "reverseAttributeName": "department"
- }
- ]
- },
- {
- "superTypes": [
- ],
- "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
- "typeName": "t4",
- "attributeDefinitions": [
- {
- "name": "a",
- "dataTypeName": "int",
- "multiplicity": "required",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "b",
- "dataTypeName": "boolean",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "c",
- "dataTypeName": "byte",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "d",
- "dataTypeName": "short",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "enum1",
- "dataTypeName": "HiveObjectType",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "e",
- "dataTypeName": "int",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "f",
- "dataTypeName": "int",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "g",
- "dataTypeName": "long",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "enum2",
- "dataTypeName": "PrincipalType",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "h",
- "dataTypeName": "float",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "i",
- "dataTypeName": "double",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "j",
- "dataTypeName": "biginteger",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "k",
- "dataTypeName": "bigdecimal",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "enum3",
- "dataTypeName": "TxnState",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "l",
- "dataTypeName": "date",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "m",
- "dataTypeName": "array<int>",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "n",
- "dataTypeName": "array<bigdecimal>",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "o",
- "dataTypeName": "map<string,double>",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "enum4",
- "dataTypeName": "LockLevel",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": null
- }
- ]
- },
- {
- "superTypes": [
- ],
- "hierarchicalMetaTypeName": "org.apache.atlas.types.ClassType",
- "typeName": "Person",
- "attributeDefinitions": [
- {
- "name": "name",
- "dataTypeName": "string",
- "multiplicity": "required",
- "isComposite": false,
- "reverseAttributeName": null
- },
- {
- "name": "department",
- "dataTypeName": "Department",
- "multiplicity": "required",
- "isComposite": false,
- "reverseAttributeName": "employees"
- },
- {
- "name": "manager",
- "dataTypeName": "Manager",
- "multiplicity": "optional",
- "isComposite": false,
- "reverseAttributeName": "subordinates"
- }
- ]
- }
- ]
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/resources/users-credentials.properties
----------------------------------------------------------------------
diff --git a/typesystem/src/test/resources/users-credentials.properties b/typesystem/src/test/resources/users-credentials.properties
deleted file mode 100644
index 3fc3bb1..0000000
--- a/typesystem/src/test/resources/users-credentials.properties
+++ /dev/null
@@ -1,3 +0,0 @@
-#username=group::sha256-password
-admin=ADMIN::8c6976e5b5410415bde908bd4dee15dfb167a9c873fc4bb8a81f6f2ab448a918
-rangertagsync=RANGER_TAG_SYNC::e3f67240f5117d1753c940dae9eea772d36ed5fe9bd9c94a300e40413f1afb9d
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/BuilderTest.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/BuilderTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/BuilderTest.scala
deleted file mode 100644
index 9d1d00f..0000000
--- a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/BuilderTest.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.builders
-
-import org.apache.atlas.typesystem.TypesDef
-import org.apache.atlas.typesystem.types.TypeSystem
-import org.testng.annotations.BeforeMethod
-
-abstract class BuilderTest {
-
- var tDef : TypesDef = null
-
- @BeforeMethod
- def before {
- TypeSystem.getInstance().reset()
-
- val b = new TypesBuilder
- import b._
-
- tDef = types {
-
- _trait("Dimension") {}
- _trait("PII") {}
- _trait("Metric") {}
- _trait("ETL") {}
- _trait("JdbcAccess") {}
-
- _class("DB") {
- "name" ~ (string, required, indexed, unique)
- "owner" ~ (string)
- "createTime" ~ (int)
- }
-
- _class("StorageDesc") {
- "inputFormat" ~ (string, required)
- "outputFormat" ~ (string, required)
- }
-
- _class("Column") {
- "name" ~ (string, required)
- "dataType" ~ (string, required)
- "sd" ~ ("StorageDesc", required)
- }
-
- _class("Table", List()) {
- "name" ~ (string, required, indexed)
- "db" ~ ("DB", required)
- "sd" ~ ("StorageDesc", required)
- }
-
- _class("LoadProcess") {
- "name" ~ (string, required)
- "inputTables" ~ (array("Table"), collection)
- "outputTable" ~ ("Table", required)
-
- }
-
- _class("View") {
- "name" ~ (string, required)
- "inputTables" ~ (array("Table"), collection)
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/InstanceBuilderTest.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/InstanceBuilderTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/InstanceBuilderTest.scala
deleted file mode 100644
index 0331c9c..0000000
--- a/typesystem/src/test/scala/org/apache/atlas/typesystem/builders/InstanceBuilderTest.scala
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.builders
-
-import org.apache.atlas.typesystem.types.{ClassType, Multiplicity, TypeSystem}
-import org.testng.annotations.Test
-
-class InstanceBuilderTest extends BuilderTest {
-
- @Test def test1 {
- TypeSystem.getInstance().defineTypes(tDef)
-
- val b = new InstanceBuilder
- import b._
-
- val instances = b create {
-
- val salesDB = instance("DB") { // use instance to create Referenceables. use closure to
- // set attributes of instance
- 'name ~ "Sales" // use '~' to set attributes. Use a Symbol (names starting with ') for
- // attribute names.
- 'owner ~ "John ETL"
- 'createTime ~ 1000
- }
-
- val salesFact = instance("Table") {
- 'name ~ "sales_fact"
- 'db ~ salesDB
- val sd = instance("StorageDesc") { // any valid scala allowed in closure.
- 'inputFormat ~ "TextIputFormat"
- 'outputFormat ~ "TextOutputFormat"
- }
- 'sd ~ sd // use ~ to set references, collections and maps.
- val columns = Seq(
- instance("Column") {
- 'name ~ "time_id"
- 'dataType ~ "int"
- 'sd ~ sd
- },
- instance("Column") {
- 'name ~ "product_id"
- 'dataType ~ "int"
- 'sd ~ sd
- },
- instance("Column") {
- 'name ~ "customer_id"
- 'dataType ~ "int"
- 'sd ~ sd
- },
- instance("Column", "Metric") {
- 'name ~ "sales"
- 'dataType ~ "int"
- 'sd ~ sd
- 'Metric("x") ~ 1 // use 'TraitName("attrName") to set values on traits.
- }
- )
-
- 'columns ~ columns
-
- }
-
- salesFact.sd.inputFormat ~ "TextInputFormat" // use dot navigation to alter attributes in the object graph.
- // here I am fixing the typo in "TextInputFormat"
- // dot navigation also works for arrays.
- // here I am fixing column(3). Metric trait has no attributes.
- val c = salesFact.columns
- c(3) = instance("Column", "Metric") {
- 'name ~ "sales"
- 'dataType ~ "int"
- 'sd ~ salesFact.sd
- }
-
- }
-
- val ts = TypeSystem.getInstance()
-
- import scala.collection.JavaConversions._
- val typedInstances = instances.map { i =>
- val iTyp = ts.getDataType(classOf[ClassType], i.getTypeName)
- iTyp.convert(i, Multiplicity.REQUIRED)
- }
-
- typedInstances.foreach { i =>
- println(i)
- }
-
- }
-
-}
[10/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/DataTypes.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/DataTypes.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/DataTypes.java
deleted file mode 100755
index f9f4abe..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/DataTypes.java
+++ /dev/null
@@ -1,655 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableCollection;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.commons.lang3.StringUtils;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormatter;
-import org.joda.time.format.ISODateTimeFormat;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.nio.charset.Charset;
-import java.security.MessageDigest;
-import java.util.Collection;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-public class DataTypes {
-
- public static BooleanType BOOLEAN_TYPE = new BooleanType();
- public static ByteType BYTE_TYPE = new ByteType();
- public static ShortType SHORT_TYPE = new ShortType();
- public static IntType INT_TYPE = new IntType();
- public static LongType LONG_TYPE = new LongType();
- public static FloatType FLOAT_TYPE = new FloatType();
- public static DoubleType DOUBLE_TYPE = new DoubleType();
- public static BigIntegerType BIGINTEGER_TYPE = new BigIntegerType();
- public static BigDecimalType BIGDECIMAL_TYPE = new BigDecimalType();
- public static DateType DATE_TYPE = new DateType();
- public static StringType STRING_TYPE = new StringType();
- public static String ARRAY_TYPE_PREFIX = "array<";
- static String ARRAY_TYPE_SUFFIX = ">";
- public static String MAP_TYPE_PREFIX = "map<";
- static String MAP_TYPE_SUFFIX = ">";
-
- public static String arrayTypeName(String elemTypeName) {
- assert elemTypeName != null;
- return String.format("%s%s%s", ARRAY_TYPE_PREFIX, elemTypeName, ARRAY_TYPE_SUFFIX);
- }
-
- public static String arrayTypeName(IDataType elemType) {
- return arrayTypeName(elemType.getName());
- }
-
- public static String mapTypeName(String keyTypeName, String valueTypeName) {
- return String.format("%s%s,%s%s", MAP_TYPE_PREFIX, keyTypeName, valueTypeName, MAP_TYPE_SUFFIX);
- }
-
- public static String mapTypeName(IDataType keyType, IDataType valueType) {
- assert keyType != null;
- assert valueType != null;
- return mapTypeName(keyType.getName(), valueType.getName());
- }
-
- public enum TypeCategory {
- PRIMITIVE,
- ENUM,
- ARRAY,
- MAP,
- STRUCT,
- TRAIT,
- CLASS,
- RELATIONSHIP
- }
-
- public static abstract class PrimitiveType<T> extends AbstractDataType<T> {
- public PrimitiveType(String name, String description) {
- super(name, description);
- }
-
- @Override
- public TypeCategory getTypeCategory() {
- return TypeCategory.PRIMITIVE;
- }
-
- public abstract T nullValue();
-
- @Override
- protected T convertNull(Multiplicity m) throws AtlasException {
- if (!m.nullAllowed()) {
- throw new ValueConversionException.NullConversionException(m);
- }
-
- return nullValue();
- }
-
- @Override
- public void updateSignatureHash(MessageDigest digester, Object val) throws AtlasException {
- if ( val != null ) {
- digester.update(val.toString().getBytes(Charset.forName("UTF-8")));
- }
- }
-
- }
-
- public static class BooleanType extends PrimitiveType<Boolean> {
-
- private static final String name = "boolean".intern();
-
- private BooleanType() {
- super(name, null);
- }
-
- @Override
- public Boolean convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- if (val instanceof Boolean) {
- return (Boolean) val;
- } else if (val instanceof String) {
- return Boolean.parseBoolean((String) val);
- } else if (val instanceof Number) {
- return ((Number) val).intValue() != 0;
- } else {
- throw new ValueConversionException(this, val);
- }
- }
- return convertNull(m);
- }
-
- public Boolean nullValue() {
- return Boolean.FALSE;
- }
- }
-
- public static class ByteType extends PrimitiveType<Byte> {
-
- private static final String name = "byte".intern();
-
- private ByteType() {
- super(name, null);
- }
-
- @Override
- public Byte convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- if (val instanceof Byte) {
- return (Byte) val;
- } else if (val instanceof String) {
- return Byte.parseByte((String) val);
- } else if (val instanceof Number) {
- return ((Number) val).byteValue();
- } else {
- throw new ValueConversionException(this, val);
- }
- }
- return convertNull(m);
- }
-
- public Byte nullValue() {
- return 0;
- }
-
- @Override
- public void updateSignatureHash(MessageDigest digester, Object val) throws AtlasException {
- if ( val != null ) {
- digester.update((Byte) val);
- }
- }
- }
-
- public static class ShortType extends PrimitiveType<Short> {
-
- private static final String name = "short".intern();
-
- private ShortType() {
- super(name, null);
- }
-
- @Override
- public Short convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- if (val instanceof Short) {
- return (Short) val;
- } else if (val instanceof String) {
- return Short.parseShort((String) val);
- } else if (val instanceof Number) {
- return ((Number) val).shortValue();
- } else {
- throw new ValueConversionException(this, val);
- }
- }
- return convertNull(m);
- }
-
- public Short nullValue() {
- return 0;
- }
- }
-
- public static class IntType extends PrimitiveType<Integer> {
-
- private static final String name = "int".intern();
-
- private IntType() {
- super(name, null);
- }
-
- @Override
- public Integer convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- if (val instanceof Integer) {
- return (Integer) val;
- } else if (val instanceof String) {
- return Integer.parseInt((String) val);
- } else if (val instanceof Number) {
- return ((Number) val).intValue();
- } else {
- throw new ValueConversionException(this, val);
- }
- }
- return convertNull(m);
- }
-
- public Integer nullValue() {
- return 0;
- }
- }
-
- public static class LongType extends PrimitiveType<Long> {
-
- private static final String name = "long".intern();
-
- private LongType() {
- super(name, null);
- }
-
- @Override
- public Long convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- if (val instanceof Long) {
- return (Long) val;
- } else if (val instanceof String) {
- return Long.parseLong((String) val);
- } else if (val instanceof Number) {
- return ((Number) val).longValue();
- } else {
- throw new ValueConversionException(this, val);
- }
- }
- return convertNull(m);
- }
-
- public Long nullValue() {
- return 0L;
- }
- }
-
- public static class FloatType extends PrimitiveType<Float> {
-
- private static final String name = "float".intern();
-
- private FloatType() {
- super(name, null);
- }
-
- @Override
- public Float convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- if (val instanceof Float) {
- return (Float) val;
- } else if (val instanceof String) {
- return Float.parseFloat((String) val);
- } else if (val instanceof Number) {
- return ((Number) val).floatValue();
- } else {
- throw new ValueConversionException(this, val);
- }
- }
- return convertNull(m);
- }
-
- public Float nullValue() {
- return 0.0f;
- }
- }
-
- public static class DoubleType extends PrimitiveType<Double> {
-
- private static final String name = "double".intern();
-
- private DoubleType() {
- super(name, null);
- }
-
- @Override
- public Double convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- if (val instanceof Double) {
- return (Double) val;
- } else if (val instanceof String) {
- return Double.parseDouble((String) val);
- } else if (val instanceof Number) {
- return ((Number) val).doubleValue();
- } else {
- throw new ValueConversionException(this, val);
- }
- }
- return convertNull(m);
- }
-
- public Double nullValue() {
- return 0.0;
- }
- }
-
- public static class BigIntegerType extends PrimitiveType<BigInteger> {
-
- private static final String name = "biginteger".intern();
-
- private BigIntegerType() {
- super(name, null);
- }
-
- @Override
- public BigInteger convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- if (val instanceof BigInteger) {
- return (BigInteger) val;
- } else if (val instanceof String) {
- try {
- return new BigInteger((String) val);
- } catch (NumberFormatException ne) {
- throw new ValueConversionException(this, val, ne);
- }
- } else if (val instanceof Number) {
- return BigInteger.valueOf(((Number) val).longValue());
- } else if (val instanceof BigDecimal) {
- return ((BigDecimal) val).toBigInteger();
- } else {
- throw new ValueConversionException(this, val);
- }
- }
- return convertNull(m);
- }
-
- public BigInteger nullValue() {
- return null;
- }
- }
-
- public static class BigDecimalType extends PrimitiveType<BigDecimal> {
-
- private static final String name = "bigdecimal".intern();
-
- private BigDecimalType() {
- super(name, null);
- }
-
- @Override
- public BigDecimal convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- if (val instanceof BigDecimal) {
- return (BigDecimal) val;
- } else if (val instanceof String) {
- try {
- return new BigDecimal((String) val);
- } catch (NumberFormatException ne) {
- throw new ValueConversionException(this, val, ne);
- }
- } else if (val instanceof Number) {
- return new BigDecimal(((Number) val).doubleValue());
- } else if (val instanceof BigInteger) {
- return new BigDecimal((BigInteger) val);
- } else {
- throw new ValueConversionException(this, val);
- }
- }
- return convertNull(m);
- }
-
- public BigDecimal nullValue() {
- return null;
- }
- }
-
- public static class DateType extends PrimitiveType<Date> {
-
- private static final String name = "date".intern();
-
- private DateType() {
- super(name, null);
- }
-
- private static final DateTimeFormatter utcDateFormat = ISODateTimeFormat.dateTime();
-
- @Override
- public Date convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- if (val instanceof Date) {
- return (Date) val;
- } else if (val instanceof String) {
- try {
- return utcDateFormat.parseDateTime((String)val).toDate();
- } catch (Exception ne) {
- throw new ValueConversionException(this, val, ne);
- }
- } else if (val instanceof Number) {
- return new Date(((Number) val).longValue());
- } else {
- throw new ValueConversionException(this, val);
- }
- }
- return convertNull(m);
- }
-
- @Override
- public void output(Date val, Appendable buf, String prefix, Set<Date> inProcess) throws AtlasException {
- TypeUtils.outputVal(val == null ? "<null>" : utcDateFormat.print(new DateTime(val).withZone(DateTimeZone.UTC)), buf,
- prefix);
- }
-
- public Date nullValue() {
- return null;
- }
- }
-
- public static class StringType extends PrimitiveType<String> {
-
- private static final String name = "string".intern();
-
- private StringType() {
- super(name, null);
- }
-
- @Override
- public String convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null && (!(val instanceof String) || StringUtils.isNotEmpty((CharSequence) val))) {
- return val.toString();
- }
-
- if (m.nullAllowed() && val != null){
- return val.toString();
- }
- return convertNull(m);
- }
-
- public String nullValue() {
- return null;
- }
- }
-
- public static class ArrayType extends AbstractDataType<ImmutableCollection<?>> {
- private IDataType elemType;
-
- public ArrayType(IDataType elemType) {
- super(arrayTypeName(elemType), null);
- this.elemType = elemType;
- }
-
- public IDataType getElemType() {
- return elemType;
- }
-
- protected void setElemType(IDataType elemType) {
- this.elemType = elemType;
- }
-
- @Override
- public ImmutableCollection<?> convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- Iterator it = null;
- if (val instanceof Collection) {
- it = ((Collection) val).iterator();
- } else if (val instanceof Iterable) {
- it = ((Iterable) val).iterator();
- } else if (val instanceof Iterator) {
- it = (Iterator) val;
- }
-
- if (it != null) {
- ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder() : ImmutableList.builder();
- while (it.hasNext()) {
- b.add(elemType.convert(it.next(),
- TypeSystem.getInstance().allowNullsInCollections() ? Multiplicity.OPTIONAL :
- Multiplicity.REQUIRED));
- }
- return m.isUnique ? b.build().asList() : b.build();
- } else {
- try {
- return ImmutableList.of(elemType.convert(val,
- TypeSystem.getInstance().allowNullsInCollections() ? Multiplicity.OPTIONAL :
- Multiplicity.REQUIRED));
- } catch (Exception e) {
- throw new ValueConversionException(this, val, e);
- }
- }
- }
- if (!m.nullAllowed()) {
- throw new ValueConversionException.NullConversionException(m);
- }
- return null;
- }
-
- public ImmutableCollection<?> mapIds(ImmutableCollection<?> val, Multiplicity m, Map<Id, Id> transientToNewIds)
- throws AtlasException {
-
- if (val == null || elemType.getTypeCategory() != TypeCategory.CLASS) {
- return val;
- }
- ImmutableCollection.Builder b = m.isUnique ? ImmutableSet.builder() : ImmutableList.builder();
- for (Object elem : val) {
- if (elem instanceof IReferenceableInstance) {
- Id oldId = ((IReferenceableInstance) elem).getId();
- Id newId = transientToNewIds.get(oldId);
- b.add(newId == null ? oldId : newId);
- } else {
- b.add(elem);
- }
- }
- return b.build();
- }
-
- @Override
- public TypeCategory getTypeCategory() {
- return TypeCategory.ARRAY;
- }
-
- @Override
- public void updateSignatureHash(MessageDigest digester, Object val) throws AtlasException {
- IDataType elemType = getElemType();
- List vals = (List) val;
- for (Object listElem : vals) {
- elemType.updateSignatureHash(digester, listElem);
- }
- }
- }
-
- public static class MapType extends AbstractDataType<ImmutableMap<?, ?>> {
-
- private IDataType keyType;
- private IDataType valueType;
-
- public MapType(IDataType keyType, IDataType valueType) {
- super(mapTypeName(keyType, valueType), null);
- this.keyType = keyType;
- this.valueType = valueType;
- }
-
- public IDataType getKeyType() {
- return keyType;
- }
-
- protected void setKeyType(IDataType keyType) {
- this.keyType = keyType;
- }
-
- public IDataType getValueType() {
- return valueType;
- }
-
- protected void setValueType(IDataType valueType) {
- this.valueType = valueType;
- }
-
- @Override
- public ImmutableMap<?, ?> convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- Iterator<Map.Entry> it = null;
- if (Map.class.isAssignableFrom(val.getClass())) {
- it = ((Map) val).entrySet().iterator();
- ImmutableMap.Builder b = ImmutableMap.builder();
- while (it.hasNext()) {
- Map.Entry e = it.next();
- b.put(keyType.convert(e.getKey(),
- TypeSystem.getInstance().allowNullsInCollections() ? Multiplicity.OPTIONAL :
- Multiplicity.REQUIRED),
- valueType.convert(e.getValue(), Multiplicity.OPTIONAL));
- }
- return b.build();
- } else {
- throw new ValueConversionException(this, val);
- }
- }
- if (!m.nullAllowed()) {
- throw new ValueConversionException.NullConversionException(m);
- }
- return null;
- }
-
- public ImmutableMap<?, ?> mapIds(ImmutableMap val, Multiplicity m, Map<Id, Id> transientToNewIds)
- throws AtlasException {
-
- if (val == null || (keyType.getTypeCategory() != TypeCategory.CLASS
- && valueType.getTypeCategory() != TypeCategory.CLASS)) {
- return val;
- }
- ImmutableMap.Builder b = ImmutableMap.builder();
- for (Map.Entry elem : (Iterable<Map.Entry>) val.entrySet()) {
- Object oldKey = elem.getKey();
- Object oldValue = elem.getValue();
- Object newKey = oldKey;
- Object newValue = oldValue;
-
- if (oldKey instanceof IReferenceableInstance) {
- Id oldId = ((IReferenceableInstance) oldKey).getId();
- Id newId = transientToNewIds.get(oldId);
- newKey = newId == null ? oldId : newId;
- }
-
- if (oldValue instanceof IReferenceableInstance) {
- Id oldId = ((IReferenceableInstance) oldValue).getId();
- Id newId = transientToNewIds.get(oldId);
- newValue = newId == null ? oldId : newId;
- }
-
- b.put(newKey, newValue);
- }
- return b.build();
- }
-
- @Override
- public TypeCategory getTypeCategory() {
- return TypeCategory.MAP;
- }
-
- @Override
- public void updateSignatureHash(MessageDigest digester, Object val) throws AtlasException {
- IDataType keyType = getKeyType();
- IDataType valueType = getValueType();
- Map vals = (Map) val;
- for (Object key : vals.keySet()) {
- keyType.updateSignatureHash(digester, key);
- valueType.updateSignatureHash(digester, vals.get(key));
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/DownCastFieldMapping.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/DownCastFieldMapping.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/DownCastFieldMapping.java
deleted file mode 100755
index 85e288e..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/DownCastFieldMapping.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableMap;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.persistence.DownCastStructInstance;
-
-public class DownCastFieldMapping {
-
- public final ImmutableMap<String, String> fieldNameMap;
-
- protected DownCastFieldMapping(ImmutableMap<String, String> fieldNameMap) {
- this.fieldNameMap = fieldNameMap;
- }
-
- public void set(DownCastStructInstance s, String attrName, Object val) throws AtlasException {
-
- String mappedNm = fieldNameMap.get(attrName);
- if (mappedNm == null) {
- throw new ValueConversionException(s.getTypeName(), val, "Unknown field " + attrName);
- }
-
- s.backingInstance.set(mappedNm, val);
- }
-
- public Object get(DownCastStructInstance s, String attrName) throws AtlasException {
-
- String mappedNm = fieldNameMap.get(attrName);
- if (mappedNm == null) {
- throw new ValueConversionException(
- String.format("Unknown field %s for Struct %s", attrName, s.getTypeName()));
- }
- return s.backingInstance.get(mappedNm);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumType.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumType.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumType.java
deleted file mode 100755
index 82e22ce..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumType.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableCollection;
-import com.google.common.collect.ImmutableMap;
-import org.apache.atlas.AtlasConstants;
-import org.apache.atlas.AtlasException;
-import scala.math.BigInt;
-
-import java.nio.charset.Charset;
-import java.security.MessageDigest;
-
-public class EnumType extends AbstractDataType<EnumValue> {
-
- public final TypeSystem typeSystem;
- public final ImmutableMap<String, EnumValue> valueMap;
- public final ImmutableMap<Integer, EnumValue> ordinalMap;
-
- protected EnumType(TypeSystem typeSystem, String name, EnumValue... values) {
- this(typeSystem, name, null, values);
- }
-
- protected EnumType(TypeSystem typeSystem, String name, String description, EnumValue... values) {
- this(typeSystem, name, description, AtlasConstants.DEFAULT_TYPE_VERSION, values);
- }
-
- protected EnumType(TypeSystem typeSystem, String name, String description, String version, EnumValue... values) {
- super(name, description, version);
- this.typeSystem = typeSystem;
- ImmutableMap.Builder<String, EnumValue> b1 = new ImmutableMap.Builder();
- ImmutableMap.Builder<Integer, EnumValue> b2 = new ImmutableMap.Builder();
- for (EnumValue v : values) {
- b1.put(v.value, v);
- b2.put(v.ordinal, v);
- }
- valueMap = b1.build();
- ordinalMap = b2.build();
- }
-
- @Override
- public EnumValue convert(Object val, Multiplicity m) throws AtlasException {
- if (val != null) {
- EnumValue e = null;
- if (val instanceof EnumValue) {
- e = valueMap.get(((EnumValue) val).value);
- } else if (val instanceof Integer || val instanceof BigInt) {
- e = ordinalMap.get(val);
- } else if (val instanceof String) {
- e = valueMap.get(val);
- } else if (val instanceof Number) {
- e = ordinalMap.get(((Number) val).intValue());
- }
-
- if (e == null) {
- throw new ValueConversionException(this, val);
- }
- return e;
- }
- return convertNull(m);
- }
-
- @Override
- public DataTypes.TypeCategory getTypeCategory() {
- return DataTypes.TypeCategory.ENUM;
- }
-
- @Override
- public void validateUpdate(IDataType newType) throws TypeUpdateException {
- super.validateUpdate(newType);
-
- EnumType enumType = (EnumType) newType;
- for (EnumValue enumValue : values()) {
- //The old enum value should be part of new enum definition as well
- if (!enumType.valueMap.containsKey(enumValue.value)) {
- throw new TypeUpdateException("Value " + enumValue.value + " is missing in new type");
- }
-
- //The ordinal for old enum value can't change
- EnumValue newEnumValue = enumType.valueMap.get(enumValue.value);
- if (enumValue.ordinal != newEnumValue.ordinal) {
- throw new TypeUpdateException(String.format("Ordinal mismatch %s(%s) != %s(%s)", enumValue.value,
- enumValue.ordinal, newEnumValue.value, newEnumValue.ordinal));
- }
- }
- }
-
- public void updateSignatureHash(MessageDigest digester, Object val) throws AtlasException {
- if (val != null) {
- digester.update(fromValue((String) val).toString().getBytes(Charset.forName("UTF-8")));
- }
- }
-
- public EnumValue fromOrdinal(int o) {
- return ordinalMap.get(o);
- }
-
- public EnumValue fromValue(String val) {
- return valueMap.get(val.trim());
- }
-
- public ImmutableCollection<EnumValue> values() {
- return valueMap.values();
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumTypeDefinition.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumTypeDefinition.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumTypeDefinition.java
deleted file mode 100755
index 40cb132..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumTypeDefinition.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.apache.atlas.utils.ParamChecker;
-import org.apache.atlas.AtlasConstants;
-
-import java.util.Arrays;
-import java.util.Objects;
-
-public final class EnumTypeDefinition {
-
- public final String name;
- public final String description;
- public final String version;
- public final EnumValue[] enumValues;
-
- public EnumTypeDefinition(String name, EnumValue... enumValues) {
- this(name, null, AtlasConstants.DEFAULT_TYPE_VERSION, enumValues);
- }
-
- public EnumTypeDefinition(String name, String description, EnumValue... enumValues) {
- this(name, description, AtlasConstants.DEFAULT_TYPE_VERSION, enumValues);
- }
-
- public EnumTypeDefinition(String name, String description, String version, EnumValue... enumValues) {
- this.name = ParamChecker.notEmpty(name, "Enum type name");
- this.description = description;
- this.enumValues = ParamChecker.notNullElements(enumValues, "Enum values");
- this.version = version;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- EnumTypeDefinition that = (EnumTypeDefinition) o;
- return Objects.equals(name, that.name) &&
- Objects.equals(description, that.description) &&
- Objects.equals(version, that.version) &&
- Arrays.equals(enumValues, that.enumValues);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(name, description, version, enumValues);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumValue.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumValue.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumValue.java
deleted file mode 100755
index d75259b..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/EnumValue.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.apache.atlas.utils.ParamChecker;
-
-public class EnumValue {
-
- public final String value;
- public final int ordinal;
-
- public EnumValue(String value, int ordinal) {
- this.value = ParamChecker.notEmpty(value, "Enum value");
- this.ordinal = ordinal;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- }
- if (o == null || getClass() != o.getClass()) {
- return false;
- }
-
- EnumValue enumValue = (EnumValue) o;
-
- if (ordinal != enumValue.ordinal) {
- return false;
- }
- if (!value.equals(enumValue.value)) {
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- int result = value.hashCode();
- result = 31 * result + ordinal;
- return result;
- }
-
- @Override
- public String toString() {
- return value;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/FieldMapping.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/FieldMapping.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/FieldMapping.java
deleted file mode 100755
index a2b3db2..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/FieldMapping.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.persistence.Id;
-
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-public class FieldMapping {
-
- public final Map<String, AttributeInfo> fields;
- public final Map<String, Integer> fieldPos;
- public final Map<String, Integer> fieldNullPos;
- public final int numBools;
- public final int numBytes;
- public final int numShorts;
- public final int numInts;
- public final int numLongs;
- public final int numFloats;
- public final int numDoubles;
- public final int numBigInts;
- public final int numBigDecimals;
- public final int numDates;
- public final int numStrings;
- public final int numArrays;
- public final int numMaps;
- public final int numStructs;
- public final int numReferenceables;
-
- public FieldMapping(Map<String, AttributeInfo> fields, Map<String, Integer> fieldPos,
- Map<String, Integer> fieldNullPos, int numBools, int numBytes, int numShorts, int numInts, int numLongs,
- int numFloats, int numDoubles, int numBigInts, int numBigDecimals, int numDates, int numStrings,
- int numArrays, int numMaps, int numStructs, int numReferenceables) {
- this.fields = fields;
- this.fieldPos = fieldPos;
- this.fieldNullPos = fieldNullPos;
- this.numBools = numBools;
- this.numBytes = numBytes;
- this.numShorts = numShorts;
- this.numInts = numInts;
- this.numLongs = numLongs;
- this.numFloats = numFloats;
- this.numDoubles = numDoubles;
- this.numBigInts = numBigInts;
- this.numBigDecimals = numBigDecimals;
- this.numDates = numDates;
- this.numStrings = numStrings;
- this.numArrays = numArrays;
- this.numMaps = numMaps;
- this.numStructs = numStructs;
- this.numReferenceables = numReferenceables;
- }
-
- protected void outputFields(IStruct s, Appendable buf, String fieldPrefix, Set<? extends IStruct> inProcess) throws AtlasException {
- for (Map.Entry<String, AttributeInfo> e : fields.entrySet()) {
- String attrName = e.getKey();
- AttributeInfo i = e.getValue();
- Object aVal = s.get(attrName);
- TypeUtils.outputVal(attrName + " : ", buf, fieldPrefix);
- if (aVal != null && aVal instanceof Id) {
- TypeUtils.outputVal(aVal.toString(), buf, "");
- } else {
- i.dataType().output(aVal, buf, fieldPrefix, inProcess);
- }
- TypeUtils.outputVal("\n", buf, "");
- }
- }
-
- public void output(IStruct s, Appendable buf, String prefix, Set<IStruct> inProcess) throws AtlasException {
- if (s == null) {
- TypeUtils.outputVal("<null>\n", buf, "");
- return;
- }
-
- if (inProcess == null) {
- inProcess = new HashSet<>();
- }
- else if (inProcess.contains(s)) {
- // Avoid infinite recursion when structs reference each other.
- return;
- }
- inProcess.add(s);
-
- try {
- TypeUtils.outputVal("{", buf, prefix);
-
- TypeUtils.outputVal("\n", buf, "");
- String fieldPrefix = prefix + "\t";
-
- outputFields(s, buf, fieldPrefix, inProcess);
-
- TypeUtils.outputVal("}", buf, prefix);
- }
- finally {
- inProcess.remove(s);
- }
- }
-
- public void output(IReferenceableInstance s, Appendable buf, String prefix, Set<IReferenceableInstance> inProcess) throws AtlasException {
- if (s == null) {
- TypeUtils.outputVal("<null>\n", buf, "");
- return;
- }
-
- if (inProcess == null) {
- inProcess = new HashSet<>();
- }
- else if (inProcess.contains(s)) {
- // Avoid infinite recursion when structs reference each other.
- return;
- }
- inProcess.add(s);
-
- try {
- TypeUtils.outputVal("{", buf, prefix);
-
- TypeUtils.outputVal("\n", buf, "");
- String fieldPrefix = prefix + "\t";
-
- TypeUtils.outputVal("id : ", buf, fieldPrefix);
- TypeUtils.outputVal(s.getId().toString(), buf, "");
- TypeUtils.outputVal("\n", buf, "");
-
- outputFields(s, buf, fieldPrefix, inProcess);
-
- TypeSystem ts = TypeSystem.getInstance();
-
- for (String sT : s.getTraits()) {
- TraitType tt = ts.getDataType(TraitType.class, sT);
- TypeUtils.outputVal(sT + " : ", buf, fieldPrefix);
- tt.output(s.getTrait(sT), buf, fieldPrefix, null);
- }
-
- TypeUtils.outputVal("}", buf, prefix);
- }
- finally {
- inProcess.remove(s);
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalType.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalType.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalType.java
deleted file mode 100755
index ac7f442..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalType.java
+++ /dev/null
@@ -1,545 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.UnmodifiableIterator;
-
-import org.apache.atlas.AtlasConstants;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.persistence.DownCastStructInstance;
-import org.apache.atlas.typesystem.types.TypeUtils.Pair;
-
-import java.io.IOException;
-import java.util.*;
-
-/**
- * Represents a Type that can have SuperTypes. An Instance of the HierarchicalType can be
- * downcast to a SuperType.
- * @param <ST> the Type of the SuperType. TraitTypes have TraitTypes as SuperTypes, ClassTypes
- * have ClassTypes
- * as SuperTypes.
- * @param <T> the class of the Instance of this DataType.
- */
-public abstract class HierarchicalType<ST extends HierarchicalType, T> extends AbstractDataType<T> {
-
- public final TypeSystem typeSystem;
- public final Class<ST> superTypeClass;
- public final FieldMapping fieldMapping;
- public final int numFields;
- public final ImmutableSet<String> superTypes;
- public final ImmutableList<AttributeInfo> immediateAttrs;
- public final ImmutableMap<String, String> attributeNameToType;
- protected ImmutableMap<String, List<Path>> superTypePaths;
- protected ImmutableMap<String, Path> pathNameToPathMap;
-
- HierarchicalType(TypeSystem typeSystem, Class<ST> superTypeClass, String name, ImmutableSet<String> superTypes,
- int numFields) {
- this(typeSystem, superTypeClass, name, null, superTypes, numFields);
- }
-
- /**
- * Used when creating a Type, to support recursive Structs.
- */
- HierarchicalType(TypeSystem typeSystem, Class<ST> superTypeClass, String name, String description, ImmutableSet<String> superTypes,
- int numFields) {
- this( typeSystem, superTypeClass, name, description, AtlasConstants.DEFAULT_TYPE_VERSION, superTypes, numFields);
- }
-
- HierarchicalType(TypeSystem typeSystem, Class<ST> superTypeClass, String name, String description, String version, ImmutableSet<String> superTypes,
- int numFields) {
- super(name, description, version);
- this.typeSystem = typeSystem;
- this.superTypeClass = superTypeClass;
- this.fieldMapping = null;
- this.numFields = numFields;
- this.superTypes = superTypes;
- this.immediateAttrs = ImmutableList.of();
- this.attributeNameToType = null;
- }
-
- HierarchicalType(TypeSystem typeSystem, Class<ST> superTypeClass, String name, ImmutableSet<String> superTypes,
- AttributeInfo... fields) throws AtlasException {
- this(typeSystem, superTypeClass, name, null, superTypes, fields);
- }
- HierarchicalType(TypeSystem typeSystem, Class<ST> superTypeClass, String name, String description, ImmutableSet<String> superTypes,
- AttributeInfo... fields) throws AtlasException {
- this(typeSystem, superTypeClass, name, description, AtlasConstants.DEFAULT_TYPE_VERSION, superTypes, fields);
- }
-
- HierarchicalType(TypeSystem typeSystem, Class<ST> superTypeClass, String name, String description, String version, ImmutableSet<String> superTypes,
- AttributeInfo... fields) throws AtlasException {
- super(name, description, version);
- this.typeSystem = typeSystem;
- this.superTypeClass = superTypeClass;
- Pair<FieldMapping, ImmutableMap<String, String>> p = constructFieldMapping(superTypes, fields);
- this.fieldMapping = p.left;
- this.attributeNameToType = p.right;
- this.numFields = this.fieldMapping.fields.size();
- this.superTypes = superTypes == null ? ImmutableSet.<String>of() : superTypes;
- this.immediateAttrs = ImmutableList.copyOf(fields);
- }
-
- public FieldMapping fieldMapping() {
- return fieldMapping;
- }
-
- /**
- * Given type must be a SubType of this type.
- * @param typeName
- * @throws AtlasException
- */
- public boolean isSubType(String typeName) throws AtlasException {
- HierarchicalType cType = typeSystem.getDataType(HierarchicalType.class, typeName);
- return (cType == this || cType.superTypePaths.containsKey(getName()));
- }
-
- /**
- * Validate that current definition can be updated with the new definition
- * @param newType
- * @return true if the current definition can be updated with the new definition, else false
- */
- @Override
- public void validateUpdate(IDataType newType) throws TypeUpdateException {
- super.validateUpdate(newType);
-
- HierarchicalType newHierarchicalType = (HierarchicalType) newType;
-
- //validate on supertypes
-
- if ((newHierarchicalType.superTypes.size() != superTypes.size())
- || !newHierarchicalType.superTypes.containsAll(superTypes)) {
- throw new TypeUpdateException(newType, "New type cannot modify superTypes");
- }
-
- //validate on fields
- try {
- TypeUtils.validateUpdate(fieldMapping, newHierarchicalType.fieldMapping);
- } catch (TypeUpdateException e) {
- throw new TypeUpdateException(newType, e);
- }
- }
-
- protected void setupSuperTypesGraph() throws AtlasException {
- setupSuperTypesGraph(superTypes);
- }
-
- private void setupSuperTypesGraph(ImmutableSet<String> superTypes) throws AtlasException {
- Map<String, List<Path>> superTypePaths = new HashMap<>();
- Map<String, Path> pathNameToPathMap = new HashMap<>();
- Queue<Path> queue = new LinkedList<>();
- queue.add(new Node(getName()));
- while (!queue.isEmpty()) {
- Path currentPath = queue.poll();
-
- ST superType = Objects.equals(currentPath.typeName, getName()) ? (ST) this :
- typeSystem.getDataType(superTypeClass, currentPath.typeName);
-
- pathNameToPathMap.put(currentPath.pathName, currentPath);
- if (superType != this) {
- List<Path> typePaths = superTypePaths.get(superType.getName());
- if (typePaths == null) {
- typePaths = new ArrayList<>();
- superTypePaths.put(superType.getName(), typePaths);
- }
- typePaths.add(currentPath);
- }
-
- ImmutableSet<String> sTs = superType == this ? superTypes : superType.superTypes;
-
- if (sTs != null) {
- for (String sT : sTs) {
- queue.add(new Path(sT, currentPath));
- }
- }
- }
-
- this.superTypePaths = ImmutableMap.copyOf(superTypePaths);
- this.pathNameToPathMap = ImmutableMap.copyOf(pathNameToPathMap);
-
- }
-
- protected Pair<FieldMapping, ImmutableMap<String, String>> constructFieldMapping(ImmutableSet<String> superTypes,
- AttributeInfo... fields) throws AtlasException {
-
- Map<String, AttributeInfo> fieldsMap = new LinkedHashMap();
- Map<String, Integer> fieldPos = new HashMap();
- Map<String, Integer> fieldNullPos = new HashMap();
- Map<String, String> attributeNameToType = new HashMap<>();
-
- int numBools = 0;
- int numBytes = 0;
- int numShorts = 0;
- int numInts = 0;
- int numLongs = 0;
- int numFloats = 0;
- int numDoubles = 0;
- int numBigInts = 0;
- int numBigDecimals = 0;
- int numDates = 0;
- int numStrings = 0;
- int numArrays = 0;
- int numMaps = 0;
- int numStructs = 0;
- int numReferenceables = 0;
-
- setupSuperTypesGraph(superTypes);
-
- Iterator<Path> pathItr = pathIterator();
- while (pathItr.hasNext()) {
- Path currentPath = pathItr.next();
-
- ST superType = Objects.equals(currentPath.typeName, getName()) ? (ST) this :
- typeSystem.getDataType(superTypeClass, currentPath.typeName);
-
- ImmutableList<AttributeInfo> superTypeFields =
- superType == this ? ImmutableList.copyOf(fields) : superType.immediateAttrs;
-
- Set<String> immediateFields = new HashSet<>();
-
- for (AttributeInfo i : superTypeFields) {
- if (superType == this) {
- if (immediateFields.contains(i.name)) {
- throw new AtlasException(String.format(
- "Struct defintion cannot contain multiple fields with the" + " same name %s", i.name));
- }
- immediateFields.add(i.name);
- }
-
- String attrName = i.name;
- if (fieldsMap.containsKey(attrName)) {
- attrName = currentPath.addOverrideAttr(attrName);
- }
- attributeNameToType.put(attrName, superType.getName());
-
- fieldsMap.put(attrName, i);
- fieldNullPos.put(attrName, fieldNullPos.size());
- if (i.dataType() == DataTypes.BOOLEAN_TYPE) {
- fieldPos.put(attrName, numBools);
- numBools++;
- } else if (i.dataType() == DataTypes.BYTE_TYPE) {
- fieldPos.put(attrName, numBytes);
- numBytes++;
- } else if (i.dataType() == DataTypes.SHORT_TYPE) {
- fieldPos.put(attrName, numShorts);
- numShorts++;
- } else if (i.dataType() == DataTypes.INT_TYPE) {
- fieldPos.put(attrName, numInts);
- numInts++;
- } else if (i.dataType() == DataTypes.LONG_TYPE) {
- fieldPos.put(attrName, numLongs);
- numLongs++;
- } else if (i.dataType() == DataTypes.FLOAT_TYPE) {
- fieldPos.put(attrName, numFloats);
- numFloats++;
- } else if (i.dataType() == DataTypes.DOUBLE_TYPE) {
- fieldPos.put(attrName, numDoubles);
- numDoubles++;
- } else if (i.dataType() == DataTypes.BIGINTEGER_TYPE) {
- fieldPos.put(attrName, numBigInts);
- numBigInts++;
- } else if (i.dataType() == DataTypes.BIGDECIMAL_TYPE) {
- fieldPos.put(attrName, numBigDecimals);
- numBigDecimals++;
- } else if (i.dataType() == DataTypes.DATE_TYPE) {
- fieldPos.put(attrName, numDates);
- numDates++;
- } else if (i.dataType() == DataTypes.STRING_TYPE) {
- fieldPos.put(attrName, numStrings);
- numStrings++;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.ENUM) {
- fieldPos.put(i.name, numInts);
- numInts++;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
- fieldPos.put(attrName, numArrays);
- numArrays++;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
- fieldPos.put(attrName, numMaps);
- numMaps++;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT
- || i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
- fieldPos.put(attrName, numStructs);
- numStructs++;
- } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- fieldPos.put(attrName, numReferenceables);
- numReferenceables++;
- } else {
- throw new AtlasException(String.format("Unknown datatype %s", i.dataType()));
- }
- }
- }
-
- this.superTypePaths = ImmutableMap.copyOf(superTypePaths);
- this.pathNameToPathMap = ImmutableMap.copyOf(pathNameToPathMap);
-
- FieldMapping fm =
- new FieldMapping(fieldsMap, fieldPos, fieldNullPos, numBools, numBytes, numShorts, numInts, numLongs,
- numFloats, numDoubles, numBigInts, numBigDecimals, numDates, numStrings, numArrays, numMaps,
- numStructs, numReferenceables);
-
- return new Pair(fm, ImmutableMap.copyOf(attributeNameToType));
- }
-
- public IStruct castAs(IStruct s, String superTypeName) throws AtlasException {
-
- if (!superTypePaths.containsKey(superTypeName)) {
- throw new AtlasException(String.format("Cannot downcast to %s from type %s", superTypeName, getName()));
- }
-
- if (s != null) {
- if (!Objects.equals(s.getTypeName(), getName())) {
- throw new AtlasException(
- String.format("Downcast called on wrong type %s, instance type is %s", getName(),
- s.getTypeName()));
- }
-
- List<Path> pathToSuper = superTypePaths.get(superTypeName);
- if (pathToSuper.size() > 1) {
- throw new AtlasException(String.format(
- "Cannot downcast called to %s, from %s: there are multiple paths " + "to SuperType",
- superTypeName, getName()));
- }
-
- ST superType = typeSystem.getDataType(superTypeClass, superTypeName);
- Map<String, String> downCastMap = superType.constructDowncastFieldMap(this, pathToSuper.get(0));
- return new DownCastStructInstance(superTypeName, new DownCastFieldMapping(ImmutableMap.copyOf(downCastMap)),
- s);
- }
-
- return null;
- }
-
- public ST getDefinedType(String attrName) throws AtlasException {
- if (!attributeNameToType.containsKey(attrName)) {
- throw new AtlasException(String.format("Unknown attribute %s in type %s", attrName, getName()));
- }
- return typeSystem.getDataType(superTypeClass, attributeNameToType.get(attrName));
- }
-
- public String getDefinedTypeName(String attrName) throws AtlasException {
- return getDefinedType(attrName).getName();
- }
-
- public String getQualifiedName(String attrName) throws AtlasException {
- String attrTypeName = getDefinedTypeName(attrName);
- return attrName.contains(".") ? attrName : String.format("%s.%s", attrTypeName, attrName);
- }
-
- protected Map<String, String> constructDowncastFieldMap(ST subType, Path pathToSubType) {
-
- String pathToSubTypeName = pathToSubType.pathAfterThis;
- /*
- * the downcastMap;
- */
- Map<String, String> dCMap = new HashMap<>();
- Iterator<Path> itr = pathIterator();
- while (itr.hasNext()) {
- Path p = itr.next();
- Path pInSubType = (Path) subType.pathNameToPathMap.get(p.pathName + "." + pathToSubTypeName);
-
- if (pInSubType.hiddenAttributeMap != null) {
- for (Map.Entry<String, String> e : pInSubType.hiddenAttributeMap.entrySet()) {
- String mappedInThisType =
- p.hiddenAttributeMap != null ? p.hiddenAttributeMap.get(e.getKey()) : null;
- if (mappedInThisType == null) {
- dCMap.put(e.getKey(), e.getValue());
- } else {
- dCMap.put(mappedInThisType, e.getValue());
- }
- }
- }
- }
- return dCMap;
- }
-
- @Override
- public String toString() {
- StringBuilder buf = new StringBuilder();
- try {
- output(buf, new HashSet<String>());
- }
- catch (AtlasException e) {
- throw new RuntimeException(e);
- }
- return buf.toString();
- }
-
- @Override
- public void output(Appendable buf, Set<String> typesInProcess) throws AtlasException {
-
- if (typesInProcess == null) {
- typesInProcess = new HashSet<>();
- }
- else if (typesInProcess.contains(name)) {
- // Avoid infinite recursion on bi-directional reference attributes.
- try {
- buf.append(name);
- } catch (IOException e) {
- throw new AtlasException(e);
- }
- return;
- }
-
- typesInProcess.add(name);
- try {
- buf.append(getClass().getSimpleName()).append('{');
- buf.append("name=").append(name);
- buf.append(", description=").append(description);
- buf.append(", superTypes=").append(superTypes.toString());
- buf.append(", immediateAttrs=[");
- UnmodifiableIterator<AttributeInfo> it = immediateAttrs.iterator();
- while (it.hasNext()) {
- AttributeInfo attrInfo = it.next();
- attrInfo.output(buf, typesInProcess);
- if (it.hasNext()) {
- buf.append(", ");
- }
- else {
- buf.append(']');
- }
- }
- buf.append("}");
- }
- catch(IOException e) {
- throw new AtlasException(e);
- }
- finally {
- typesInProcess.remove(name);
- }
- }
-
- public Set<String> getAllSuperTypeNames() {
- return superTypePaths.keySet();
- }
-
- public Iterator<Path> pathIterator() {
- return new PathItr();
- }
-
- static class Path {
- public final String typeName;
- public final String pathName;
- public final String pathAfterThis;
- private final Path subTypePath;
- /*
- * name mapping for attributes hidden by a SubType.
- */ Map<String, String> hiddenAttributeMap;
-
- Path(String typeName, Path childPath) throws AtlasException {
- this.typeName = typeName;
- this.subTypePath = childPath;
- if (childPath.contains(typeName)) {
- throw new CyclicTypeDefinition(this);
- }
- pathName = String.format("%s.%s", typeName, childPath.pathName);
- pathAfterThis = childPath.pathName;
- }
-
- Path(String typeName) {
- assert getClass() == Node.class;
- this.typeName = typeName;
- this.subTypePath = null;
- pathName = typeName;
- pathAfterThis = null;
- }
-
- public boolean contains(String typeName) {
- return this.typeName.equals(typeName) || (subTypePath != null && subTypePath.contains(typeName));
- }
-
- public String pathString(String nodeSep) {
-
- StringBuilder b = new StringBuilder();
- Path p = this;
-
- while (p != null) {
- b.append(p.typeName);
- p = p.subTypePath;
- if (p != null) {
- b.append(nodeSep);
- }
- }
- return b.toString();
- }
-
- String addOverrideAttr(String name) {
- hiddenAttributeMap = hiddenAttributeMap == null ? new HashMap<String, String>() : hiddenAttributeMap;
- String oName = pathName + "." + name;
- hiddenAttributeMap.put(name, oName);
- return oName;
- }
- }
-
- static class Node extends Path {
- Node(String typeName) {
- super(typeName);
- }
- }
-
- static class CyclicTypeDefinition extends AtlasException {
-
- CyclicTypeDefinition(Path p) {
- super(String.format("Cycle in Type Definition %s", p.pathString(" -> ")));
- }
- }
-
- class PathItr implements Iterator<Path> {
-
- Queue<Path> pathQueue;
-
- PathItr() {
- pathQueue = new LinkedList<>();
- pathQueue.add(pathNameToPathMap.get(getName()));
- }
-
- @Override
- public boolean hasNext() {
- return !pathQueue.isEmpty();
- }
-
- @Override
- public Path next() {
- Path p = pathQueue.poll();
-
- if(p != null) {
- ST t = null;
- try {
- t = typeSystem.getDataType(superTypeClass, p.typeName);
- } catch (AtlasException me) {
- throw new RuntimeException(me);
- }
- if (t.superTypes != null) {
- for (String sT : (ImmutableSet<String>) t.superTypes) {
- String nm = sT + "." + p.pathName;
- pathQueue.add(pathNameToPathMap.get(nm));
- }
- }
- }
- return p;
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException();
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalTypeDefinition.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalTypeDefinition.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalTypeDefinition.java
deleted file mode 100755
index ab63fea..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalTypeDefinition.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.AtlasConstants;
-
-import java.util.Objects;
-
-public class HierarchicalTypeDefinition<T extends HierarchicalType> extends StructTypeDefinition {
-
- public final ImmutableSet<String> superTypes;
- public final String hierarchicalMetaTypeName;
-
- public HierarchicalTypeDefinition(Class<T> hierarchicalMetaType, String typeName, String typeDescription, ImmutableSet<String> superTypes,
- AttributeDefinition[] attributeDefinitions) {
- this(hierarchicalMetaType, typeName, typeDescription, AtlasConstants.DEFAULT_TYPE_VERSION, superTypes,
- attributeDefinitions);
- }
-
- // Used only for de-serializing JSON String to typedef.
- public HierarchicalTypeDefinition( String hierarchicalMetaTypeName, String typeName, String typeDescription, String typeVersion, String[] superTypes, AttributeDefinition[] attributeDefinitions) throws ClassNotFoundException {
- this((Class<T>) Class.forName(hierarchicalMetaTypeName), typeName, typeDescription, typeVersion, ImmutableSet.copyOf(superTypes), attributeDefinitions);
- }
- // Used only for de-serializing JSON String to typedef (no typeVersion).
- public HierarchicalTypeDefinition( String hierarchicalMetaTypeName, String typeName, String typeDescription, String[] superTypes, AttributeDefinition[] attributeDefinitions) throws ClassNotFoundException {
- this((Class<T>) Class.forName(hierarchicalMetaTypeName), typeName, typeDescription, AtlasConstants.DEFAULT_TYPE_VERSION, ImmutableSet.copyOf(superTypes), attributeDefinitions);
- }
- // Used only for serializing typedef to JSON String.
- public HierarchicalTypeDefinition( String hierarchicalMetaTypeName, String typeName, String typeDescription, String typeVersion, ImmutableSet<String> superTypes, AttributeDefinition[] attributeDefinitions, String typeDef) throws ClassNotFoundException {
- this((Class<T>) Class.forName(hierarchicalMetaTypeName), typeName, typeDescription, typeVersion, superTypes, attributeDefinitions);
- }
- // Used only for serializing typedef to JSON String (no typeVersion).
- public HierarchicalTypeDefinition( String hierarchicalMetaTypeName, String typeName, String typeDescription, ImmutableSet<String> superTypes, AttributeDefinition[] attributeDefinitions, String typeDef) throws ClassNotFoundException {
- this((Class<T>) Class.forName(hierarchicalMetaTypeName), typeName, typeDescription, AtlasConstants.DEFAULT_TYPE_VERSION, superTypes, attributeDefinitions);
- }
-
- public HierarchicalTypeDefinition(Class<T> hierarchicalMetaType, String typeName, String typeDescription, String typeVersion, ImmutableSet<String> superTypes, AttributeDefinition[] attributeDefinitions) {
- super(typeName, typeDescription, typeVersion, false, attributeDefinitions);
- this.hierarchicalMetaTypeName = hierarchicalMetaType.getName();
- this.superTypes = superTypes == null ? ImmutableSet.<String>of() : superTypes;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- if (!super.equals(o)) return false;
- HierarchicalTypeDefinition<?> that = (HierarchicalTypeDefinition<?>) o;
- return Objects.equals(superTypes, that.superTypes) &&
- Objects.equals(hierarchicalMetaTypeName, that.hierarchicalMetaTypeName);
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(super.hashCode(), superTypes, hierarchicalMetaTypeName);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalTypeDependencySorter.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalTypeDependencySorter.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalTypeDependencySorter.java
deleted file mode 100644
index aaec05c..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/HierarchicalTypeDependencySorter.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.typesystem.types;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import com.google.common.collect.ImmutableSet;
-
-
-/**
- * Sorts hierarchical types by supertype dependency
- */
-public class HierarchicalTypeDependencySorter {
-
- /**
- * Sorts the specified hierarchical types by supertype dependencies,
- * such that any type A which is a supertype of type B
- * will always be located earlier in the result list; that is, the supertype
- * A would be found at some index i and subtype B would be found at some index j,
- * and i < j.
- *
- * @param types hierarchical types to be sorted
- * @return hierarchical types sorted by supertype dependency
- */
- public static <T extends HierarchicalType> List<T> sortTypes(List<T> types) {
- Map<String, T> typesByName = new HashMap<>();
- for (T type : types) {
- typesByName.put(type.name, type);
- }
- List<T> result = new ArrayList<>(types.size());
- Set<T> processed = new HashSet<>();
- for (T type : types) {
- addToResult(type, result, processed, typesByName);
- }
- return result;
- }
-
- private static <T extends HierarchicalType> void addToResult(T type, List<T> result,
- Set<T> processed, Map<String, T> typesByName) {
-
- if (processed.contains(type)) {
- return;
- }
- processed.add(type);
- ImmutableSet<String> superTypeNames = type.superTypes;
- for (String superTypeName : superTypeNames) {
- // Recursively add any supertypes first to the result.
- T superType = typesByName.get(superTypeName);
- if (superType != null) {
- addToResult(superType, result, processed, typesByName);
- }
- }
- result.add(type);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/IConstructableType.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/IConstructableType.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/IConstructableType.java
deleted file mode 100755
index d54da0a..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/IConstructableType.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.ITypedInstance;
-
-import java.util.List;
-
-public interface IConstructableType<U, T extends ITypedInstance> extends IDataType<U> {
-
- T createInstance() throws AtlasException;
-
- FieldMapping fieldMapping();
-
- List<String> getNames(AttributeInfo info);
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/IDataType.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/IDataType.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/IDataType.java
deleted file mode 100755
index a7a2123..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/IDataType.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.apache.atlas.AtlasException;
-
-import java.security.MessageDigest;
-import java.util.Set;
-
-public interface IDataType<T> {
- String getName();
-
- T convert(Object val, Multiplicity m) throws AtlasException;
-
- DataTypes.TypeCategory getTypeCategory();
-
- /**
- * Output a string representation of a value instance of this type.
- *
- * @param val
- * @param buf
- * @param prefix
- * @param inProcess
- * @throws AtlasException
- */
- void output(T val, Appendable buf, String prefix, Set<T> inProcess) throws AtlasException;
-
- /**
- * Output a string representation of this type.
- *
- * @param buf
- * @param typesInProcess
- * @throws AtlasException
- */
- void output(Appendable buf, Set<String> typesInProcess) throws AtlasException;
-
- void validateUpdate(IDataType newType) throws TypeUpdateException;
-
- void updateSignatureHash(MessageDigest digester, Object val) throws AtlasException;
-
- String getDescription();
-
- String getVersion();
-}
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/Multiplicity.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/Multiplicity.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/Multiplicity.java
deleted file mode 100755
index c213d75..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/Multiplicity.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-
-import java.util.Objects;
-
-public final class Multiplicity {
-
- public static final Multiplicity OPTIONAL = new Multiplicity(0, 1, false);
- public static final Multiplicity REQUIRED = new Multiplicity(1, 1, false);
- public static final Multiplicity COLLECTION = new Multiplicity(1, Integer.MAX_VALUE, false);
- public static final Multiplicity SET = new Multiplicity(1, Integer.MAX_VALUE, true);
-
- public final int lower;
- public final int upper;
- public final boolean isUnique;
-
- public Multiplicity(int lower, int upper, boolean isUnique) {
- assert lower >= 0;
- assert upper >= 1;
- assert upper >= lower;
- this.lower = lower;
- this.upper = upper;
- this.isUnique = isUnique;
- }
-
- public boolean isMany() {
- return upper > 1;
- }
-
-
- public boolean nullAllowed() {
- return lower == 0;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
- Multiplicity that = (Multiplicity) o;
- return lower == that.lower &&
- upper == that.upper &&
- isUnique == that.isUnique;
- }
-
- @Override
- public int hashCode() {
- return Objects.hash(lower, upper, isUnique);
- }
-
- @Override
- public String toString() {
- return "{lower=" + lower +
- ", upper=" + upper +
- ", isUnique=" + isUnique +
- '}';
- }
-
- public String toJson() throws JSONException {
- JSONObject json = new JSONObject();
- json.put("lower", lower);
- json.put("upper", upper);
- json.put("isUnique", isUnique);
- return json.toString();
- }
-
- public static Multiplicity fromJson(String jsonStr) throws JSONException {
- JSONObject json = new JSONObject(jsonStr);
- return new Multiplicity(json.getInt("lower"), json.getInt("upper"), json.getBoolean("isUnique"));
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphTraversal.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphTraversal.java b/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphTraversal.java
deleted file mode 100755
index 9a1847c..0000000
--- a/typesystem/src/main/java/org/apache/atlas/typesystem/types/ObjectGraphTraversal.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.persistence.Id;
-
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.Map;
-import java.util.Queue;
-import java.util.Set;
-
-public class ObjectGraphTraversal implements Iterator<ObjectGraphTraversal.InstanceTuple> {
-
- final Queue<InstanceTuple> queue;
- final TypeSystem typeSystem;
- Set<Id> processedIds;
-
- public ObjectGraphTraversal(TypeSystem typeSystem, IReferenceableInstance start) throws AtlasException {
- this.typeSystem = typeSystem;
- queue = new LinkedList<>();
- processedIds = new HashSet<>();
- processReferenceableInstance(start);
- }
-
- void processValue(IDataType dT, Object val) throws AtlasException {
- if (val != null) {
- if (dT.getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
- IDataType elemType = ((DataTypes.ArrayType) dT).getElemType();
- processCollection(elemType, val);
- } else if (dT.getTypeCategory() == DataTypes.TypeCategory.MAP) {
- IDataType keyType = ((DataTypes.MapType) dT).getKeyType();
- IDataType valueType = ((DataTypes.MapType) dT).getValueType();
- processMap(keyType, valueType, val);
- } else if (dT.getTypeCategory() == DataTypes.TypeCategory.STRUCT
- || dT.getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
- processStruct(val);
- } else if (dT.getTypeCategory() == DataTypes.TypeCategory.CLASS) {
- processReferenceableInstance(val);
- }
- }
- }
-
- void processMap(IDataType keyType, IDataType valueType, Object val) throws AtlasException {
- if (keyType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE
- && valueType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
- return;
- }
-
- if (val != null) {
- Iterator<Map.Entry> it = null;
- if (Map.class.isAssignableFrom(val.getClass())) {
- it = ((Map) val).entrySet().iterator();
- ImmutableMap.Builder b = ImmutableMap.builder();
- while (it.hasNext()) {
- Map.Entry e = it.next();
- processValue(keyType, e.getKey());
- processValue(valueType, e.getValue());
- }
- }
- }
- }
-
- void processCollection(IDataType elemType, Object val) throws AtlasException {
-
- if (elemType.getTypeCategory() == DataTypes.TypeCategory.PRIMITIVE) {
- return;
- }
-
- if (val != null) {
- Iterator it = null;
- if (val instanceof Collection) {
- it = ((Collection) val).iterator();
- } else if (val instanceof Iterable) {
- it = ((Iterable) val).iterator();
- } else if (val instanceof Iterator) {
- it = (Iterator) val;
- }
- if (it != null) {
- DataTypes.TypeCategory elemCategory = elemType.getTypeCategory();
- while (it.hasNext()) {
- Object elem = it.next();
- processValue(elemType, elem);
- }
- }
- }
- }
-
- void processStruct(Object val) throws AtlasException {
-
- if (val == null || !(val instanceof IStruct)) {
- return;
- }
-
- IStruct i = (IStruct) val;
-
- IConstructableType type = typeSystem.getDataType(IConstructableType.class, i.getTypeName());
-
- for (Map.Entry<String, AttributeInfo> e : type.fieldMapping().fields.entrySet()) {
- AttributeInfo aInfo = e.getValue();
- String attrName = e.getKey();
- if (aInfo.dataType().getTypeCategory() != DataTypes.TypeCategory.PRIMITIVE) {
- processValue(aInfo.dataType(), i.get(attrName));
- }
- }
- }
-
- void processReferenceableInstance(Object val) throws AtlasException {
-
- if (val == null || !(val instanceof IReferenceableInstance || val instanceof Id)) {
- return;
- }
-
- if (val instanceof Id) {
- Id id = (Id) val;
- if (id.isUnassigned()) {
- add(id, null);
- }
- return;
- }
-
- IReferenceableInstance ref = (IReferenceableInstance) val;
- Id id = ref.getId();
- if (id.isUnassigned()) {
- add(id, ref);
- if (!processedIds.contains(id)) {
- processedIds.add(id);
- processStruct(val);
-
- ImmutableList<String> traits = ref.getTraits();
- for (String trait : traits) {
- processStruct(ref.getTrait(trait));
- }
- }
- }
- }
-
- void add(Id id, IReferenceableInstance ref) {
- queue.add(new InstanceTuple(id, ref));
- }
-
-
- @Override
- public boolean hasNext() {
- return !queue.isEmpty();
- }
-
- @Override
- public InstanceTuple next() {
- try {
- InstanceTuple t = queue.poll();
- if(t != null) {
- processReferenceableInstance(t.instance);
- }
- return t;
- } catch (AtlasException me) {
- throw new RuntimeException(me);
- }
- }
-
- @Override
- public void remove() {
- throw new UnsupportedOperationException();
- }
-
- public static class InstanceTuple {
- public final Id id;
- public final IReferenceableInstance instance;
-
- public InstanceTuple(Id id, IReferenceableInstance instance) {
- this.id = id;
- this.instance = instance;
- }
- }
-
-}
[35/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplication of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java b/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java
index 7f24d5a..03a86f4 100644
--- a/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java
+++ b/repository/src/main/java/org/apache/atlas/discovery/EntityDiscoveryService.java
@@ -24,7 +24,6 @@ import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
import org.apache.atlas.SortOrder;
import org.apache.atlas.annotation.GraphTransaction;
-import org.apache.atlas.discovery.graph.DefaultGraphPersistenceStrategy;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.discovery.AtlasSearchResult;
import org.apache.atlas.model.discovery.AtlasSearchResult.AtlasFullTextResult;
@@ -44,7 +43,6 @@ import org.apache.atlas.query.QueryParser;
import org.apache.atlas.query.QueryProcessor;
import org.apache.atlas.query.SelectExpressionHelper;
import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.MetadataRepository;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.repository.graphdb.AtlasGraph;
@@ -70,9 +68,6 @@ import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
-import scala.Option;
-import scala.util.Either;
-import scala.util.parsing.combinator.Parsers.NoSuccess;
import javax.inject.Inject;
import javax.script.Bindings;
@@ -104,7 +99,6 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
private static final String DEFAULT_SORT_ATTRIBUTE_NAME = "name";
private final AtlasGraph graph;
- private final DefaultGraphPersistenceStrategy graphPersistenceStrategy;
private final EntityGraphRetriever entityRetriever;
private final AtlasGremlinQueryProvider gremlinQueryProvider;
private final AtlasTypeRegistry typeRegistry;
@@ -116,11 +110,10 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
private final UserProfileService userProfileService;
@Inject
- EntityDiscoveryService(MetadataRepository metadataRepository, AtlasTypeRegistry typeRegistry,
+ EntityDiscoveryService(AtlasTypeRegistry typeRegistry,
AtlasGraph graph, GraphBackedSearchIndexer indexer, SearchTracker searchTracker,
UserProfileService userProfileService) throws AtlasException {
this.graph = graph;
- this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
this.entityRetriever = new EntityGraphRetriever(typeRegistry);
this.indexer = indexer;
this.searchTracker = searchTracker;
@@ -685,15 +678,14 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
private GremlinQuery toGremlinQuery(String query, int limit, int offset) throws AtlasBaseException {
QueryParams params = validateSearchParams(limit, offset);
- Either<NoSuccess, Expression> either = QueryParser.apply(query, params);
+ Expression expression = QueryParser.apply(query, params);
- if (either.isLeft()) {
+ if (expression == null) {
throw new AtlasBaseException(DISCOVERY_QUERY_FAILED, query);
}
- Expression expression = either.right().get();
Expression validExpression = QueryProcessor.validate(expression);
- GremlinQuery gremlinQuery = new GremlinTranslator(validExpression, graphPersistenceStrategy).translate();
+ GremlinQuery gremlinQuery = new GremlinTranslator(validExpression).translate();
if (LOG.isDebugEnabled()) {
LOG.debug("Translated Gremlin Query: {}", gremlinQuery.queryStr());
@@ -730,9 +722,9 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
List<List<Object>> values = new ArrayList<>();
// extract select attributes from gremlin query
- Option<SelectExpression> selectExpr = SelectExpressionHelper.extractSelectExpression(query.expr());
- if (selectExpr.isDefined()) {
- List<AliasExpression> aliases = selectExpr.get().toJavaList();
+ SelectExpression selectExpr = SelectExpressionHelper.extractSelectExpression(query.expr());
+ if (selectExpr != null) {
+ List<AliasExpression> aliases = selectExpr.toJavaList();
if (CollectionUtils.isNotEmpty(aliases)) {
for (AliasExpression alias : aliases) {
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java b/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java
index 3ae41c8..ae45c5c 100644
--- a/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java
+++ b/repository/src/main/java/org/apache/atlas/discovery/EntityLineageService.java
@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -23,6 +23,7 @@ import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.annotation.GraphTransaction;
import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.lineage.AtlasLineageInfo;
import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageDirection;
@@ -31,11 +32,17 @@ import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.apache.atlas.repository.graphdb.AtlasVertex;
+import org.apache.atlas.repository.store.graph.v1.AtlasGraphUtilsV1;
import org.apache.atlas.repository.store.graph.v1.EntityGraphRetriever;
+import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasTypeRegistry;
+import org.apache.atlas.type.AtlasTypeUtil;
import org.apache.atlas.util.AtlasGremlinQueryProvider;
import org.apache.atlas.util.AtlasGremlinQueryProvider.AtlasGremlinQuery;
+import org.apache.atlas.v1.model.lineage.SchemaResponse.SchemaDetails;
import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Service;
import javax.inject.Inject;
@@ -45,21 +52,24 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.stream.Collectors;
@Service
public class EntityLineageService implements AtlasLineageService {
- private static final String INPUT_PROCESS_EDGE = "__Process.inputs";
- private static final String OUTPUT_PROCESS_EDGE = "__Process.outputs";
+ private static final String INPUT_PROCESS_EDGE = "__Process.inputs";
+ private static final String OUTPUT_PROCESS_EDGE = "__Process.outputs";
private final AtlasGraph graph;
private final AtlasGremlinQueryProvider gremlinQueryProvider;
private final EntityGraphRetriever entityRetriever;
+ private final AtlasTypeRegistry atlasTypeRegistry;
@Inject
- EntityLineageService(AtlasTypeRegistry typeRegistry, AtlasGraph atlasGraph) throws DiscoveryException {
- this.graph = atlasGraph;
+ EntityLineageService(AtlasTypeRegistry typeRegistry, AtlasGraph atlasGraph) {
+ this.graph = atlasGraph;
this.gremlinQueryProvider = AtlasGremlinQueryProvider.INSTANCE;
- this.entityRetriever = new EntityGraphRetriever(typeRegistry);
+ this.entityRetriever = new EntityGraphRetriever(typeRegistry);
+ this.atlasTypeRegistry = typeRegistry;
}
@Override
@@ -88,6 +98,53 @@ public class EntityLineageService implements AtlasLineageService {
return lineageInfo;
}
+ @Override
+ @GraphTransaction
+ public SchemaDetails getSchemaForHiveTableByName(final String datasetName) throws AtlasBaseException {
+ if (StringUtils.isEmpty(datasetName)) {
+ // TODO: Complete error handling here
+ throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST);
+ }
+
+ AtlasEntityType hive_table = atlasTypeRegistry.getEntityTypeByName("hive_table");
+
+ Map<String, Object> lookupAttributes = new HashMap<>();
+ lookupAttributes.put("qualifiedName", datasetName);
+ String guid = AtlasGraphUtilsV1.getGuidByUniqueAttributes(hive_table, lookupAttributes);
+
+ return getSchemaForHiveTableByGuid(guid);
+ }
+
+ @Override
+ @GraphTransaction
+ public SchemaDetails getSchemaForHiveTableByGuid(final String guid) throws AtlasBaseException {
+ if (StringUtils.isEmpty(guid)) {
+ throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST);
+ }
+ SchemaDetails ret = new SchemaDetails();
+ AtlasEntityType hive_column = atlasTypeRegistry.getEntityTypeByName("hive_column");
+
+ ret.setDataType(AtlasTypeUtil.toClassTypeDefinition(hive_column));
+
+ AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = entityRetriever.toAtlasEntityWithExtInfo(guid);
+ Map<String, AtlasEntity> referredEntities = entityWithExtInfo.getReferredEntities();
+
+ if (MapUtils.isNotEmpty(referredEntities)) {
+ List<Map<String, Object>> rows = referredEntities.entrySet()
+ .stream()
+ .filter(EntityLineageService::isHiveColumn)
+ .map(e -> AtlasTypeUtil.toMap(e.getValue()))
+ .collect(Collectors.toList());
+ ret.setRows(rows);
+ }
+
+ return ret;
+ }
+
+ private static boolean isHiveColumn(Map.Entry<String, AtlasEntity> e) {
+ return StringUtils.equals("hive_column", e.getValue().getTypeName());
+ }
+
private AtlasLineageInfo getLineageInfo(String guid, LineageDirection direction, int depth) throws AtlasBaseException {
Map<String, AtlasEntityHeader> entities = new HashMap<>();
Set<LineageRelation> relations = new HashSet<>();
@@ -108,7 +165,7 @@ public class EntityLineageService implements AtlasLineageService {
continue;
}
- AtlasEntityHeader entity = entityRetriever.toAtlasEntityHeader((AtlasVertex)vertex);
+ AtlasEntityHeader entity = entityRetriever.toAtlasEntityHeader((AtlasVertex) vertex);
if (!entities.containsKey(entity.getGuid())) {
entities.put(entity.getGuid(), entity);
@@ -143,7 +200,7 @@ public class EntityLineageService implements AtlasLineageService {
return ret;
}
- private String getLineageQuery(String entityGuid, LineageDirection direction, int depth) throws AtlasBaseException {
+ private String getLineageQuery(String entityGuid, LineageDirection direction, int depth) {
String lineageQuery = null;
if (direction.equals(LineageDirection.INPUT)) {
@@ -169,18 +226,19 @@ public class EntityLineageService implements AtlasLineageService {
}
private boolean entityExists(String guid) {
- boolean ret = false;
+ boolean ret = false;
Iterator<AtlasVertex> results = graph.query()
- .has(Constants.GUID_PROPERTY_KEY, guid)
- .vertices().iterator();
+ .has(Constants.GUID_PROPERTY_KEY, guid)
+ .vertices().iterator();
while (results.hasNext()) {
AtlasVertex entityVertex = results.next();
List<String> superTypes = GraphHelper.getSuperTypeNames(entityVertex);
- ret = (CollectionUtils.isNotEmpty(superTypes)) ? superTypes.contains(AtlasClient.DATA_SET_SUPER_TYPE) : false;
+ ret = (CollectionUtils.isNotEmpty(superTypes)) && superTypes.contains(AtlasClient.DATA_SET_SUPER_TYPE);
}
return ret;
}
+
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/discovery/SearchIndexer.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/discovery/SearchIndexer.java b/repository/src/main/java/org/apache/atlas/discovery/SearchIndexer.java
index 8e67e32..b35346e 100755
--- a/repository/src/main/java/org/apache/atlas/discovery/SearchIndexer.java
+++ b/repository/src/main/java/org/apache/atlas/discovery/SearchIndexer.java
@@ -18,11 +18,9 @@
package org.apache.atlas.discovery;
-import org.apache.atlas.listener.TypesChangeListener;
-
/**
* Interface for indexing types.
*/
-public interface SearchIndexer extends TypesChangeListener {
+public interface SearchIndexer {
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/discovery/graph/DefaultGraphPersistenceStrategy.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/discovery/graph/DefaultGraphPersistenceStrategy.java b/repository/src/main/java/org/apache/atlas/discovery/graph/DefaultGraphPersistenceStrategy.java
deleted file mode 100755
index 9b0aa4c..0000000
--- a/repository/src/main/java/org/apache/atlas/discovery/graph/DefaultGraphPersistenceStrategy.java
+++ /dev/null
@@ -1,292 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.discovery.graph;
-
-import java.util.List;
-
-import javax.inject.Inject;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.query.GraphPersistenceStrategies;
-import org.apache.atlas.query.GraphPersistenceStrategies$class;
-import org.apache.atlas.query.TypeUtils;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graph.GraphBackedMetadataRepository;
-import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.repository.graphdb.AtlasEdgeDirection;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.repository.graphdb.GremlinVersion;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructType;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.ImmutableCollection;
-import com.google.common.collect.ImmutableList;
-
-/**
- * Default implementation of GraphPersistenceStrategy.
- */
-public class DefaultGraphPersistenceStrategy implements GraphPersistenceStrategies {
-
- private static final Logger LOG = LoggerFactory.getLogger(DefaultGraphPersistenceStrategy.class);
-
- private final GraphBackedMetadataRepository metadataRepository;
-
- @Inject
- public DefaultGraphPersistenceStrategy(MetadataRepository metadataRepository) {
- this.metadataRepository = (GraphBackedMetadataRepository) metadataRepository;
- }
-
- @Override
- public String typeAttributeName() {
- return metadataRepository.getTypeAttributeName();
- }
-
- @Override
- public String superTypeAttributeName() {
- return metadataRepository.getSuperTypeAttributeName();
- }
-
- @Override
- public String edgeLabel(IDataType<?> dataType, AttributeInfo aInfo) {
- try {
- return metadataRepository.getEdgeLabel(dataType, aInfo);
- } catch (AtlasException e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public String traitLabel(IDataType<?> dataType, String traitName) {
- return metadataRepository.getTraitLabel(dataType, traitName);
- }
-
- @Override
- public String fieldNameInVertex(IDataType<?> dataType, AttributeInfo aInfo) {
- try {
- return metadataRepository.getFieldNameInVertex(dataType, aInfo);
- } catch (AtlasException e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public List<String> traitNames(AtlasVertex AtlasVertex) {
- return GraphHelper.getTraitNames(AtlasVertex);
- }
-
- @Override
- public Id getIdFromVertex(String dataTypeName, AtlasVertex vertex) {
- return GraphHelper.getIdFromVertex(dataTypeName, vertex);
- }
-
- @Override
- public ITypedReferenceableInstance constructClassInstanceId(ClassType classType, Object value) {
- try {
- AtlasVertex classVertex = (AtlasVertex) value;
- ITypedReferenceableInstance classInstance = classType.createInstance(GraphHelper.getIdFromVertex(classVertex),
- new String[0]);
- return classType.convert(classInstance, Multiplicity.OPTIONAL);
- } catch (AtlasException e) {
- LOG.error("error while constructing an instance", e);
- }
- return null;
- }
-
- @Override
- public <U> U constructInstance(IDataType<U> dataType, Object value) {
- try {
- switch (dataType.getTypeCategory()) {
- case PRIMITIVE:
- case ENUM:
- return dataType.convert(value, Multiplicity.OPTIONAL);
- case ARRAY:
- DataTypes.ArrayType arrType = (DataTypes.ArrayType) dataType;
- IDataType<?> elemType = arrType.getElemType();
- ImmutableCollection.Builder result = ImmutableList.builder();
- List list = (List) value;
- for(Object listElement : list) {
- Object collectionEntry = constructCollectionEntry(elemType, listElement);
- if(collectionEntry != null) {
- result.add(collectionEntry);
- }
- }
- return (U)result.build();
- case MAP:
- // todo
- break;
-
- case STRUCT:
- AtlasVertex structVertex = (AtlasVertex) value;
- StructType structType = (StructType) dataType;
- ITypedStruct structInstance = structType.createInstance();
- TypeSystem.IdType idType = TypeSystem.getInstance().getIdType();
-
- if (dataType.getName().equals(idType.getName())) {
- structInstance.set(idType.typeNameAttrName(), GraphHelper.getSingleValuedProperty(structVertex, typeAttributeName(), String.class));
- structInstance.set(idType.idAttrName(), GraphHelper.getSingleValuedProperty(structVertex, idAttributeName(), String.class));
- String stateValue = GraphHelper.getSingleValuedProperty(structVertex, stateAttributeName(), String.class);
- if (stateValue != null) {
- structInstance.set(idType.stateAttrName(), stateValue);
- }
- structInstance.set(idType.versionAttrName(), structVertex.getProperty(versionAttributeName(), Integer.class));
- } else {
- metadataRepository.getGraphToInstanceMapper()
- .mapVertexToInstance(structVertex, structInstance, structType.fieldMapping().fields);
- }
- return dataType.convert(structInstance, Multiplicity.OPTIONAL);
-
- case TRAIT:
- AtlasVertex traitVertex = (AtlasVertex) value;
- TraitType traitType = (TraitType) dataType;
- ITypedStruct traitInstance = traitType.createInstance();
- // todo - this is not right, we should load the Instance associated with this
- // trait. for now just loading the trait struct.
- // metadataRepository.getGraphToInstanceMapper().mapVertexToTraitInstance(
- // traitVertex, dataType.getName(), , traitType, traitInstance);
- metadataRepository.getGraphToInstanceMapper()
- .mapVertexToInstance(traitVertex, traitInstance, traitType.fieldMapping().fields);
- break;
-
- case CLASS:
- AtlasVertex classVertex = (AtlasVertex) value;
- String guid = classVertex.getProperty(Constants.GUID_PROPERTY_KEY, String.class);
- // Check if the instance we need was previously loaded.
- ITypedReferenceableInstance classInstance = RequestContext.get().getInstanceV1(guid);
- if (classInstance == null) {
- classInstance = metadataRepository.getGraphToInstanceMapper().mapGraphToTypedInstance(guid, classVertex);
- }
- return dataType.convert(classInstance, Multiplicity.OPTIONAL);
-
- default:
- throw new UnsupportedOperationException("Load for type " + dataType + "is not supported");
- }
- } catch (AtlasException e) {
- LOG.error("error while constructing an instance", e);
- }
-
- return null;
- }
-
- public <U> U constructCollectionEntry(IDataType<U> elementType, Object value) throws AtlasException {
- switch (elementType.getTypeCategory()) {
- case PRIMITIVE:
- case ENUM:
- return constructInstance(elementType, value);
- //The array values in case of STRUCT, CLASS contain the edgeId if the outgoing edge which links to the STRUCT, CLASS vertex referenced
- case STRUCT:
- case CLASS:
- String edgeId = (String) value;
- return (U) metadataRepository.getGraphToInstanceMapper().getReferredEntity(edgeId, elementType);
- case ARRAY:
- case MAP:
- case TRAIT:
- return null;
- default:
- throw new UnsupportedOperationException("Load for type " + elementType + " in collections is not supported");
- }
- }
-
- @Override
- public String edgeLabel(TypeUtils.FieldInfo fInfo) {
- return fInfo.reverseDataType() == null ? edgeLabel(fInfo.dataType(), fInfo.attrInfo()) :
- edgeLabel(fInfo.reverseDataType(), fInfo.attrInfo());
- }
-
- @Override
- public AtlasEdgeDirection instanceToTraitEdgeDirection() {
- return AtlasEdgeDirection.OUT;
- }
-
- @Override
- public AtlasEdgeDirection traitToInstanceEdgeDirection() {
- return AtlasEdgeDirection.IN;
- }
-
- @Override
- public String idAttributeName() {
- return metadataRepository.getIdAttributeName();
- }
-
- @Override
- public String stateAttributeName() {
- return metadataRepository.getStateAttributeName();
- }
-
- @Override
- public String versionAttributeName() {
- return metadataRepository.getVersionAttributeName();
- }
-
- @Override
- public boolean collectTypeInstancesIntoVar() {
- return GraphPersistenceStrategies$class.collectTypeInstancesIntoVar(this);
- }
-
- @Override
- public boolean filterBySubTypes() {
- return GraphPersistenceStrategies$class.filterBySubTypes(this);
- }
-
- @Override
- public boolean addGraphVertexPrefix(scala.collection.Traversable<GroovyExpression> preStatements) {
- return GraphPersistenceStrategies$class.addGraphVertexPrefix(this, preStatements);
- }
-
- @Override
- public GremlinVersion getSupportedGremlinVersion() {
- return GraphPersistenceStrategies$class.getSupportedGremlinVersion(this);
- }
-
- @Override
- public GroovyExpression generatePersisentToLogicalConversionExpression(GroovyExpression expr, IDataType<?> t) {
- return GraphPersistenceStrategies$class.generatePersisentToLogicalConversionExpression(this,expr, t);
- }
-
- @Override
- public GroovyExpression addInitialQueryCondition(GroovyExpression expr) {
- return GraphPersistenceStrategies$class.addInitialQueryCondition(this, expr);
- }
-
- @Override
- public boolean isPropertyValueConversionNeeded(IDataType<?> t) {
- return GraphPersistenceStrategies$class.isPropertyValueConversionNeeded(this, t);
- }
-
- @Override
- public AtlasGraph getGraph() throws RepositoryException {
- return metadataRepository.getGraph();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/discovery/graph/GraphBackedDiscoveryService.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/discovery/graph/GraphBackedDiscoveryService.java b/repository/src/main/java/org/apache/atlas/discovery/graph/GraphBackedDiscoveryService.java
deleted file mode 100755
index aed8659..0000000
--- a/repository/src/main/java/org/apache/atlas/discovery/graph/GraphBackedDiscoveryService.java
+++ /dev/null
@@ -1,269 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.discovery.graph;
-
-import org.apache.atlas.AtlasClient;
-import org.apache.atlas.annotation.GraphTransaction;
-import org.apache.atlas.discovery.DiscoveryException;
-import org.apache.atlas.discovery.DiscoveryService;
-import org.apache.atlas.exception.AtlasBaseException;
-import org.apache.atlas.query.Expressions;
-import org.apache.atlas.query.GremlinEvaluator;
-import org.apache.atlas.query.GremlinQuery;
-import org.apache.atlas.query.GremlinQueryResult;
-import org.apache.atlas.query.GremlinTranslator;
-import org.apache.atlas.query.QueryParams;
-import org.apache.atlas.query.QueryParser;
-import org.apache.atlas.query.QueryProcessor;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasIndexQuery;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.util.CompiledQueryCacheKey;
-import org.apache.atlas.util.NoopGremlinQuery;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Component;
-import scala.util.Either;
-import scala.util.parsing.combinator.Parsers;
-
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Graph backed implementation of Search.
- */
-@Singleton
-@Component
-public class GraphBackedDiscoveryService implements DiscoveryService {
-
- private static final Logger LOG = LoggerFactory.getLogger(GraphBackedDiscoveryService.class);
-
- private final AtlasGraph graph;
- private final DefaultGraphPersistenceStrategy graphPersistenceStrategy;
-
- public final static String SCORE = "score";
- /**
- * Where the vertex' internal gremlin id is stored in the Map produced by extractResult()
- */
- public final static String GREMLIN_ID_KEY = "id";
- /**
- * Where the id of an edge's incoming vertex is stored in the Map produced by extractResult()
- */
- public final static String GREMLIN_INVERTEX_KEY = "inVertex";
- /**
- * Where the id of an edge's outgoing vertex is stored in the Map produced by extractResult()
- */
- public final static String GREMLIN_OUTVERTEX_KEY = "outVertex";
- /**
- * Where an edge's label is stored in the Map produced by extractResult()
- */
- public final static String GREMLIN_LABEL_KEY = "label";
-
- @Inject
- GraphBackedDiscoveryService(MetadataRepository metadataRepository, AtlasGraph atlasGraph)
- throws DiscoveryException {
- this.graph = atlasGraph;
- this.graphPersistenceStrategy = new DefaultGraphPersistenceStrategy(metadataRepository);
- }
-
- //For titan 0.5.4, refer to http://s3.thinkaurelius.com/docs/titan/0.5.4/index-backends.html for indexed query
- //http://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query
- // .html#query-string-syntax for query syntax
- @Override
- @GraphTransaction
- public String searchByFullText(String query, QueryParams queryParams) throws DiscoveryException {
- String graphQuery = String.format("v.\"%s\":(%s)", Constants.ENTITY_TEXT_PROPERTY_KEY, query);
- LOG.debug("Full text query: {}", graphQuery);
- Iterator<AtlasIndexQuery.Result<?, ?>> results =graph.indexQuery(Constants.FULLTEXT_INDEX, graphQuery).vertices();
- JSONArray response = new JSONArray();
-
- int index = 0;
- while (results.hasNext() && index < queryParams.offset()) {
- results.next();
- index++;
- }
-
- while (results.hasNext() && response.length() < queryParams.limit()) {
-
- AtlasIndexQuery.Result<?,?> result = results.next();
- AtlasVertex<?,?> vertex = result.getVertex();
-
- JSONObject row = new JSONObject();
- String guid = GraphHelper.getGuid(vertex);
- if (guid != null) { //Filter non-class entities
- try {
- row.put("guid", guid);
- row.put(AtlasClient.TYPENAME, GraphHelper.getTypeName(vertex));
- row.put(SCORE, result.getScore());
- } catch (JSONException e) {
- LOG.error("Unable to create response", e);
- throw new DiscoveryException("Unable to create response");
- }
-
- response.put(row);
- }
- }
- return response.toString();
- }
-
- @Override
- @GraphTransaction
- public String searchByDSL(String dslQuery, QueryParams queryParams) throws DiscoveryException {
- GremlinQueryResult queryResult = evaluate(dslQuery, queryParams);
- return queryResult.toJson();
- }
-
- public GremlinQueryResult evaluate(String dslQuery, QueryParams queryParams) throws DiscoveryException {
- if(LOG.isDebugEnabled()) {
- LOG.debug("Executing dsl query={}", dslQuery);
- }
- try {
- GremlinQuery gremlinQuery = parseAndTranslateDsl(dslQuery, queryParams);
- if(gremlinQuery instanceof NoopGremlinQuery) {
- return new GremlinQueryResult(dslQuery, ((NoopGremlinQuery)gremlinQuery).getDataType(), Collections.emptyList());
- }
-
- return new GremlinEvaluator(gremlinQuery, graphPersistenceStrategy, graph).evaluate();
-
- } catch (Exception e) { // unable to catch ExpressionException
- throw new DiscoveryException("Invalid expression : " + dslQuery, e);
- }
- }
-
- private GremlinQuery parseAndTranslateDsl(String dslQuery, QueryParams queryParams) throws DiscoveryException {
-
- CompiledQueryCacheKey entry = new CompiledQueryCacheKey(dslQuery, queryParams);
- GremlinQuery gremlinQuery = QueryProcessor.compiledQueryCache().get(entry);
- if(gremlinQuery == null) {
- Expressions.Expression validatedExpression = parseQuery(dslQuery, queryParams);
-
- //If the final limit is 0, don't launch the query, return with 0 rows
- if (validatedExpression instanceof Expressions.LimitExpression
- && ((Integer)((Expressions.LimitExpression) validatedExpression).limit().rawValue()) == 0) {
- gremlinQuery = new NoopGremlinQuery(validatedExpression.dataType());
- }
- else {
- gremlinQuery = new GremlinTranslator(validatedExpression, graphPersistenceStrategy).translate();
- if (LOG.isDebugEnabled()) {
- LOG.debug("Query = {}", validatedExpression);
- LOG.debug("Expression Tree = {}", validatedExpression.treeString());
- LOG.debug("Gremlin Query = {}", gremlinQuery.queryStr());
- }
- }
- QueryProcessor.compiledQueryCache().put(entry, gremlinQuery);
- }
- return gremlinQuery;
- }
-
- private Expressions.Expression parseQuery(String dslQuery, QueryParams queryParams) throws DiscoveryException {
- Either<Parsers.NoSuccess, Expressions.Expression> either = QueryParser.apply(dslQuery, queryParams);
- if (either.isRight()) {
- Expressions.Expression expression = either.right().get();
- Expressions.Expression validatedExpression = QueryProcessor.validate(expression);
- return validatedExpression;
- } else {
- throw new DiscoveryException("Invalid expression : " + dslQuery + ". " + either.left());
- }
-
- }
-
- /**
- * Assumes the User is familiar with the persistence structure of the Repository.
- * The given query is run uninterpreted against the underlying Graph Store.
- * The results are returned as a List of Rows. each row is a Map of Key,Value pairs.
- *
- * @param gremlinQuery query in gremlin dsl format
- * @return List of Maps
- * @throws org.apache.atlas.discovery.DiscoveryException
- */
- @Override
- @GraphTransaction
- public List<Map<String, String>> searchByGremlin(String gremlinQuery) throws DiscoveryException {
- LOG.debug("Executing gremlin query={}", gremlinQuery);
- try {
- Object o = graph.executeGremlinScript(gremlinQuery, false);
- return extractResult(o);
- } catch (AtlasBaseException e) {
- throw new DiscoveryException(e);
- }
- }
-
- private List<Map<String, String>> extractResult(final Object o) throws DiscoveryException {
- List<Map<String, String>> result = new ArrayList<>();
- if (o instanceof List) {
- List l = (List) o;
-
- for (Object value : l) {
- Map<String, String> oRow = new HashMap<>();
- if (value instanceof Map) {
- @SuppressWarnings("unchecked") Map<Object, Object> iRow = (Map) value;
- for (Map.Entry e : iRow.entrySet()) {
- Object k = e.getKey();
- Object v = e.getValue();
- oRow.put(k.toString(), v.toString());
- }
- } else if (value instanceof AtlasVertex) {
- AtlasVertex<?,?> vertex = (AtlasVertex<?,?>)value;
- for (String key : vertex.getPropertyKeys()) {
- Object propertyValue = GraphHelper.getProperty(vertex, key);
- if (propertyValue != null) {
- oRow.put(key, propertyValue.toString());
- }
- }
- oRow.put(GREMLIN_ID_KEY, vertex.getId().toString());
-
- } else if (value instanceof String) {
- oRow.put("", value.toString());
- } else if(value instanceof AtlasEdge) {
- AtlasEdge edge = (AtlasEdge) value;
- oRow.put(GREMLIN_ID_KEY, edge.getId().toString());
- oRow.put(GREMLIN_LABEL_KEY, edge.getLabel());
- oRow.put(GREMLIN_INVERTEX_KEY, edge.getInVertex().getId().toString());
- oRow.put(GREMLIN_OUTVERTEX_KEY, edge.getOutVertex().getId().toString());
- for (String propertyKey : edge.getPropertyKeys()) {
- oRow.put(propertyKey, GraphHelper.getProperty(edge, propertyKey).toString());
- }
- } else {
- throw new DiscoveryException(String.format("Cannot process result %s", String.valueOf(value)));
- }
-
- result.add(oRow);
- }
- }
- else {
- result.add(new HashMap<String, String>() {{
- put("result", o.toString());
- }});
- }
- return result;
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/Gremlin2ExpressionFactory.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/Gremlin2ExpressionFactory.java b/repository/src/main/java/org/apache/atlas/gremlin/Gremlin2ExpressionFactory.java
deleted file mode 100644
index 27de0ed..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/Gremlin2ExpressionFactory.java
+++ /dev/null
@@ -1,379 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.gremlin;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.CastExpression;
-import org.apache.atlas.groovy.ClosureExpression;
-import org.apache.atlas.groovy.ComparisonExpression;
-import org.apache.atlas.groovy.ComparisonExpression.ComparisonOperator;
-import org.apache.atlas.groovy.ComparisonOperatorExpression;
-import org.apache.atlas.groovy.FieldExpression;
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.IdentifierExpression;
-import org.apache.atlas.groovy.ListExpression;
-import org.apache.atlas.groovy.LiteralExpression;
-import org.apache.atlas.groovy.LogicalExpression;
-import org.apache.atlas.groovy.LogicalExpression.LogicalOperator;
-import org.apache.atlas.groovy.RangeExpression;
-import org.apache.atlas.groovy.TernaryOperatorExpression;
-import org.apache.atlas.groovy.TraversalStepType;
-import org.apache.atlas.query.GraphPersistenceStrategies;
-import org.apache.atlas.query.TypeUtils.FieldInfo;
-import org.apache.atlas.typesystem.types.IDataType;
-
-
-/**
- * Generates gremlin query expressions using Gremlin 2 syntax.
- *
- */
-public class Gremlin2ExpressionFactory extends GremlinExpressionFactory {
-
- private static final String LOOP_METHOD = "loop";
- private static final String CONTAINS = "contains";
- private static final String LOOP_COUNT_FIELD = "loops";
- private static final String PATH_FIELD = "path";
- private static final String ENABLE_PATH_METHOD = "enablePath";
- private static final String BACK_METHOD = "back";
- private static final String LAST_METHOD = "last";
-
- @Override
- public GroovyExpression generateLogicalExpression(GroovyExpression parent, String operator, List<GroovyExpression> operands) {
- return new FunctionCallExpression(TraversalStepType.FILTER, parent, operator, operands);
- }
-
-
- @Override
- public GroovyExpression generateBackReferenceExpression(GroovyExpression parent, boolean inSelect, String alias) {
- if (inSelect && parent == null) {
- return getFieldInSelect();
- }
- else if (inSelect && parent != null) {
- return parent;
- }
- else {
- return new FunctionCallExpression(TraversalStepType.MAP_TO_ELEMENT, parent, BACK_METHOD, new LiteralExpression(alias));
- }
- }
-
- @Override
- public GroovyExpression getLoopExpressionParent(GroovyExpression inputQry) {
- return inputQry;
- }
-
- @Override
- public GroovyExpression generateLoopExpression(GroovyExpression parent,GraphPersistenceStrategies s, IDataType dataType, GroovyExpression loopExpr, String alias, Integer times) {
-
- GroovyExpression emitExpr = generateLoopEmitExpression(s, dataType);
- //note that in Gremlin 2 (unlike Gremlin 3), the parent is not explicitly used. It is incorporated
- //in the loopExpr.
- GroovyExpression whileFunction = null;
- if(times != null) {
- GroovyExpression loopsExpr = new FieldExpression(getItVariable(), LOOP_COUNT_FIELD);
- GroovyExpression timesExpr = new LiteralExpression(times);
- whileFunction = new ClosureExpression(new ComparisonExpression(loopsExpr, ComparisonOperator.LESS_THAN, timesExpr));
- }
- else {
- GroovyExpression pathExpr = new FieldExpression(getItVariable(),PATH_FIELD);
- GroovyExpression itObjectExpr = getCurrentObjectExpression();
- GroovyExpression pathContainsExpr = new FunctionCallExpression(pathExpr, CONTAINS, itObjectExpr);
- whileFunction = new ClosureExpression(new TernaryOperatorExpression(pathContainsExpr, LiteralExpression.FALSE, LiteralExpression.TRUE));
- }
- GroovyExpression emitFunction = new ClosureExpression(emitExpr);
- GroovyExpression loopCall = new FunctionCallExpression(TraversalStepType.BRANCH, loopExpr, LOOP_METHOD, new LiteralExpression(alias), whileFunction, emitFunction);
-
- return new FunctionCallExpression(TraversalStepType.SIDE_EFFECT, loopCall, ENABLE_PATH_METHOD);
- }
-
- @Override
- public GroovyExpression typeTestExpression(GraphPersistenceStrategies s, String typeName, GroovyExpression itRef) {
-
- GroovyExpression superTypeAttrExpr = new FieldExpression(itRef, s.superTypeAttributeName());
- GroovyExpression typeNameExpr = new LiteralExpression(typeName);
- GroovyExpression isSuperTypeExpr = new FunctionCallExpression(superTypeAttrExpr, CONTAINS, typeNameExpr);
- GroovyExpression superTypeMatchesExpr = new TernaryOperatorExpression(superTypeAttrExpr, isSuperTypeExpr, LiteralExpression.FALSE);
-
- GroovyExpression typeAttrExpr = new FieldExpression(itRef, s.typeAttributeName());
- GroovyExpression typeMatchesExpr = new ComparisonExpression(typeAttrExpr, ComparisonOperator.EQUALS, typeNameExpr);
- return new LogicalExpression(typeMatchesExpr, LogicalOperator.OR, superTypeMatchesExpr);
-
- }
-
- @Override
- public GroovyExpression generateSelectExpression(GroovyExpression parent, List<LiteralExpression> sourceNames,
- List<GroovyExpression> srcExprs) {
-
- GroovyExpression srcNamesExpr = new ListExpression(sourceNames);
- List<GroovyExpression> selectArgs = new ArrayList<>();
- selectArgs.add(srcNamesExpr);
- for(GroovyExpression expr : srcExprs) {
- selectArgs.add(new ClosureExpression(expr));
- }
- return new FunctionCallExpression(TraversalStepType.MAP_TO_VALUE, parent, SELECT_METHOD, selectArgs);
- }
-
- @Override
- public GroovyExpression generateFieldExpression(GroovyExpression parent, FieldInfo fInfo, String propertyName, boolean inSelect) {
- return new FieldExpression(parent, propertyName);
- }
-
- @Override
- public GroovyExpression generateHasExpression(GraphPersistenceStrategies s, GroovyExpression parent, String propertyName, String symbol,
- GroovyExpression requiredValue, FieldInfo fInfo) throws AtlasException {
- GroovyExpression op = gremlin2CompOp(symbol);
- GroovyExpression propertyNameExpr = new LiteralExpression(propertyName);
- return new FunctionCallExpression(TraversalStepType.FILTER, parent, HAS_METHOD, propertyNameExpr, op, requiredValue);
- }
-
- @Override
- public GroovyExpression generateLikeExpressionUsingFilter(GroovyExpression parent, String propertyName, GroovyExpression propertyValue) throws AtlasException {
- GroovyExpression itExpr = getItVariable();
- GroovyExpression nameExpr = new FieldExpression(itExpr, propertyName);
- GroovyExpression matchesExpr = new FunctionCallExpression(nameExpr, MATCHES, escapePropertyValue(propertyValue));
- GroovyExpression closureExpr = new ClosureExpression(matchesExpr);
-
- return new FunctionCallExpression(TraversalStepType.FILTER, parent, FILTER_METHOD, closureExpr);
- }
-
- private GroovyExpression escapePropertyValue(GroovyExpression propertyValue) {
- GroovyExpression ret = propertyValue;
-
- if (propertyValue instanceof LiteralExpression) {
- LiteralExpression exp = (LiteralExpression) propertyValue;
-
- if (exp != null && exp.getValue() instanceof String) {
- String stringValue = (String) exp.getValue();
-
- // replace '*' with ".*", replace '?' with '.'
- stringValue = stringValue.replaceAll("\\*", ".*")
- .replaceAll("\\?", ".");
-
- ret = new LiteralExpression(stringValue);
- }
- }
-
- return ret;
- }
-
- private GroovyExpression gremlin2CompOp(String op) throws AtlasException {
-
- GroovyExpression tExpr = new IdentifierExpression("T");
- if(op.equals("=")) {
- return new FieldExpression(tExpr, "eq");
- }
- if(op.equals("!=")) {
- return new FieldExpression(tExpr, "neq");
- }
- if(op.equals(">")) {
- return new FieldExpression(tExpr, "gt");
- }
- if(op.equals(">=")) {
- return new FieldExpression(tExpr, "gte");
- }
- if(op.equals("<")) {
- return new FieldExpression(tExpr, "lt");
- }
- if(op.equals("<=")) {
- return new FieldExpression(tExpr, "lte");
- }
- if(op.equals("in")) {
- return new FieldExpression(tExpr, "in");
- }
- throw new AtlasException("Comparison operator " + op + " not supported in Gremlin");
- }
-
- @Override
- protected GroovyExpression initialExpression(GroovyExpression varExpr, GraphPersistenceStrategies s) {
- return generateSeededTraversalExpresssion(false, varExpr);
- }
-
- @Override
- public GroovyExpression generateSeededTraversalExpresssion(boolean isMap, GroovyExpression varExpr) {
- return new FunctionCallExpression(TraversalStepType.START, varExpr, "_");
- }
-
- @Override
- public GroovyExpression generateRangeExpression(GroovyExpression parent, int startIndex, int endIndex) {
- //treat as barrier step, since limits need to be applied globally (even though it
- //is technically a filter step)
- return new RangeExpression(TraversalStepType.BARRIER, parent, startIndex, endIndex);
- }
-
- @Override
- public boolean isRangeExpression(GroovyExpression expr) {
-
- return (expr instanceof RangeExpression);
- }
-
- @Override
- public int[] getRangeParameters(AbstractFunctionExpression expr) {
-
- if (isRangeExpression(expr)) {
- RangeExpression rangeExpression = (RangeExpression) expr;
- return new int[] {rangeExpression.getStartIndex(), rangeExpression.getEndIndex()};
- }
- else {
- return null;
- }
- }
-
- @Override
- public void setRangeParameters(GroovyExpression expr, int startIndex, int endIndex) {
-
- if (isRangeExpression(expr)) {
- RangeExpression rangeExpression = (RangeExpression) expr;
- rangeExpression.setStartIndex(startIndex);
- rangeExpression.setEndIndex(endIndex);
- }
- else {
- throw new IllegalArgumentException(expr.getClass().getName() + " is not a valid range expression - must be an instance of " + RangeExpression.class.getName());
- }
-
- }
-
- @Override
- public List<GroovyExpression> getOrderFieldParents() {
-
- GroovyExpression itExpr = getItVariable();
- List<GroovyExpression> result = new ArrayList<>(2);
- result.add(new FieldExpression(itExpr, "a"));
- result.add(new FieldExpression(itExpr, "b"));
- return result;
- }
-
-
- @Override
- public GroovyExpression generateOrderByExpression(GroovyExpression parent, List<GroovyExpression> translatedOrderBy, boolean isAscending) {
-
- GroovyExpression aPropertyExpr = translatedOrderBy.get(0);
- GroovyExpression bPropertyExpr = translatedOrderBy.get(1);
-
- GroovyExpression aPropertyNotNull = new ComparisonExpression(aPropertyExpr, ComparisonOperator.NOT_EQUALS, LiteralExpression.NULL);
- GroovyExpression bPropertyNotNull = new ComparisonExpression(bPropertyExpr, ComparisonOperator.NOT_EQUALS, LiteralExpression.NULL);
-
- GroovyExpression aCondition = new TernaryOperatorExpression(aPropertyNotNull, new FunctionCallExpression(aPropertyExpr,TO_LOWER_CASE_METHOD), aPropertyExpr);
- GroovyExpression bCondition = new TernaryOperatorExpression(bPropertyNotNull, new FunctionCallExpression(bPropertyExpr,TO_LOWER_CASE_METHOD), bPropertyExpr);
-
- GroovyExpression comparisonFunction = null;
- if(isAscending) {
- comparisonFunction = new ComparisonOperatorExpression(aCondition, bCondition);
- }
- else {
- comparisonFunction = new ComparisonOperatorExpression(bCondition, aCondition);
- }
- return new FunctionCallExpression(TraversalStepType.BARRIER, parent, ORDER_METHOD, new ClosureExpression(comparisonFunction));
- }
-
-
- @Override
- public GroovyExpression getAnonymousTraversalExpression() {
- return new FunctionCallExpression(TraversalStepType.START, "_");
- }
-
-
-
- @Override
- public GroovyExpression generateGroupByExpression(GroovyExpression parent, GroovyExpression groupByExpression,
- GroovyExpression aggregationFunction) {
- GroovyExpression groupByClosureExpr = new ClosureExpression(groupByExpression);
- GroovyExpression itClosure = new ClosureExpression(getItVariable());
- GroovyExpression result = new FunctionCallExpression(TraversalStepType.BARRIER, parent, "groupBy", groupByClosureExpr, itClosure);
- result = new FunctionCallExpression(TraversalStepType.SIDE_EFFECT, result, "cap");
- result = new FunctionCallExpression(TraversalStepType.END, result, "next");
- result = new FunctionCallExpression(result, "values");
- result = new FunctionCallExpression(result, "toList");
-
- GroovyExpression aggregrationFunctionClosure = new ClosureExpression(aggregationFunction);
- result = new FunctionCallExpression(result, "collect", aggregrationFunctionClosure);
- return result;
- }
-
- @Override
- public GroovyExpression getFieldInSelect() {
- return getItVariable();
- }
- @Override
- public GroovyExpression getGroupBySelectFieldParent() {
- GroovyExpression itExpr = getItVariable();
- return new FunctionCallExpression(itExpr, LAST_METHOD);
- }
-
- //assumes cast already performed
- @Override
- public GroovyExpression generateCountExpression(GroovyExpression itExpr) {
- return new FunctionCallExpression(itExpr, "size");
- }
-
- @Override
- public String getTraversalExpressionClass() {
- return "GremlinPipeline";
- }
-
-
- @Override
- public boolean isSelectGeneratesMap(int aliasCount) {
- //in Gremlin 2 select always generates a map
- return true;
- }
-
- @Override
- public GroovyExpression generateMapExpression(GroovyExpression parent, ClosureExpression closureExpression) {
- return new FunctionCallExpression(TraversalStepType.MAP_TO_ELEMENT, parent, "transform", closureExpression);
- }
-
- @Override
- public GroovyExpression generateGetSelectedValueExpression(LiteralExpression key,
- GroovyExpression rowMap) {
- rowMap = new CastExpression(rowMap, "Row");
- GroovyExpression getExpr = new FunctionCallExpression(rowMap, "getColumn", key);
- return getExpr;
- }
-
- @Override
- public GroovyExpression getCurrentTraverserObject(GroovyExpression traverser) {
- return traverser;
- }
-
- public List<String> getAliasesRequiredByExpression(GroovyExpression expr) {
- if(!(expr instanceof FunctionCallExpression)) {
- return Collections.emptyList();
- }
- FunctionCallExpression fc = (FunctionCallExpression)expr;
- if(! fc.getFunctionName().equals(LOOP_METHOD)) {
- return Collections.emptyList();
- }
- LiteralExpression aliasName = (LiteralExpression)fc.getArguments().get(0);
- return Collections.singletonList(aliasName.getValue().toString());
- }
-
- @Override
- public boolean isRepeatExpression(GroovyExpression expr) {
- if(!(expr instanceof FunctionCallExpression)) {
- return false;
- }
- return ((FunctionCallExpression)expr).getFunctionName().equals(LOOP_METHOD);
- }
-}
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/gremlin/Gremlin3ExpressionFactory.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/gremlin/Gremlin3ExpressionFactory.java b/repository/src/main/java/org/apache/atlas/gremlin/Gremlin3ExpressionFactory.java
deleted file mode 100644
index b936695..0000000
--- a/repository/src/main/java/org/apache/atlas/gremlin/Gremlin3ExpressionFactory.java
+++ /dev/null
@@ -1,485 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.gremlin;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.groovy.AbstractFunctionExpression;
-import org.apache.atlas.groovy.CastExpression;
-import org.apache.atlas.groovy.ClosureExpression;
-import org.apache.atlas.groovy.ComparisonExpression;
-import org.apache.atlas.groovy.ComparisonExpression.ComparisonOperator;
-import org.apache.atlas.groovy.ComparisonOperatorExpression;
-import org.apache.atlas.groovy.FieldExpression;
-import org.apache.atlas.groovy.FunctionCallExpression;
-import org.apache.atlas.groovy.GroovyExpression;
-import org.apache.atlas.groovy.IdentifierExpression;
-import org.apache.atlas.groovy.LiteralExpression;
-import org.apache.atlas.groovy.LogicalExpression;
-import org.apache.atlas.groovy.LogicalExpression.LogicalOperator;
-import org.apache.atlas.groovy.TernaryOperatorExpression;
-import org.apache.atlas.groovy.TraversalStepType;
-import org.apache.atlas.groovy.TypeCoersionExpression;
-import org.apache.atlas.query.GraphPersistenceStrategies;
-import org.apache.atlas.query.TypeUtils.FieldInfo;
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.IDataType;
-
-/**
- * Generates gremlin query expressions using Gremlin 3 syntax.
- *
- */
-public class Gremlin3ExpressionFactory extends GremlinExpressionFactory {
-
-
-
- private static final String VERTEX_LIST_CLASS = "List<Vertex>";
- private static final String VERTEX_ARRAY_CLASS = "Vertex[]";
- private static final String OBJECT_ARRAY_CLASS = "Object[]";
- private static final String VERTEX_CLASS = "Vertex";
- private static final String FUNCTION_CLASS = "Function";
-
- private static final String VALUE_METHOD = "value";
- private static final String IS_PRESENT_METHOD = "isPresent";
- private static final String MAP_METHOD = "map";
- private static final String VALUES_METHOD = "values";
- private static final String GET_METHOD = "get";
- private static final String OR_ELSE_METHOD = "orElse";
- private static final String PROPERTY_METHOD = "property";
- private static final String BY_METHOD = "by";
- private static final String EQ_METHOD = "eq";
- private static final String EMIT_METHOD = "emit";
- private static final String TIMES_METHOD = "times";
- private static final String REPEAT_METHOD = "repeat";
- private static final String RANGE_METHOD = "range";
- private static final String LAST_METHOD = "last";
- private static final String TO_STRING_METHOD = "toString";
-
- private static final GroovyExpression EMPTY_STRING_EXPRESSION = new LiteralExpression("");
-
- @Override
- public GroovyExpression generateLogicalExpression(GroovyExpression parent, String operator,
- List<GroovyExpression> operands) {
- return new FunctionCallExpression(TraversalStepType.FILTER, parent, operator, operands);
- }
-
- @Override
- public GroovyExpression generateBackReferenceExpression(GroovyExpression parent, boolean inSelect, String alias) {
- if (inSelect) {
- return getFieldInSelect();
- } else {
- return new FunctionCallExpression(TraversalStepType.MAP_TO_ELEMENT, parent, SELECT_METHOD, new LiteralExpression(alias));
- }
- }
-
- @Override
- public GroovyExpression typeTestExpression(GraphPersistenceStrategies s, String typeName, GroovyExpression itRef) {
- LiteralExpression superTypeAttrExpr = new LiteralExpression(s.superTypeAttributeName());
- LiteralExpression typeNameExpr = new LiteralExpression(typeName);
- LiteralExpression typeAttrExpr = new LiteralExpression(s.typeAttributeName());
- FunctionCallExpression result = new FunctionCallExpression(TraversalStepType.FILTER, HAS_METHOD, typeAttrExpr, new FunctionCallExpression(EQ_METHOD, typeNameExpr));
- result = new FunctionCallExpression(TraversalStepType.FILTER, result, "or");
- result = new FunctionCallExpression(TraversalStepType.FILTER, result, HAS_METHOD, superTypeAttrExpr, new FunctionCallExpression(EQ_METHOD, typeNameExpr));
- return result;
-
- }
-
- @Override
- public GroovyExpression generateLoopExpression(GroovyExpression parent,GraphPersistenceStrategies s, IDataType dataType, GroovyExpression loopExpr, String alias, Integer times) {
-
- GroovyExpression emitExpr = generateLoopEmitExpression(s, dataType);
-
- GroovyExpression result = new FunctionCallExpression(TraversalStepType.BRANCH, parent, REPEAT_METHOD, loopExpr);
- if (times != null) {
- GroovyExpression timesExpr = new LiteralExpression(times);
- result = new FunctionCallExpression(TraversalStepType.SIDE_EFFECT, result, TIMES_METHOD, timesExpr);
- }
- result = new FunctionCallExpression(TraversalStepType.SIDE_EFFECT, result, EMIT_METHOD, emitExpr);
- return result;
-
- }
-
- @Override
- public GroovyExpression getLoopExpressionParent(GroovyExpression inputQry) {
- GroovyExpression curTraversal = getAnonymousTraversalStartExpression();
- return curTraversal;
- }
-
- private IdentifierExpression getAnonymousTraversalStartExpression() {
- return new IdentifierExpression(TraversalStepType.START, "__");
- }
-
- @Override
- public GroovyExpression generateSelectExpression(GroovyExpression parent, List<LiteralExpression> sourceNames,
- List<GroovyExpression> srcExprs) {
- FunctionCallExpression result = new FunctionCallExpression(TraversalStepType.MAP_TO_VALUE, parent, SELECT_METHOD, sourceNames);
-
- for (GroovyExpression expr : srcExprs) {
- GroovyExpression closure = new ClosureExpression(expr);
- GroovyExpression castClosure = new TypeCoersionExpression(closure, FUNCTION_CLASS);
- result = new FunctionCallExpression(TraversalStepType.SIDE_EFFECT, result, BY_METHOD, castClosure);
- }
- return result;
- }
-
- @Override
- public GroovyExpression generateFieldExpression(GroovyExpression parent, FieldInfo fInfo,
- String propertyName, boolean inSelect) {
-
- AttributeInfo attrInfo = fInfo.attrInfo();
- IDataType attrType = attrInfo.dataType();
- GroovyExpression propertyNameExpr = new LiteralExpression(propertyName);
- //Whether it is the user or shared graph does not matter here, since we're
- //just getting the conversion expression. Ideally that would be moved someplace else.
- AtlasGraph graph = AtlasGraphProvider.getGraphInstance();
- if (inSelect) {
-
- GroovyExpression expr = new FunctionCallExpression(parent, PROPERTY_METHOD, propertyNameExpr);
- expr = new FunctionCallExpression(expr, OR_ELSE_METHOD, LiteralExpression.NULL);
- return graph.generatePersisentToLogicalConversionExpression(expr, attrType);
- } else {
-
- GroovyExpression unmapped = new FunctionCallExpression(TraversalStepType.FLAT_MAP_TO_VALUES, parent, VALUES_METHOD, propertyNameExpr);
- if (graph.isPropertyValueConversionNeeded(attrType)) {
- GroovyExpression toConvert = new FunctionCallExpression(getItVariable(), GET_METHOD);
-
- GroovyExpression conversionFunction = graph.generatePersisentToLogicalConversionExpression(toConvert,
- attrType);
- return new FunctionCallExpression(TraversalStepType.MAP_TO_VALUE, unmapped, MAP_METHOD, new ClosureExpression(conversionFunction));
- } else {
- return unmapped;
- }
-
- }
- }
-
- private ComparisonOperator getGroovyOperator(String symbol) throws AtlasException {
-
- String toFind = symbol;
- if (toFind.equals("=")) {
- toFind = "==";
- }
- return ComparisonOperator.lookup(toFind);
- }
-
- private String getComparisonFunction(String op) throws AtlasException {
-
- if (op.equals("=")) {
- return "eq";
- }
- if (op.equals("!=")) {
- return "neq";
- }
- if (op.equals(">")) {
- return "gt";
- }
- if (op.equals(">=")) {
- return "gte";
- }
- if (op.equals("<")) {
- return "lt";
- }
- if (op.equals("<=")) {
- return "lte";
- }
- throw new AtlasException("Comparison operator " + op + " not supported in Gremlin");
- }
-
- @Override
- public GroovyExpression generateHasExpression(GraphPersistenceStrategies s, GroovyExpression parent,
- String propertyName, String symbol, GroovyExpression requiredValue, FieldInfo fInfo) throws AtlasException {
-
- AttributeInfo attrInfo = fInfo.attrInfo();
- IDataType attrType = attrInfo.dataType();
- GroovyExpression propertNameExpr = new LiteralExpression(propertyName);
- if (s.isPropertyValueConversionNeeded(attrType)) {
- // for some types, the logical value cannot be stored directly in
- // the underlying graph,
- // and conversion logic is needed to convert the persistent form of
- // the value
- // to the actual value. In cases like this, we generate a conversion
- // expression to
- // do this conversion and use the filter step to perform the
- // comparsion in the gremlin query
- GroovyExpression itExpr = getItVariable();
- GroovyExpression vertexExpr = new CastExpression(new FunctionCallExpression(itExpr, GET_METHOD), VERTEX_CLASS);
- GroovyExpression propertyValueExpr = new FunctionCallExpression(vertexExpr, VALUE_METHOD, propertNameExpr);
- GroovyExpression conversionExpr = s.generatePersisentToLogicalConversionExpression(propertyValueExpr,
- attrType);
-
- GroovyExpression propertyIsPresentExpression = new FunctionCallExpression(
- new FunctionCallExpression(vertexExpr, PROPERTY_METHOD, propertNameExpr), IS_PRESENT_METHOD);
- GroovyExpression valueMatchesExpr = new ComparisonExpression(conversionExpr, getGroovyOperator(symbol),
- requiredValue);
-
- GroovyExpression filterCondition = new LogicalExpression(propertyIsPresentExpression, LogicalOperator.AND,
- valueMatchesExpr);
-
- GroovyExpression filterFunction = new ClosureExpression(filterCondition);
- return new FunctionCallExpression(TraversalStepType.FILTER, parent, FILTER_METHOD, filterFunction);
- } else {
- GroovyExpression valueMatches = new FunctionCallExpression(getComparisonFunction(symbol), requiredValue);
- return new FunctionCallExpression(TraversalStepType.FILTER, parent, HAS_METHOD, propertNameExpr, valueMatches);
- }
- }
-
- @Override
- public GroovyExpression generateLikeExpressionUsingFilter(GroovyExpression parent, String propertyName, GroovyExpression propertyValue) throws AtlasException {
- GroovyExpression vertexExpr = new FunctionCallExpression(getItVariable(), GET_METHOD);
- GroovyExpression propertyValueExpr = new FunctionCallExpression(vertexExpr, VALUE_METHOD, new LiteralExpression(propertyName));
- GroovyExpression matchesExpr = new FunctionCallExpression(propertyValueExpr, MATCHES, escapePropertyValue(propertyValue));
- GroovyExpression closureExpr = new ClosureExpression(matchesExpr);
-
- return new FunctionCallExpression(TraversalStepType.FILTER, parent, FILTER_METHOD, closureExpr);
- }
-
- private GroovyExpression escapePropertyValue(GroovyExpression propertyValue) {
- GroovyExpression ret = propertyValue;
-
- if (propertyValue instanceof LiteralExpression) {
- LiteralExpression exp = (LiteralExpression) propertyValue;
-
- if (exp != null && exp.getValue() instanceof String) {
- String stringValue = (String) exp.getValue();
-
- // replace '*' with ".*", replace '?' with '.'
- stringValue = stringValue.replaceAll("\\*", ".*")
- .replaceAll("\\?", ".");
-
- ret = new LiteralExpression(stringValue);
- }
- }
-
- return ret;
- }
-
- @Override
- protected GroovyExpression initialExpression(GroovyExpression varExpr, GraphPersistenceStrategies s) {
-
- // this bit of groovy magic converts the set of vertices in varName into
- // a String containing the ids of all the vertices. This becomes the
- // argument
- // to g.V(). This is needed because Gremlin 3 does not support
- // _()
- // s"g.V(${varName}.collect{it.id()} as String[])"
-
- GroovyExpression gExpr = getGraphExpression();
- GroovyExpression varRefExpr = new TypeCoersionExpression(varExpr, OBJECT_ARRAY_CLASS);
- GroovyExpression matchingVerticesExpr = new FunctionCallExpression(TraversalStepType.START, gExpr, V_METHOD, varRefExpr);
- GroovyExpression isEmpty = new FunctionCallExpression(varExpr, "isEmpty");
- GroovyExpression emptyGraph = getEmptyTraversalExpression();
-
- GroovyExpression expr = new TernaryOperatorExpression(isEmpty, emptyGraph, matchingVerticesExpr);
-
- return s.addInitialQueryCondition(expr);
- }
-
- private GroovyExpression getEmptyTraversalExpression() {
- GroovyExpression emptyGraph = new FunctionCallExpression(TraversalStepType.START, getGraphExpression(), V_METHOD, EMPTY_STRING_EXPRESSION);
- return emptyGraph;
- }
-
- @Override
- public GroovyExpression generateRangeExpression(GroovyExpression parent, int startIndex, int endIndex) {
- //treat as barrier step, since limits need to be applied globally (even though it
- //is technically a filter step)
- return new FunctionCallExpression(TraversalStepType.BARRIER, parent, RANGE_METHOD, new LiteralExpression(startIndex), new LiteralExpression(endIndex));
- }
-
- @Override
- public boolean isRangeExpression(GroovyExpression expr) {
-
- return (expr instanceof FunctionCallExpression && ((FunctionCallExpression)expr).getFunctionName().equals(RANGE_METHOD));
- }
-
- @Override
- public int[] getRangeParameters(AbstractFunctionExpression expr) {
-
- if (isRangeExpression(expr)) {
- FunctionCallExpression rangeExpression = (FunctionCallExpression) expr;
- List<GroovyExpression> arguments = rangeExpression.getArguments();
- int startIndex = (int)((LiteralExpression)arguments.get(0)).getValue();
- int endIndex = (int)((LiteralExpression)arguments.get(1)).getValue();
- return new int[]{startIndex, endIndex};
- }
- else {
- return null;
- }
- }
-
- @Override
- public void setRangeParameters(GroovyExpression expr, int startIndex, int endIndex) {
-
- if (isRangeExpression(expr)) {
- FunctionCallExpression rangeExpression = (FunctionCallExpression) expr;
- rangeExpression.setArgument(0, new LiteralExpression(Integer.valueOf(startIndex)));
- rangeExpression.setArgument(1, new LiteralExpression(Integer.valueOf(endIndex)));
- }
- else {
- throw new IllegalArgumentException(expr + " is not a valid range expression");
- }
- }
-
- @Override
- public List<GroovyExpression> getOrderFieldParents() {
-
- List<GroovyExpression> result = new ArrayList<>(1);
- result.add(null);
- return result;
- }
-
- @Override
- public GroovyExpression generateOrderByExpression(GroovyExpression parent, List<GroovyExpression> translatedOrderBy,
- boolean isAscending) {
-
- GroovyExpression orderByExpr = translatedOrderBy.get(0);
- GroovyExpression orderByClosure = new ClosureExpression(orderByExpr);
- GroovyExpression orderByClause = new TypeCoersionExpression(orderByClosure, FUNCTION_CLASS);
-
- GroovyExpression aExpr = new IdentifierExpression("a");
- GroovyExpression bExpr = new IdentifierExpression("b");
-
- GroovyExpression aCompExpr = new FunctionCallExpression(new FunctionCallExpression(aExpr, TO_STRING_METHOD), TO_LOWER_CASE_METHOD);
- GroovyExpression bCompExpr = new FunctionCallExpression(new FunctionCallExpression(bExpr, TO_STRING_METHOD), TO_LOWER_CASE_METHOD);
-
- GroovyExpression comparisonExpr = null;
- if (isAscending) {
- comparisonExpr = new ComparisonOperatorExpression(aCompExpr, bCompExpr);
- } else {
- comparisonExpr = new ComparisonOperatorExpression(bCompExpr, aCompExpr);
- }
- ClosureExpression comparisonFunction = new ClosureExpression(comparisonExpr, "a", "b");
- FunctionCallExpression orderCall = new FunctionCallExpression(TraversalStepType.BARRIER, parent, ORDER_METHOD);
- return new FunctionCallExpression(TraversalStepType.SIDE_EFFECT, orderCall, BY_METHOD, orderByClause, comparisonFunction);
- }
-
- @Override
- public GroovyExpression getAnonymousTraversalExpression() {
- return null;
- }
-
- @Override
- public GroovyExpression getFieldInSelect() {
- // this logic is needed to remove extra results from
- // what is emitted by repeat loops. Technically
- // for queries that don't have a loop in them we could just use "it"
- // the reason for this is that in repeat loops with an alias,
- // although the alias gets set to the right value, for some
- // reason the select actually includes all vertices that were traversed
- // through in the loop. In these cases, we only want the last vertex
- // traversed in the loop to be selected. The logic here handles that
- // case by converting the result to a list and just selecting the
- // last item from it.
-
- GroovyExpression itExpr = getItVariable();
- GroovyExpression expr1 = new TypeCoersionExpression(itExpr, VERTEX_ARRAY_CLASS);
- GroovyExpression expr2 = new TypeCoersionExpression(expr1, VERTEX_LIST_CLASS);
-
- return new FunctionCallExpression(expr2, LAST_METHOD);
- }
-
- @Override
- public GroovyExpression generateGroupByExpression(GroovyExpression parent, GroovyExpression groupByExpression,
- GroovyExpression aggregationFunction) {
-
- GroovyExpression result = new FunctionCallExpression(TraversalStepType.BARRIER, parent, "group");
- GroovyExpression groupByClosureExpr = new TypeCoersionExpression(new ClosureExpression(groupByExpression), "Function");
- result = new FunctionCallExpression(TraversalStepType.SIDE_EFFECT, result, "by", groupByClosureExpr);
- result = new FunctionCallExpression(TraversalStepType.END, result, "toList");
-
- GroovyExpression mapValuesClosure = new ClosureExpression(new FunctionCallExpression(new CastExpression(getItVariable(), "Map"), "values"));
-
- result = new FunctionCallExpression(result, "collect", mapValuesClosure);
-
- //when we call Map.values(), we end up with an extra list around the result. We remove this by calling toList().get(0). This
- //leaves us with a list of lists containing the vertices that match each group. We then apply the aggregation functions
- //specified in the select list to each of these inner lists.
-
- result = new FunctionCallExpression(result ,"toList");
- result = new FunctionCallExpression(result, "get", new LiteralExpression(0));
-
- GroovyExpression aggregrationFunctionClosure = new ClosureExpression(aggregationFunction);
- result = new FunctionCallExpression(result, "collect", aggregrationFunctionClosure);
- return result;
- }
-
- @Override
- public GroovyExpression generateSeededTraversalExpresssion(boolean isMap, GroovyExpression valueCollection) {
- GroovyExpression coersedExpression = new TypeCoersionExpression(valueCollection, isMap ? "Map[]" : "Vertex[]");
- if(isMap) {
-
- return new FunctionCallExpression(TraversalStepType.START, "__", coersedExpression);
- }
- else {
- //We cannot always use an anonymous traversal because that breaks repeat steps
- return new FunctionCallExpression(TraversalStepType.START, getEmptyTraversalExpression(), "inject", coersedExpression);
- }
- }
-
- @Override
- public GroovyExpression getGroupBySelectFieldParent() {
- return null;
- }
-
- @Override
- public String getTraversalExpressionClass() {
- return "GraphTraversal";
- }
-
- @Override
- public boolean isSelectGeneratesMap(int aliasCount) {
- //in Gremlin 3, you only get a map if there is more than 1 alias.
- return aliasCount > 1;
- }
-
- @Override
- public GroovyExpression generateMapExpression(GroovyExpression parent, ClosureExpression closureExpression) {
- return new FunctionCallExpression(TraversalStepType.MAP_TO_ELEMENT, parent, "map", closureExpression);
- }
-
- @Override
- public GroovyExpression generateGetSelectedValueExpression(LiteralExpression key,
- GroovyExpression rowMapExpr) {
- rowMapExpr = new CastExpression(rowMapExpr, "Map");
- GroovyExpression getExpr = new FunctionCallExpression(rowMapExpr, "get", key);
- return getExpr;
- }
-
- @Override
- public GroovyExpression getCurrentTraverserObject(GroovyExpression traverser) {
- return new FunctionCallExpression(traverser, "get");
- }
-
- public List<String> getAliasesRequiredByExpression(GroovyExpression expr) {
- return Collections.emptyList();
- }
-
- @Override
- public boolean isRepeatExpression(GroovyExpression expr) {
- if(!(expr instanceof FunctionCallExpression)) {
- return false;
- }
- return ((FunctionCallExpression)expr).getFunctionName().equals(REPEAT_METHOD);
- }
-}
[18/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/GraphRepoMapperScaleTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphRepoMapperScaleTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphRepoMapperScaleTest.java
deleted file mode 100755
index 1f077ba..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/GraphRepoMapperScaleTest.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.ApplicationProperties;
-import org.apache.atlas.CreateUpdateEntitiesResult;
-import org.apache.atlas.TestModules;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.annotation.GraphTransaction;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.AtlasGraphQuery;
-import org.apache.atlas.repository.graphdb.AtlasGraphQuery.ComparisionOperator;
-import org.apache.atlas.repository.graphdb.AtlasIndexQuery;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.exception.EntityExistsException;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.Iterator;
-
-@Test
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class GraphRepoMapperScaleTest {
-
- private static final String DATABASE_NAME = "foo";
- private static final String TABLE_NAME = "bar";
-
- @Inject
- private GraphBackedMetadataRepository repositoryService;
-
- @Inject
- private GraphBackedSearchIndexer searchIndexer;
-
- private TypeSystem typeSystem = TypeSystem.getInstance();
-
- private String dbGUID;
-
- @BeforeClass
- @GraphTransaction
- public void setUp() throws Exception {
- //force up front graph initialization
- TestUtils.getGraph();
- searchIndexer = new GraphBackedSearchIndexer(new AtlasGraphProvider(), ApplicationProperties.get(), new AtlasTypeRegistry());
- //Make sure we can cleanup the index directory
- Collection<IDataType> typesAdded = TestUtils.createHiveTypes(typeSystem);
- searchIndexer.onAdd(typesAdded);
- }
-
- @BeforeMethod
- public void setupContext() {
- TestUtils.resetRequestContext();
- }
-
- @AfterClass
- public void tearDown() throws Exception {
- TypeSystem.getInstance().reset();
-// AtlasGraphProvider.cleanup();
- }
-
- @Test
- public void testSubmitEntity() throws Exception {
- Referenceable databaseInstance = new Referenceable(TestUtils.DATABASE_TYPE);
- databaseInstance.set("name", DATABASE_NAME);
- databaseInstance.set("description", "foo database");
- // System.out.println("databaseInstance = " + databaseInstance);
-
- ClassType dbType = typeSystem.getDataType(ClassType.class, TestUtils.DATABASE_TYPE);
- ITypedReferenceableInstance db = dbType.convert(databaseInstance, Multiplicity.REQUIRED);
-
- dbGUID = result(db).getCreatedEntities().get(0);
-
- Referenceable dbInstance = new Referenceable(dbGUID, TestUtils.DATABASE_TYPE, databaseInstance.getValuesMap());
-
- for (int index = 0; index < 1000; index++) {
- ITypedReferenceableInstance table = createHiveTableInstance(dbInstance, index);
- result(table);
- }
- }
-
- private CreateUpdateEntitiesResult result(ITypedReferenceableInstance db)
- throws RepositoryException, EntityExistsException {
- return repositoryService.createEntities(db);
- }
-
- @Test(dependsOnMethods = "testSubmitEntity")
- public void testSearchIndex() throws Exception {
-
- //Elasticsearch requires some time before index is updated
- Thread.sleep(5000);
- searchWithOutIndex(Constants.GUID_PROPERTY_KEY, dbGUID);
- searchWithOutIndex(Constants.ENTITY_TYPE_PROPERTY_KEY, "column_type");
- searchWithOutIndex(Constants.ENTITY_TYPE_PROPERTY_KEY, TestUtils.TABLE_TYPE);
-
- searchWithOutIndex("hive_table.name", "bar-999");
- searchWithIndex("hive_table.name", "bar-999");
- searchWithIndex("hive_table.created", ComparisionOperator.GREATER_THAN_EQUAL, TestUtils.TEST_DATE_IN_LONG, 1000);
-
- for (int index = 500; index < 600; index++) {
- searchWithIndex("hive_table.name", "bar-" + index);
- }
-
- searchWithIndex(Constants.STATE_PROPERTY_KEY, Id.EntityState.ACTIVE.name());
- }
-
- private void searchWithOutIndex(String key, String value) {
- AtlasGraph graph = TestUtils.getGraph();
- long start = System.currentTimeMillis();
- int count = 0;
- try {
- AtlasGraphQuery query = graph.query().has(key, ComparisionOperator.EQUAL, value);
- Iterable<AtlasVertex> result = query.vertices();
- for (AtlasVertex ignored : result) {
- count++;
- }
- } finally {
- System.out.println("Search on [" + key + "=" + value + "] returned results: " + count + ", took " + (
- System.currentTimeMillis() - start) + " ms");
- }
- }
-
-
- private void searchWithIndex(String key, String value) {
- AtlasGraph graph = TestUtils.getGraph();
- long start = System.currentTimeMillis();
- int count = 0;
- try {
- String queryString = "v.\"" + key + "\":(" + value + ")";
- AtlasIndexQuery query = graph.indexQuery(Constants.VERTEX_INDEX, queryString);
- Iterator<AtlasIndexQuery.Result> result = query.vertices();
- while(result.hasNext()) {
- result.next();
- count++;
- }
- } finally {
- System.out.println("Search on [" + key + "=" + value + "] returned results: " + count + ", took " + (
- System.currentTimeMillis() - start) + " ms");
- }
- }
-
- private void searchWithIndex(String key, ComparisionOperator op, Object value, int expectedResults) {
- AtlasGraph graph = TestUtils.getGraph();
- long start = System.currentTimeMillis();
- int count = 0;
- try {
- AtlasGraphQuery query = graph.query().has(key, op, value);
- Iterable<AtlasVertex> itrble = query.vertices();
- for (AtlasVertex ignored : itrble) {
- count++;
- }
- } finally {
- System.out.println("Search on [" + key + "=" + value + "] returned results: " + count + ", took " + (
- System.currentTimeMillis() - start) + " ms");
- Assert.assertEquals(count, expectedResults);
- }
- }
-
- private ITypedReferenceableInstance createHiveTableInstance(Referenceable databaseInstance, int uberIndex)
- throws Exception {
-
- Referenceable tableInstance = new Referenceable(TestUtils.TABLE_TYPE);
- tableInstance.set("name", TABLE_NAME + "-" + uberIndex);
- tableInstance.set("description", "bar table" + "-" + uberIndex);
- tableInstance.set("type", "managed");
- tableInstance.set("created", new Date(TestUtils.TEST_DATE_IN_LONG));
- tableInstance.set("tableType", 1); // enum
-
- // refer to an existing class
- tableInstance.set("database", databaseInstance);
-
- ArrayList<String> columnNames = new ArrayList<>();
- columnNames.add("first_name" + "-" + uberIndex);
- columnNames.add("last_name" + "-" + uberIndex);
- tableInstance.set("columnNames", columnNames);
-
- Struct serde1Instance = new Struct("serdeType");
- serde1Instance.set("name", "serde1" + "-" + uberIndex);
- serde1Instance.set("serde", "serde1" + "-" + uberIndex);
- tableInstance.set("serde1", serde1Instance);
-
- Struct serde2Instance = new Struct("serdeType");
- serde2Instance.set("name", "serde2" + "-" + uberIndex);
- serde2Instance.set("serde", "serde2" + "-" + uberIndex);
- tableInstance.set("serde2", serde2Instance);
-
- ArrayList<Referenceable> columns = new ArrayList<>();
- for (int index = 0; index < 5; index++) {
- Referenceable columnInstance = new Referenceable("column_type");
- columnInstance.set("name", "column_" + "-" + uberIndex + "-" + index);
- columnInstance.set("type", "string");
- columns.add(columnInstance);
- }
- tableInstance.set("columns", columns);
-
- ArrayList<Struct> partitions = new ArrayList<>();
- for (int index = 0; index < 5; index++) {
- Struct partitionInstance = new Struct(TestUtils.PARTITION_STRUCT_TYPE);
- partitionInstance.set("name", "partition_" + "-" + uberIndex + "-" + index);
- partitions.add(partitionInstance);
- }
- tableInstance.set("partitions", partitions);
-
- ClassType tableType = typeSystem.getDataType(ClassType.class, TestUtils.TABLE_TYPE);
- return tableType.convert(tableInstance, Multiplicity.REQUIRED);
- }
-}
-
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/Gremlin2QueryOptimizerTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/Gremlin2QueryOptimizerTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/Gremlin2QueryOptimizerTest.java
deleted file mode 100644
index 8f638a0..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/Gremlin2QueryOptimizerTest.java
+++ /dev/null
@@ -1,364 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.gremlin.Gremlin2ExpressionFactory;
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.testng.annotations.Test;
-
-
-@Test
-public class Gremlin2QueryOptimizerTest extends AbstractGremlinQueryOptimizerTest {
- private static GremlinExpressionFactory FACTORY = null;
-
- @Override
- protected GremlinExpressionFactory getFactory() {
- if (null == FACTORY) {
- FACTORY = new Gremlin2ExpressionFactory();
- }
- return FACTORY;
- }
-
- @Override
- protected String getExpectedGremlinForTestPullHasExpressionsOutOfHas() {
- return "g.V().has('prop1',T.'eq','Fred').has('prop2',T.'eq','George').and(out('out1'),out('out2'))";
- }
-
- @Override
- protected String getExpectedGremlinForTestOrGrouping() {
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',T.'eq','Fred').fill(r);"
- + "g.V().has('prop2',T.'eq','George').fill(r);"
- + "g.V().or(out('out1'),out('out2')).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestAndOfOrs() {
- return "def r=(([]) as Set);"
- + "g.V().has('p1',T.'eq','e1').has('p3',T.'eq','e3').fill(r);"
- + "g.V().has('p1',T.'eq','e1').has('p4',T.'eq','e4').fill(r);"
- + "g.V().has('p2',T.'eq','e2').has('p3',T.'eq','e3').fill(r);"
- + "g.V().has('p2',T.'eq','e2').has('p4',T.'eq','e4').fill(r);"
- + "r";
- }
-
-
- @Override
- protected String getExpectedGremlinForTestAndWithMultiCallArguments() {
- return "g.V().has('p1',T.'eq','e1').has('p2',T.'eq','e2').has('p3',T.'eq','e3').has('p4',T.'eq','e4')";
- }
-
- @Override
- protected String getExpectedGremlinForTestOrOfAnds() {
-
- return "def r=(([]) as Set);"
- + "g.V().has('p1',T.'eq','e1').has('p2',T.'eq','e2').fill(r);"
- + "g.V().has('p3',T.'eq','e3').has('p4',T.'eq','e4').fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestHasNotMovedToResult() {
- return "def r=(([]) as Set);"
- + "def f1={GremlinPipeline x->x.has('p3',T.'eq','e3').as('_src').select(['_src']).fill(r)};"
- + "f1(g.V().has('p1',T.'eq','e1'));"
- + "f1(g.V().has('p2',T.'eq','e2'));"
- + "r._().transform({((Row)it).getColumn('_src')}).as('_src').select(['src1'],{it})";
- }
-
- @Override
- protected String getExpectedGremlinForOptimizeLoopExpression() {
- return "def r=(([]) as Set);"
- + "g.V().has('__typeName','DataSet').has('name',T.'eq','Fred').fill(r);"
- + "g.V().has('__superTypeNames','DataSet').has('name',T.'eq','Fred').fill(r);"
- + "r._().as('label').in('inputTables').out('outputTables').loop('label',{((it.'path'.contains(it.'object'))?(false):(true))},{it.'object'.'__typeName' == 'string' || ((it.'object'.'__superTypeNames')?(it.'object'.'__superTypeNames'.contains('string')):(false))}).enablePath().toList()";
- }
-
-
- @Override
- protected String getExpectedGremlinForTestLongStringEndingWithOr() {
- return "def r=(([]) as Set);"
- + "def f1={g.V().has('name',T.'eq','Fred').has('age',T.'eq','13').out('livesIn').has('state',T.'eq','Massachusetts')};"
- + "def f2={GremlinPipeline x->x.has('p5',T.'eq','e5').has('p6',T.'eq','e6')};"
- + "f2(f1().has('p1',T.'eq','e1').has('p3',T.'eq','e3')).has('p7',T.'eq','e7').fill(r);"
- + "f2(f1().has('p1',T.'eq','e1').has('p3',T.'eq','e3')).has('p8',T.'eq','e8').fill(r);"
- + "f2(f1().has('p1',T.'eq','e1').has('p4',T.'eq','e4')).has('p7',T.'eq','e7').fill(r);"
- + "f2(f1().has('p1',T.'eq','e1').has('p4',T.'eq','e4')).has('p8',T.'eq','e8').fill(r);"
- + "f2(f1().has('p2',T.'eq','e2').has('p3',T.'eq','e3')).has('p7',T.'eq','e7').fill(r);"
- + "f2(f1().has('p2',T.'eq','e2').has('p3',T.'eq','e3')).has('p8',T.'eq','e8').fill(r);"
- + "f2(f1().has('p2',T.'eq','e2').has('p4',T.'eq','e4')).has('p7',T.'eq','e7').fill(r);"
- + "f2(f1().has('p2',T.'eq','e2').has('p4',T.'eq','e4')).has('p8',T.'eq','e8').fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestLongStringNotEndingWithOr() {
- return "def r=(([]) as Set);"
- + "def f1={g.V().has('name',T.'eq','Fred').has('age',T.'eq','13').out('livesIn').has('state',T.'eq','Massachusetts')};"
- + "def f2={GremlinPipeline x->x.has('p5',T.'eq','e5').has('p6',T.'eq','e6')};"
- + "def f3={GremlinPipeline x->x.has('p9',T.'eq','e9').fill(r)};"
- + "f3(f2(f1().has('p1',T.'eq','e1').has('p3',T.'eq','e3')).has('p7',T.'eq','e7'));"
- + "f3(f2(f1().has('p1',T.'eq','e1').has('p3',T.'eq','e3')).has('p8',T.'eq','e8'));"
- + "f3(f2(f1().has('p1',T.'eq','e1').has('p4',T.'eq','e4')).has('p7',T.'eq','e7'));"
- + "f3(f2(f1().has('p1',T.'eq','e1').has('p4',T.'eq','e4')).has('p8',T.'eq','e8'));"
- + "f3(f2(f1().has('p2',T.'eq','e2').has('p3',T.'eq','e3')).has('p7',T.'eq','e7'));"
- + "f3(f2(f1().has('p2',T.'eq','e2').has('p3',T.'eq','e3')).has('p8',T.'eq','e8'));"
- + "f3(f2(f1().has('p2',T.'eq','e2').has('p4',T.'eq','e4')).has('p7',T.'eq','e7'));"
- + "f3(f2(f1().has('p2',T.'eq','e2').has('p4',T.'eq','e4')).has('p8',T.'eq','e8'));"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestToListConversion() {
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',T.'eq','Fred').fill(r);"
- + "g.V().has('prop2',T.'eq','George').fill(r);"
- + "r._().toList()";
- }
-
- @Override
- protected String getExpectedGremlinForTestToListWithExtraStuff() {
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',T.'eq','Fred').fill(r);"
- + "g.V().has('prop2',T.'eq','George').fill(r);"
- + "r._().toList().size()";
- }
-
-
- @Override
- protected String getExpectedGremlinForTestAddClosureWithExitExpressionDifferentFromExpr() {
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',T.'eq','Fred').out('knows').out('livesIn').fill(r);"
- + "g.V().has('prop2',T.'eq','George').out('knows').out('livesIn').fill(r);"
- + "r._().toList().size()";
- }
-
- @Override
- protected String getExpectedGremlinForTestAddClosureNoExitExpression() {
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',T.'eq','Fred').out('knows').out('livesIn').fill(r);"
- + "g.V().has('prop2',T.'eq','George').out('knows').out('livesIn').fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestAddClosureWithExitExpressionEqualToExpr() {
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',T.'eq','Fred').out('knows').out('livesIn').fill(r);"
- + "g.V().has('prop2',T.'eq','George').out('knows').out('livesIn').fill(r);"
- + "r._().toList()";
- }
-
- @Override
- protected String getExpectedGremlinForTestClosureNotCreatedWhenNoOrs() {
- return "g.V().has('prop1',T.'eq','Fred').has('prop2',T.'eq','George').out('knows').out('livesIn')";
- }
-
- @Override
- protected String getExpectedGremlinForTestOrFollowedByAnd() {
- return "def r=(([]) as Set);"
- + "def f1={GremlinPipeline x->x.has('age',T.'eq','13').has('age',T.'eq','14').fill(r)};"
- + "f1(g.V().has('name',T.'eq','Fred'));"
- + "f1(g.V().has('name',T.'eq','George'));"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestOrFollowedByOr() {
- return "def r=(([]) as Set);"
- + "g.V().has('name',T.'eq','Fred').has('age',T.'eq','13').fill(r);"
- + "g.V().has('name',T.'eq','Fred').has('age',T.'eq','14').fill(r);"
- + "g.V().has('name',T.'eq','George').has('age',T.'eq','13').fill(r);"
- + "g.V().has('name',T.'eq','George').has('age',T.'eq','14').fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestMassiveOrExpansion() {
- return "def r=(([]) as Set);"
- + "def f1={g.V().has('h1',T.'eq','h2').has('h3',T.'eq','h4')};"
- + "def f2={GremlinPipeline x->x.has('ha0',T.'eq','hb0').has('hc0',T.'eq','hd0')};"
- + "def f3={GremlinPipeline x->x.has('ha1',T.'eq','hb1').has('hc1',T.'eq','hd1')};"
- + "def f4={GremlinPipeline x->x.has('ha2',T.'eq','hb2').has('hc2',T.'eq','hd2')};"
- + "def f5={GremlinPipeline x->x.has('ha3',T.'eq','hb3').has('hc3',T.'eq','hd3')};"
- + "def f6={GremlinPipeline x->x.has('ha4',T.'eq','hb4').has('hc4',T.'eq','hd4').has('h5',T.'eq','h6').has('h7',T.'eq','h8').fill(r)};"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p11',T.'eq','e11')).has('p12',T.'eq','e12')).has('p13',T.'eq','e13')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p11',T.'eq','e11')).has('p12',T.'eq','e12')).has('p13',T.'eq','e13')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p11',T.'eq','e11')).has('p12',T.'eq','e12')).has('p23',T.'eq','e23')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p11',T.'eq','e11')).has('p12',T.'eq','e12')).has('p23',T.'eq','e23')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p11',T.'eq','e11')).has('p22',T.'eq','e22')).has('p13',T.'eq','e13')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p11',T.'eq','e11')).has('p22',T.'eq','e22')).has('p13',T.'eq','e13')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p11',T.'eq','e11')).has('p22',T.'eq','e22')).has('p23',T.'eq','e23')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p11',T.'eq','e11')).has('p22',T.'eq','e22')).has('p23',T.'eq','e23')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p21',T.'eq','e21')).has('p12',T.'eq','e12')).has('p13',T.'eq','e13')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p21',T.'eq','e21')).has('p12',T.'eq','e12')).has('p13',T.'eq','e13')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p21',T.'eq','e21')).has('p12',T.'eq','e12')).has('p23',T.'eq','e23')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p21',T.'eq','e21')).has('p12',T.'eq','e12')).has('p23',T.'eq','e23')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p21',T.'eq','e21')).has('p22',T.'eq','e22')).has('p13',T.'eq','e13')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p21',T.'eq','e21')).has('p22',T.'eq','e22')).has('p13',T.'eq','e13')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p21',T.'eq','e21')).has('p22',T.'eq','e22')).has('p23',T.'eq','e23')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p10',T.'eq','e10')).has('p21',T.'eq','e21')).has('p22',T.'eq','e22')).has('p23',T.'eq','e23')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p11',T.'eq','e11')).has('p12',T.'eq','e12')).has('p13',T.'eq','e13')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p11',T.'eq','e11')).has('p12',T.'eq','e12')).has('p13',T.'eq','e13')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p11',T.'eq','e11')).has('p12',T.'eq','e12')).has('p23',T.'eq','e23')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p11',T.'eq','e11')).has('p12',T.'eq','e12')).has('p23',T.'eq','e23')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p11',T.'eq','e11')).has('p22',T.'eq','e22')).has('p13',T.'eq','e13')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p11',T.'eq','e11')).has('p22',T.'eq','e22')).has('p13',T.'eq','e13')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p11',T.'eq','e11')).has('p22',T.'eq','e22')).has('p23',T.'eq','e23')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p11',T.'eq','e11')).has('p22',T.'eq','e22')).has('p23',T.'eq','e23')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p21',T.'eq','e21')).has('p12',T.'eq','e12')).has('p13',T.'eq','e13')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p21',T.'eq','e21')).has('p12',T.'eq','e12')).has('p13',T.'eq','e13')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p21',T.'eq','e21')).has('p12',T.'eq','e12')).has('p23',T.'eq','e23')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p21',T.'eq','e21')).has('p12',T.'eq','e12')).has('p23',T.'eq','e23')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p21',T.'eq','e21')).has('p22',T.'eq','e22')).has('p13',T.'eq','e13')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p21',T.'eq','e21')).has('p22',T.'eq','e22')).has('p13',T.'eq','e13')).has('p24',T.'eq','e24'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p21',T.'eq','e21')).has('p22',T.'eq','e22')).has('p23',T.'eq','e23')).has('p14',T.'eq','e14'));"
- + "f6(f5(f4(f3(f2(f1().has('p20',T.'eq','e20')).has('p21',T.'eq','e21')).has('p22',T.'eq','e22')).has('p23',T.'eq','e23')).has('p24',T.'eq','e24'));"
- + "r";
-
- }
-
- @Override
- protected String getExpectedGremlinForTestAndFollowedByAnd() {
- return "g.V().has('name',T.'eq','Fred').has('name',T.'eq','George').has('age',T.'eq','13').has('age',T.'eq','14')";
-
- }
-
- @Override
- protected String getExpectedGremlinForTestAndFollowedByOr() {
- return "def r=(([]) as Set);"
- + "def f1={g.V().has('name',T.'eq','Fred').has('name',T.'eq','George')};f1().has('age',T.'eq','13').fill(r);"
- + "f1().has('age',T.'eq','14').fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestInitialAlias() {
- return "def r=(([]) as Set);"
- + "g.V().as('x').has('name',T.'eq','Fred').fill(r);"
- + "g.V().as('x').has('name',T.'eq','George').fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestFinalAlias() {
- return "def r=(([]) as Set);"
- + "g.V().has('name',T.'eq','Fred').as('x').fill(r);"
- + "g.V().has('name',T.'eq','George').as('x').fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestAliasInMiddle() {
- return "def r=(([]) as Set);"
- + "g.V().has('name',T.'eq','Fred').as('x').has('age',T.'eq','13').fill(r);"
- + "g.V().has('name',T.'eq','Fred').as('x').has('age',T.'eq','14').fill(r);"
- + "g.V().has('name',T.'eq','George').as('x').has('age',T.'eq','13').fill(r);"
- + "g.V().has('name',T.'eq','George').as('x').has('age',T.'eq','14').fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGreminForTestMultipleAliases() {
- return "def r=(([]) as Set);"
- + "def f1={GremlinPipeline x->x.as('y').fill(r)};"
- + "f1(g.V().has('name',T.'eq','Fred').as('x').has('age',T.'eq','13'));"
- + "f1(g.V().has('name',T.'eq','Fred').as('x').has('age',T.'eq','14'));"
- + "f1(g.V().has('name',T.'eq','George').as('x').has('age',T.'eq','13'));"
- + "f1(g.V().has('name',T.'eq','George').as('x').has('age',T.'eq','14'));"
- + "r";
- }
-
-
- @Override
- protected String getExpectedGremlinForTestAliasInOrExpr() {
- return "def r=(([]) as Set);"
- + "g.V().has('name',T.'eq','Fred').fill(r);"
- + "g.V().or(has('name',T.'eq','George').as('george')).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestAliasInAndExpr() {
- return "g.V().has('name',T.'eq','Fred').and(has('name',T.'eq','George').as('george'))";
- }
- @Override
- protected String getExpectedGremlinForTestFlatMapExprInAnd() {
- return "g.V().has('name',T.'eq','Fred').and(out('knows').has('name',T.'eq','George'))";
- }
-
- @Override
- protected String getExpectedGremlinForTestFlatMapExprInOr() {
- return "def r=(([]) as Set);"
- + "g.V().has('name',T.'eq','Fred').fill(r);"
- + "g.V().or(out('knows').has('name',T.'eq','George')).fill(r);"
- + "r";
- }
-
-
- @Override
- protected String getExpectedGremlinForTestFieldExpressionPushedToResultExpression() {
- return "def r=(([]) as Set);"
- + "g.V().has('name',T.'eq','Fred').fill(r);"
- + "g.V().or(out('knows').has('name',T.'eq','George')).fill(r);"
- + "r._().'name'";
- }
-
- @Override
- protected String getExpectedGremlinFortestOrWithNoChildren() {
- return "def r=(([]) as Set);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestFinalAliasNeeded() {
- return "def r=(([]) as Set);"
- + "def f1={g.V().has('name',T.'eq','Fred').as('person').out('livesIn')};"
- + "def f2={GremlinPipeline x->x.as('city').out('state').has('name',T.'eq','Massachusetts').as('__res').select(['person', 'city', '__res']).fill(r)};"
- + "f2(f1().has('name',T.'eq','Chicago'));"
- + "f2(f1().has('name',T.'eq','Boston'));"
- + "r._().as('__tmp').transform({((Row)it).getColumn('person')}).as('person').back('__tmp').transform({((Row)it).getColumn('city')}).as('city').back('__tmp').transform({((Row)it).getColumn('__res')}).as('__res').path().toList().collect({it.tail()})";
- }
-
- @Override
- protected String getExpectedGremlinForTestSimpleRangeExpression() {
- return "def r=(([]) as Set);"
- + "def f1={GremlinPipeline x->x.has('age',T.'eq','34').out('eats').has('size',T.'eq','small').has('color',T.'eq','blue') [0..<10].fill(r)};"
- + "f1(g.V().has('name',T.'eq','Fred'));"
- + "f1(g.V().has('name',T.'eq','George'));"
- + "r._() [0..<10].toList().size()";
- }
-
- @Override
- protected String getExpectedGremlinForTestRangeWithNonZeroOffset() {
- return "def r=(([]) as Set);"
- + "g.V().has('__typeName',T.'eq','OMAS_OMRSAsset').fill(r);"
- + "g.V().has('__superTypeNames',T.'eq','OMAS_OMRSAsset').fill(r);"
- + "r._() [5..<10].as('inst').select(['inst'])";
- }
-
- @Override
- protected String getExpectedGremlinForTestRangeWithOrderBy() {
- return "def r=(([]) as Set);"
- + "g.V().has('__typeName',T.'eq','OMAS_OMRSAsset').fill(r);"
- + "g.V().has('__superTypeNames',T.'eq','OMAS_OMRSAsset').fill(r);"
- + "r._() [5..<10].as('inst').order({((it.'name' != null)?(it.'name'.toLowerCase()):(it.'name')) <=> ((it.'name' != null)?(it.'name'.toLowerCase()):(it.'name'))})";
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/Gremlin3QueryOptimizerTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/Gremlin3QueryOptimizerTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/Gremlin3QueryOptimizerTest.java
deleted file mode 100644
index 47dff14..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/Gremlin3QueryOptimizerTest.java
+++ /dev/null
@@ -1,367 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.gremlin.Gremlin3ExpressionFactory;
-import org.apache.atlas.gremlin.GremlinExpressionFactory;
-import org.testng.annotations.Test;
-
-
-@Test
-public class Gremlin3QueryOptimizerTest extends AbstractGremlinQueryOptimizerTest {
-
- public static GremlinExpressionFactory FACTORY = null;
-
- @Override
- protected GremlinExpressionFactory getFactory() {
- if (null == FACTORY) {
- FACTORY = new Gremlin3ExpressionFactory();
- }
- return FACTORY;
- }
-
- @Override
- protected String getExpectedGremlinForTestPullHasExpressionsOutOfHas() {
- return "g.V().has('prop1',eq('Fred')).has('prop2',eq('George')).and(out('out1'),out('out2'))";
- }
-
- @Override
- protected String getExpectedGremlinForTestOrGrouping() {
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',eq('Fred')).fill(r);"
- + "g.V().has('prop2',eq('George')).fill(r);"
- + "g.V().or(out('out1'),out('out2')).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestAndOfOrs() {
-
- return "def r=(([]) as Set);"
- + "g.V().has('p1',eq('e1')).has('p3',eq('e3')).fill(r);"
- + "g.V().has('p1',eq('e1')).has('p4',eq('e4')).fill(r);"
- + "g.V().has('p2',eq('e2')).has('p3',eq('e3')).fill(r);"
- + "g.V().has('p2',eq('e2')).has('p4',eq('e4')).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestAndWithMultiCallArguments() {
-
- return "g.V().has('p1',eq('e1')).has('p2',eq('e2')).has('p3',eq('e3')).has('p4',eq('e4'))";
- }
-
- @Override
- protected String getExpectedGremlinForTestOrOfAnds() {
- return "def r=(([]) as Set);"
- + "g.V().has('p1',eq('e1')).has('p2',eq('e2')).fill(r);"
- + "g.V().has('p3',eq('e3')).has('p4',eq('e4')).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestHasNotMovedToResult() {
- return "def r=(([]) as Set);"
- + "def f1={GraphTraversal x->x.has('p3',eq('e3')).as('_src').select('_src').fill(r)};"
- + "f1(g.V().has('p1',eq('e1')));f1(g.V().has('p2',eq('e2')));"
- + "g.V('').inject(((r) as Vertex[])).as('_src').select('src1').by((({it}) as Function))";
- }
-
-
- @Override
- protected String getExpectedGremlinForTestLongStringEndingWithOr() {
- return "def r=(([]) as Set);"
- + "def f1={g.V().has('name',eq('Fred')).has('age',eq('13')).out('livesIn').has('state',eq('Massachusetts'))};"
- + "def f2={GraphTraversal x->x.has('p5',eq('e5')).has('p6',eq('e6'))};"
- + "f2(f1().has('p1',eq('e1')).has('p3',eq('e3'))).has('p7',eq('e7')).fill(r);"
- + "f2(f1().has('p1',eq('e1')).has('p3',eq('e3'))).has('p8',eq('e8')).fill(r);"
- + "f2(f1().has('p1',eq('e1')).has('p4',eq('e4'))).has('p7',eq('e7')).fill(r);"
- + "f2(f1().has('p1',eq('e1')).has('p4',eq('e4'))).has('p8',eq('e8')).fill(r);"
- + "f2(f1().has('p2',eq('e2')).has('p3',eq('e3'))).has('p7',eq('e7')).fill(r);"
- + "f2(f1().has('p2',eq('e2')).has('p3',eq('e3'))).has('p8',eq('e8')).fill(r);"
- + "f2(f1().has('p2',eq('e2')).has('p4',eq('e4'))).has('p7',eq('e7')).fill(r);"
- + "f2(f1().has('p2',eq('e2')).has('p4',eq('e4'))).has('p8',eq('e8')).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestLongStringNotEndingWithOr() {
- return "def r=(([]) as Set);"
- + "def f1={g.V().has('name',eq('Fred')).has('age',eq('13')).out('livesIn').has('state',eq('Massachusetts'))};"
- + "def f2={GraphTraversal x->x.has('p5',eq('e5')).has('p6',eq('e6'))};"
- + "def f3={GraphTraversal x->x.has('p9',eq('e9')).fill(r)};"
- + "f3(f2(f1().has('p1',eq('e1')).has('p3',eq('e3'))).has('p7',eq('e7')));"
- + "f3(f2(f1().has('p1',eq('e1')).has('p3',eq('e3'))).has('p8',eq('e8')));"
- + "f3(f2(f1().has('p1',eq('e1')).has('p4',eq('e4'))).has('p7',eq('e7')));"
- + "f3(f2(f1().has('p1',eq('e1')).has('p4',eq('e4'))).has('p8',eq('e8')));"
- + "f3(f2(f1().has('p2',eq('e2')).has('p3',eq('e3'))).has('p7',eq('e7')));"
- + "f3(f2(f1().has('p2',eq('e2')).has('p3',eq('e3'))).has('p8',eq('e8')));"
- + "f3(f2(f1().has('p2',eq('e2')).has('p4',eq('e4'))).has('p7',eq('e7')));"
- + "f3(f2(f1().has('p2',eq('e2')).has('p4',eq('e4'))).has('p8',eq('e8')));"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestToListConversion() {
-
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',eq('Fred')).fill(r);"
- + "g.V().has('prop2',eq('George')).fill(r);"
- + "g.V('').inject(((r) as Vertex[])).toList()";
- }
-
-
- @Override
- protected String getExpectedGremlinForTestToListWithExtraStuff() {
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',eq('Fred')).fill(r);"
- + "g.V().has('prop2',eq('George')).fill(r);"
- + "g.V('').inject(((r) as Vertex[])).toList().size()";
- }
-
-
- @Override
- protected String getExpectedGremlinForTestAddClosureWithExitExpressionDifferentFromExpr() {
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',eq('Fred')).out('knows').out('livesIn').fill(r);"
- + "g.V().has('prop2',eq('George')).out('knows').out('livesIn').fill(r);"
- + "g.V('').inject(((r) as Vertex[])).toList().size()";
- }
-
- @Override
- protected String getExpectedGremlinForTestAddClosureNoExitExpression() {
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',eq('Fred')).out('knows').out('livesIn').fill(r);"
- + "g.V().has('prop2',eq('George')).out('knows').out('livesIn').fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestAddClosureWithExitExpressionEqualToExpr() {
- return "def r=(([]) as Set);"
- + "g.V().has('prop1',eq('Fred')).out('knows').out('livesIn').fill(r);"
- + "g.V().has('prop2',eq('George')).out('knows').out('livesIn').fill(r);"
- + "g.V('').inject(((r) as Vertex[])).toList()";
- }
-
- @Override
- protected String getExpectedGremlinForTestClosureNotCreatedWhenNoOrs() {
- return "g.V().has('prop1',eq('Fred')).has('prop2',eq('George')).out('knows').out('livesIn')";
- }
-
- @Override
- protected String getExpectedGremlinForTestOrFollowedByAnd() {
- return "def r=(([]) as Set);"
- + "def f1={GraphTraversal x->x.has('age',eq('13')).has('age',eq('14')).fill(r)};"
- + "f1(g.V().has('name',eq('Fred')));"
- + "f1(g.V().has('name',eq('George')));"
- + "r";
- }
-
-
- @Override
- protected String getExpectedGremlinForTestOrFollowedByOr() {
- return "def r=(([]) as Set);"
- + "g.V().has('name',eq('Fred')).has('age',eq('13')).fill(r);"
- + "g.V().has('name',eq('Fred')).has('age',eq('14')).fill(r);"
- + "g.V().has('name',eq('George')).has('age',eq('13')).fill(r);"
- + "g.V().has('name',eq('George')).has('age',eq('14')).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestMassiveOrExpansion() {
- return "def r=(([]) as Set);"
- + "def f1={g.V().has('h1',eq('h2')).has('h3',eq('h4'))};"
- + "def f2={GraphTraversal x->x.has('ha0',eq('hb0')).has('hc0',eq('hd0'))};"
- + "def f3={GraphTraversal x->x.has('ha1',eq('hb1')).has('hc1',eq('hd1'))};"
- + "def f4={GraphTraversal x->x.has('ha2',eq('hb2')).has('hc2',eq('hd2'))};"
- + "def f5={GraphTraversal x->x.has('ha3',eq('hb3')).has('hc3',eq('hd3'))};"
- + "def f6={GraphTraversal x->x.has('ha4',eq('hb4')).has('hc4',eq('hd4')).has('h5',eq('h6')).has('h7',eq('h8')).fill(r)};"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p11',eq('e11'))).has('p12',eq('e12'))).has('p13',eq('e13'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p11',eq('e11'))).has('p12',eq('e12'))).has('p13',eq('e13'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p11',eq('e11'))).has('p12',eq('e12'))).has('p23',eq('e23'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p11',eq('e11'))).has('p12',eq('e12'))).has('p23',eq('e23'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p11',eq('e11'))).has('p22',eq('e22'))).has('p13',eq('e13'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p11',eq('e11'))).has('p22',eq('e22'))).has('p13',eq('e13'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p11',eq('e11'))).has('p22',eq('e22'))).has('p23',eq('e23'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p11',eq('e11'))).has('p22',eq('e22'))).has('p23',eq('e23'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p21',eq('e21'))).has('p12',eq('e12'))).has('p13',eq('e13'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p21',eq('e21'))).has('p12',eq('e12'))).has('p13',eq('e13'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p21',eq('e21'))).has('p12',eq('e12'))).has('p23',eq('e23'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p21',eq('e21'))).has('p12',eq('e12'))).has('p23',eq('e23'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p21',eq('e21'))).has('p22',eq('e22'))).has('p13',eq('e13'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p21',eq('e21'))).has('p22',eq('e22'))).has('p13',eq('e13'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p21',eq('e21'))).has('p22',eq('e22'))).has('p23',eq('e23'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p10',eq('e10'))).has('p21',eq('e21'))).has('p22',eq('e22'))).has('p23',eq('e23'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p11',eq('e11'))).has('p12',eq('e12'))).has('p13',eq('e13'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p11',eq('e11'))).has('p12',eq('e12'))).has('p13',eq('e13'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p11',eq('e11'))).has('p12',eq('e12'))).has('p23',eq('e23'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p11',eq('e11'))).has('p12',eq('e12'))).has('p23',eq('e23'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p11',eq('e11'))).has('p22',eq('e22'))).has('p13',eq('e13'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p11',eq('e11'))).has('p22',eq('e22'))).has('p13',eq('e13'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p11',eq('e11'))).has('p22',eq('e22'))).has('p23',eq('e23'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p11',eq('e11'))).has('p22',eq('e22'))).has('p23',eq('e23'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p21',eq('e21'))).has('p12',eq('e12'))).has('p13',eq('e13'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p21',eq('e21'))).has('p12',eq('e12'))).has('p13',eq('e13'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p21',eq('e21'))).has('p12',eq('e12'))).has('p23',eq('e23'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p21',eq('e21'))).has('p12',eq('e12'))).has('p23',eq('e23'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p21',eq('e21'))).has('p22',eq('e22'))).has('p13',eq('e13'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p21',eq('e21'))).has('p22',eq('e22'))).has('p13',eq('e13'))).has('p24',eq('e24')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p21',eq('e21'))).has('p22',eq('e22'))).has('p23',eq('e23'))).has('p14',eq('e14')));"
- + "f6(f5(f4(f3(f2(f1().has('p20',eq('e20'))).has('p21',eq('e21'))).has('p22',eq('e22'))).has('p23',eq('e23'))).has('p24',eq('e24')));"
- + "r";
-
- }
-
- @Override
- protected String getExpectedGremlinForTestAndFollowedByAnd() {
- return "g.V().has('name',eq('Fred')).has('name',eq('George')).has('age',eq('13')).has('age',eq('14'))";
- }
-
-
- @Override
- protected String getExpectedGremlinForTestAndFollowedByOr() {
- return "def r=(([]) as Set);"
- + "def f1={g.V().has('name',eq('Fred')).has('name',eq('George'))};"
- + "f1().has('age',eq('13')).fill(r);"
- + "f1().has('age',eq('14')).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestInitialAlias() {
- return "def r=(([]) as Set);"
- + "g.V().as('x').has('name',eq('Fred')).fill(r);"
- + "g.V().as('x').has('name',eq('George')).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestFinalAlias() {
- return "def r=(([]) as Set);"
- + "g.V().has('name',eq('Fred')).as('x').fill(r);"
- + "g.V().has('name',eq('George')).as('x').fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestAliasInMiddle() {
- return "def r=(([]) as Set);"
- + "g.V().has('name',eq('Fred')).as('x').has('age',eq('13')).fill(r);"
- + "g.V().has('name',eq('Fred')).as('x').has('age',eq('14')).fill(r);"
- + "g.V().has('name',eq('George')).as('x').has('age',eq('13')).fill(r);"
- + "g.V().has('name',eq('George')).as('x').has('age',eq('14')).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGreminForTestMultipleAliases() {
- return "def r=(([]) as Set);"
- + "def f1={GraphTraversal x->x.as('y').fill(r)};"
- + "f1(g.V().has('name',eq('Fred')).as('x').has('age',eq('13')));"
- + "f1(g.V().has('name',eq('Fred')).as('x').has('age',eq('14')));"
- + "f1(g.V().has('name',eq('George')).as('x').has('age',eq('13')));"
- + "f1(g.V().has('name',eq('George')).as('x').has('age',eq('14')));"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestAliasInOrExpr() {
- return "def r=(([]) as Set);"
- + "g.V().has('name',eq('Fred')).fill(r);"
- + "g.V().or(has('name',eq('George')).as('george')).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestAliasInAndExpr() {
- return "g.V().has('name',eq('Fred')).and(has('name',eq('George')).as('george'))";
- }
-
- @Override
- protected String getExpectedGremlinForTestFlatMapExprInAnd() {
- return "g.V().has('name',eq('Fred')).and(out('knows').has('name',eq('George')))";
- }
-
- @Override
- protected String getExpectedGremlinForTestFlatMapExprInOr() {
- return "def r=(([]) as Set);"
- + "g.V().has('name',eq('Fred')).fill(r);"
- + "g.V().or(out('knows').has('name',eq('George'))).fill(r);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestFieldExpressionPushedToResultExpression() {
-
- return "def r=(([]) as Set);"
- + "g.V().has('name',eq('Fred')).fill(r);"
- + "g.V().or(out('knows').has('name',eq('George'))).fill(r);"
- + "g.V('').inject(((r) as Vertex[])).values('name')";
- }
-
- @Override
- protected String getExpectedGremlinFortestOrWithNoChildren() {
- return "def r=(([]) as Set);"
- + "r";
- }
-
- @Override
- protected String getExpectedGremlinForTestFinalAliasNeeded() {
- return "def r=(([]) as Set);"
- + "def f1={g.V().has('name',eq('Fred')).as('person').out('livesIn')};"
- + "def f2={GraphTraversal x->x.as('city').out('state').has('name',eq('Massachusetts')).as('__res').select('person','city','__res').fill(r)};"
- + "f2(f1().has('name',eq('Chicago')));f2(f1().has('name',eq('Boston')));"
- + "__(((r) as Map[])).as('__tmp').map({((Map)it.get()).get('person')}).as('person').select('__tmp').map({((Map)it.get()).get('city')}).as('city').select('__tmp').map({((Map)it.get()).get('__res')}).as('__res').path().toList().collect({it.tail()})";
- }
-
- @Override
- protected String getExpectedGremlinForTestSimpleRangeExpression() {
- return "def r=(([]) as Set);"
- + "def f1={GraphTraversal x->x.has('age',eq('34')).out('eats').has('size',eq('small')).has('color',eq('blue')).range(0,10).fill(r)};"
- + "f1(g.V().has('name',eq('Fred')));"
- + "f1(g.V().has('name',eq('George')));"
- + "g.V('').inject(((r) as Vertex[])).range(0,10).toList().size()";
- }
-
- @Override
- protected String getExpectedGremlinForOptimizeLoopExpression() {
- return "def r=(([]) as Set);def f1={GraphTraversal x->x.has('name',eq('Fred')).as('label').select('label').fill(r)};"
- + "f1(g.V().has('__typeName','DataSet'));"
- + "f1(g.V().has('__superTypeNames','DataSet'));"
- + "g.V('').inject(((r) as Vertex[])).as('label').repeat(__.in('inputTables').out('outputTables')).emit(has('__typeName',eq('string')).or().has('__superTypeNames',eq('string'))).toList()";
- }
-
- @Override
- protected String getExpectedGremlinForTestRangeWithNonZeroOffset() {
- return "def r=(([]) as Set);" +
- "g.V().has('__typeName',eq('OMAS_OMRSAsset')).fill(r);" +
- "g.V().has('__superTypeNames',eq('OMAS_OMRSAsset')).fill(r);" +
- "g.V('').inject(((r) as Vertex[])).range(5,10).as('inst').select('inst')";
- }
-
- @Override
- protected String getExpectedGremlinForTestRangeWithOrderBy() {
- return "def r=(([]) as Set);"
- + "g.V().has('__typeName',eq('OMAS_OMRSAsset')).fill(r);"
- + "g.V().has('__superTypeNames',eq('OMAS_OMRSAsset')).fill(r);"
- + "g.V('').inject(((r) as Vertex[])).range(5,10).as('inst').order().by((({it.get().values('name')}) as Function),{a, b->a.toString().toLowerCase() <=> b.toString().toLowerCase()})";
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateHardDeleteTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateHardDeleteTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateHardDeleteTest.java
deleted file mode 100644
index a3207fb..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateHardDeleteTest.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.TestModules;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.testng.Assert;
-import org.testng.annotations.Guice;
-
-import java.util.List;
-
-
-/**
- * Run tests in {@link ReverseReferenceUpdateTestBase} with hard delete enabled.
- *
- */
-@Guice(modules = TestModules.HardDeleteModule.class)
-public class ReverseReferenceUpdateHardDeleteTest extends ReverseReferenceUpdateTestBase {
- @Override
- void assertTestOneToOneReference(Object refValue, ITypedReferenceableInstance expectedValue, ITypedReferenceableInstance referencingInstance) throws Exception {
- // Verify reference was disconnected
- Assert.assertNull(refValue);
- }
-
- @Override
- void assertTestOneToManyReference(Object object, ITypedReferenceableInstance referencingInstance) {
- Assert.assertTrue(object instanceof List);
- List<ITypedReferenceableInstance> refValues = (List<ITypedReferenceableInstance>) object;
- Assert.assertEquals(refValues.size(), 1);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateSoftDeleteTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateSoftDeleteTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateSoftDeleteTest.java
deleted file mode 100644
index ce43bdc..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateSoftDeleteTest.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.TestModules;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.testng.Assert;
-import org.testng.annotations.Guice;
-
-import java.util.Iterator;
-import java.util.List;
-
-
-/**
- * Run tests in {@link ReverseReferenceUpdateTestBase} with soft delete enabled.
- *
- */
-@Guice(modules = TestModules.SoftDeleteModule.class)
-public class ReverseReferenceUpdateSoftDeleteTest extends ReverseReferenceUpdateTestBase {
- @Override
- void assertTestOneToOneReference(Object actual, ITypedReferenceableInstance expectedValue, ITypedReferenceableInstance referencingInstance) throws Exception {
- // Verify reference was not disconnected if soft deletes are enabled.
- Assert.assertNotNull(actual);
- Assert.assertTrue(actual instanceof ITypedReferenceableInstance);
- ITypedReferenceableInstance referenceValue = (ITypedReferenceableInstance) actual;
- Assert.assertEquals(referenceValue.getId()._getId(), expectedValue.getId()._getId());
-
- //Verify reference edge was marked as DELETED.
- AtlasVertex vertexForGUID = GraphHelper.getInstance().getVertexForGUID(referencingInstance.getId()._getId());
- String edgeLabel = GraphHelper.getEdgeLabel(typeB, typeB.fieldMapping.fields.get("a"));
- AtlasEdge edgeForLabel = GraphHelper.getInstance().getEdgeForLabel(vertexForGUID, edgeLabel);
- Assert.assertNotNull(edgeForLabel);
- String edgeState = edgeForLabel.getProperty(Constants.STATE_PROPERTY_KEY, String.class);
- Assert.assertEquals(edgeState, Id.EntityState.DELETED.name());
- }
-
- @Override
- void assertTestOneToManyReference(Object object, ITypedReferenceableInstance referencingInstance) throws Exception {
- // Verify reference was not disconnected if soft deletes are enabled.
- Assert.assertTrue(object instanceof List);
- List<ITypedReferenceableInstance> refValues = (List<ITypedReferenceableInstance>) object;
- Assert.assertEquals(refValues.size(), 2);
-
- // Verify that one of the reference edges is marked DELETED.
- AtlasVertex vertexForGUID = GraphHelper.getInstance().getVertexForGUID(referencingInstance.getId()._getId());
- String edgeLabel = GraphHelper.getEdgeLabel(typeB, typeB.fieldMapping.fields.get("manyA"));
- Iterator<AtlasEdge> outGoingEdgesByLabel = GraphHelper.getInstance().getOutGoingEdgesByLabel(vertexForGUID, edgeLabel);
- boolean found = false;
- while (outGoingEdgesByLabel.hasNext()) {
- AtlasEdge edge = outGoingEdgesByLabel.next();
- String edgeState = edge.getProperty(Constants.STATE_PROPERTY_KEY, String.class);
- if (edgeState.equals(Id.EntityState.DELETED.name())) {
- found = true;
- break;
- }
- }
- Assert.assertTrue(found, "One edge for label " + edgeLabel + " should be marked " + Id.EntityState.DELETED.name());
- }
-
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateTestBase.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateTestBase.java b/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateTestBase.java
deleted file mode 100644
index 8518f93..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/ReverseReferenceUpdateTestBase.java
+++ /dev/null
@@ -1,275 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import com.google.inject.Inject;
-import org.apache.atlas.CreateUpdateEntitiesResult;
-import org.apache.atlas.TestUtils;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.Assert;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Verifies automatic update of reverse references
- *
- */
-public abstract class ReverseReferenceUpdateTestBase {
- @Inject
- MetadataRepository repositoryService;
-
- private TypeSystem typeSystem;
-
- protected ClassType typeA;
- protected ClassType typeB;
-
- abstract void assertTestOneToOneReference(Object actual, ITypedReferenceableInstance expectedValue, ITypedReferenceableInstance referencingInstance) throws Exception;
- abstract void assertTestOneToManyReference(Object refValue, ITypedReferenceableInstance referencingInstance) throws Exception;
-
- @BeforeClass
- public void setUp() throws Exception {
- typeSystem = TypeSystem.getInstance();
- typeSystem.reset();
-
- HierarchicalTypeDefinition<ClassType> aDef = TypesUtil.createClassTypeDef("A", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("b", "B", Multiplicity.OPTIONAL, false, "a"), // 1-1
- new AttributeDefinition("oneB", "B", Multiplicity.OPTIONAL, false, "manyA"), // 1-*
- new AttributeDefinition("manyB", DataTypes.arrayTypeName("B"), Multiplicity.OPTIONAL, false, "manyToManyA"), // *-*
- new AttributeDefinition("map", DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
- "B"), Multiplicity.OPTIONAL, false, "backToMap"));
- HierarchicalTypeDefinition<ClassType> bDef = TypesUtil.createClassTypeDef("B", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("a", "A", Multiplicity.OPTIONAL, false, "b"),
- new AttributeDefinition("manyA", DataTypes.arrayTypeName("A"), Multiplicity.OPTIONAL, false, "oneB"),
- new AttributeDefinition("manyToManyA", DataTypes.arrayTypeName("A"), Multiplicity.OPTIONAL, false, "manyB"),
- new AttributeDefinition("backToMap", "A", Multiplicity.OPTIONAL, false, "map"));
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(aDef, bDef));
- typeSystem.defineTypes(typesDef);
- typeA = typeSystem.getDataType(ClassType.class, "A");
- typeB = typeSystem.getDataType(ClassType.class, "B");
-
- repositoryService = TestUtils.addTransactionWrapper(repositoryService);
- }
-
- @BeforeMethod
- public void setupContext() {
- TestUtils.resetRequestContext();
- }
-
- @Test
- public void testOneToOneReference() throws Exception {
- ITypedReferenceableInstance a = typeA.createInstance();
- a.setString("name", TestUtils.randomString());
- ITypedReferenceableInstance b1 = typeB.createInstance();
- b1.setString("name", TestUtils.randomString());
- a.set("b", b1);
- // Create a. This should also create b1 and set the reverse b1->a reference.
- repositoryService.createEntities(a);
- a = repositoryService.getEntityDefinition("A", "name", a.getString("name"));
- b1 = repositoryService.getEntityDefinition("B", "name", b1.getString("name"));
- Object object = a.get("b");
- Assert.assertTrue(object instanceof ITypedReferenceableInstance);
- ITypedReferenceableInstance refValue = (ITypedReferenceableInstance) object;
- Assert.assertEquals(refValue.getId()._getId(), b1.getId()._getId());
- object = b1.get("a");
- Assert.assertTrue(object instanceof ITypedReferenceableInstance);
- refValue = (ITypedReferenceableInstance) object;
- Assert.assertEquals(refValue.getId()._getId(), a.getId()._getId());
-
- ITypedReferenceableInstance b2 = typeB.createInstance();
- b2.setString("name", TestUtils.randomString());
- b2.set("a", a.getId());
- // Create b2. This should set the reverse a->b2 reference
- // and disconnect b1->a.
- repositoryService.createEntities(b2);
- a = repositoryService.getEntityDefinition(a.getId()._getId());
- b2 = repositoryService.getEntityDefinition("B", "name", b2.getString("name"));
- object = a.get("b");
- Assert.assertTrue(object instanceof ITypedReferenceableInstance);
- refValue = (ITypedReferenceableInstance) object;
- Assert.assertEquals(refValue.getId()._getId(), b2.getId()._getId());
- object = b2.get("a");
- Assert.assertTrue(object instanceof ITypedReferenceableInstance);
- refValue = (ITypedReferenceableInstance) object;
- Assert.assertEquals(refValue.getId()._getId(), a.getId()._getId());
- // Verify b1->a was disconnected.
- b1 = repositoryService.getEntityDefinition("B", "name", b1.getString("name"));
- object = b1.get("a");
- assertTestOneToOneReference(object, a, b1);
- }
-
- @Test
- public void testOneToManyReference() throws Exception {
- ITypedReferenceableInstance a1 = typeA.createInstance();
- a1.setString("name", TestUtils.randomString());
- ITypedReferenceableInstance a2 = typeA.createInstance();
- a2.setString("name", TestUtils.randomString());
- ITypedReferenceableInstance b1 = typeB.createInstance();
- b1.setString("name", TestUtils.randomString());
- a1.set("oneB", b1);
- ITypedReferenceableInstance b2 = typeB.createInstance();
- b2.setString("name", TestUtils.randomString());
- repositoryService.createEntities(a1, a2, b2);
- a1 = repositoryService.getEntityDefinition("A", "name", a1.getString("name"));
- a2 = repositoryService.getEntityDefinition("A", "name", a2.getString("name"));
- b1 = repositoryService.getEntityDefinition("B", "name", b1.getString("name"));
- b2 = repositoryService.getEntityDefinition("B", "name", b2.getString("name"));
- Object object = b1.get("manyA");
- Assert.assertTrue(object instanceof List);
- List<ITypedReferenceableInstance> refValues = (List<ITypedReferenceableInstance>) object;
- Assert.assertEquals(refValues.size(), 1);
- Assert.assertTrue(refValues.contains(a1.getId()));
-
- a2.set("oneB", b1.getId());
- repositoryService.updateEntities(a2);
- b1 = repositoryService.getEntityDefinition(b1.getId()._getId());
- object = b1.get("manyA");
- Assert.assertTrue(object instanceof List);
- refValues = (List<ITypedReferenceableInstance>) object;
- Assert.assertEquals(refValues.size(), 2);
- Assert.assertTrue(refValues.containsAll(Arrays.asList(a1.getId(), a2.getId())));
-
- b2.set("manyA", Collections.singletonList(a2));
- repositoryService.updateEntities(b2);
- a2 = repositoryService.getEntityDefinition("A", "name", a2.getString("name"));
-
- // Verify reverse a2.oneB reference was set to b2.
- object = a2.get("oneB");
- Assert.assertTrue(object instanceof ITypedReferenceableInstance);
- ITypedReferenceableInstance refValue = (ITypedReferenceableInstance) object;
- Assert.assertEquals(refValue.getId()._getId(), b2.getId()._getId());
-
- // Verify a2 was removed from b1.manyA reference list.
- b1 = repositoryService.getEntityDefinition(b1.getId()._getId());
- object = b1.get("manyA");
- assertTestOneToManyReference(object, b1);
- }
-
- @Test
- public void testManyToManyReference() throws Exception {
- ITypedReferenceableInstance a1 = typeA.createInstance();
- a1.setString("name", TestUtils.randomString());
- ITypedReferenceableInstance a2 = typeA.createInstance();
- a2.setString("name", TestUtils.randomString());
- ITypedReferenceableInstance b1 = typeB.createInstance();
- b1.setString("name", TestUtils.randomString());
- ITypedReferenceableInstance b2 = typeB.createInstance();
- b2.setString("name", TestUtils.randomString());
- repositoryService.createEntities(a1, a2, b1, b2);
- a1 = repositoryService.getEntityDefinition("A", "name", a1.getString("name"));
- a2 = repositoryService.getEntityDefinition("A", "name", a2.getString("name"));
- b1 = repositoryService.getEntityDefinition("B", "name", b1.getString("name"));
- b2 = repositoryService.getEntityDefinition("B", "name", b2.getString("name"));
-
- // Update a1 to add b1 to its manyB reference.
- // This should update b1.manyToManyA.
- a1.set("manyB", Arrays.asList(b1.getId()));
- repositoryService.updateEntities(a1);
-
- // Verify reverse b1.manyToManyA reference was updated.
- b1 = repositoryService.getEntityDefinition(b1.getId()._getId());
- Object object = b1.get("manyToManyA");
- Assert.assertTrue(object instanceof List);
- List<ITypedReferenceableInstance> refValues = (List<ITypedReferenceableInstance>) object;
- Assert.assertEquals(refValues.size(), 1);
- Assert.assertTrue(refValues.contains(a1.getId()));
- }
-
- /**
- * Auto-update of bi-directional references where one end is a map reference is
- * not currently supported. Verify that the auto-update is not applied in this case.
- */
- @Test
- public void testMapReference() throws Exception {
- ITypedReferenceableInstance a1 = typeA.createInstance();
- a1.setString("name", TestUtils.randomString());
- ITypedReferenceableInstance a2 = typeA.createInstance();
- a2.setString("name", TestUtils.randomString());
- ITypedReferenceableInstance b1 = typeB.createInstance();
- b1.setString("name", TestUtils.randomString());
- ITypedReferenceableInstance b2 = typeB.createInstance();
- b2.setString("name", TestUtils.randomString());
- repositoryService.createEntities(a1, a2, b1, b2);
- a1 = repositoryService.getEntityDefinition("A", "name", a1.getString("name"));
- a2 = repositoryService.getEntityDefinition("A", "name", a2.getString("name"));
- b1 = repositoryService.getEntityDefinition("B", "name", b1.getString("name"));
- b2 = repositoryService.getEntityDefinition("B", "name", b2.getString("name"));
- a1.set("map", Collections.singletonMap("b1", b1));
- repositoryService.updateEntities(a1);
- // Verify reverse b1.manyToManyA reference was not updated.
- b1 = repositoryService.getEntityDefinition(b1.getId()._getId());
- Object object = b1.get("backToMap");
- Assert.assertNull(object);
- }
-
- /**
- * Verify that explicitly setting both ends of a reference
- * does not cause duplicate entries due to auto-update of
- * reverse reference.
- */
- @Test
- public void testCallerHasSetBothEnds() throws Exception {
- ITypedReferenceableInstance a = typeA.createInstance();
- a.setString("name", TestUtils.randomString());
- ITypedReferenceableInstance b1 = typeB.createInstance();
- b1.setString("name", TestUtils.randomString());
- // Set both sides of the reference.
- a.set("oneB", b1);
- b1.set("manyA", Collections.singletonList(a));
-
- CreateUpdateEntitiesResult result = repositoryService.createEntities(a);
- Map<String, String> guidAssignments = result.getGuidMapping().getGuidAssignments();
- String aGuid = a.getId()._getId();
- String b1Guid = guidAssignments.get(b1.getId()._getId());
-
- a = repositoryService.getEntityDefinition(aGuid);
- Object object = a.get("oneB");
- Assert.assertTrue(object instanceof ITypedReferenceableInstance);
- Assert.assertEquals(((ITypedReferenceableInstance)object).getId()._getId(), b1Guid);
-
- b1 = repositoryService.getEntityDefinition(b1Guid);
- object = b1.get("manyA");
- Assert.assertTrue(object instanceof List);
- List<ITypedReferenceableInstance> refValues = (List<ITypedReferenceableInstance>)object;
- Assert.assertEquals(refValues.size(), 1);
- Assert.assertEquals(refValues.get(0).getId()._getId(), aGuid);
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/graph/TestIntSequence.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/TestIntSequence.java b/repository/src/test/java/org/apache/atlas/repository/graph/TestIntSequence.java
deleted file mode 100644
index b8eefca..0000000
--- a/repository/src/test/java/org/apache/atlas/repository/graph/TestIntSequence.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import org.apache.atlas.query.IntSequence;
-
-/**
- * IntSequence for use in unit tests.
- *
- */
-public class TestIntSequence implements IntSequence {
-
- public static final IntSequence INSTANCE = new TestIntSequence();
- private TestIntSequence() {
- }
- @Override
- public int next() {
- return 0;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/repository/impexp/ExportServiceTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/impexp/ExportServiceTest.java b/repository/src/test/java/org/apache/atlas/repository/impexp/ExportServiceTest.java
index 7901ef6..2a8bdfa 100644
--- a/repository/src/test/java/org/apache/atlas/repository/impexp/ExportServiceTest.java
+++ b/repository/src/test/java/org/apache/atlas/repository/impexp/ExportServiceTest.java
@@ -45,13 +45,13 @@ import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
-import scala.actors.threadpool.Arrays;
import javax.inject.Inject;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
+import java.util.Arrays;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
[03/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
index d7c66d3..ff1751d 100755
--- a/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/MetadataDiscoveryResource.java
@@ -22,8 +22,6 @@ import com.google.common.base.Preconditions;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasConfiguration;
import org.apache.atlas.classification.InterfaceAudience;
-import org.apache.atlas.discovery.DiscoveryException;
-import org.apache.atlas.discovery.DiscoveryService;
import org.apache.atlas.query.QueryParams;
import org.apache.atlas.utils.AtlasPerfTracer;
import org.apache.atlas.utils.ParamChecker;
@@ -46,6 +44,7 @@ import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
@@ -65,8 +64,6 @@ public class MetadataDiscoveryResource {
private static final String QUERY_TYPE_FULLTEXT = "full-text";
private static final String LIMIT_OFFSET_DEFAULT = "-1";
- private final DiscoveryService discoveryService;
-
private final boolean gremlinSearchEnabled;
private static Configuration applicationProperties = null;
private static final String ENABLE_GREMLIN_SEARCH_PROPERTY = "atlas.search.gremlin.enable";
@@ -75,11 +72,10 @@ public class MetadataDiscoveryResource {
* Created by the Guice ServletModule and injected with the
* configured DiscoveryService.
*
- * @param discoveryService metadata service handle
+ * @param configuration configuration
*/
@Inject
- public MetadataDiscoveryResource(DiscoveryService discoveryService, Configuration configuration) {
- this.discoveryService = discoveryService;
+ public MetadataDiscoveryResource(Configuration configuration) {
applicationProperties = configuration;
gremlinSearchEnabled = applicationProperties != null && applicationProperties.getBoolean(ENABLE_GREMLIN_SEARCH_PROPERTY, false);
}
@@ -152,12 +148,12 @@ public class MetadataDiscoveryResource {
dslQuery = ParamChecker.notEmpty(dslQuery, "dslQuery cannot be null");
QueryParams queryParams = validateQueryParams(limit, offset);
- final String jsonResultStr = discoveryService.searchByDSL(dslQuery, queryParams);
+ final String jsonResultStr = ""; // TODO-typeSystem-removal: discoveryService.searchByDSL(dslQuery, queryParams);
JSONObject response = new DSLJSONResponseBuilder().results(jsonResultStr).query(dslQuery).build();
return Response.ok(response).build();
- } catch (DiscoveryException | IllegalArgumentException e) {
+ } catch (IllegalArgumentException e) {
LOG.error("Unable to get entity list for dslQuery {}", dslQuery, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (WebApplicationException e) {
@@ -220,11 +216,11 @@ public class MetadataDiscoveryResource {
}
if (!gremlinSearchEnabled) {
- throw new DiscoveryException("Gremlin search is not enabled.");
+ throw new Exception("Gremlin search is not enabled.");
}
gremlinQuery = ParamChecker.notEmpty(gremlinQuery, "gremlinQuery cannot be null or empty");
- final List<Map<String, String>> results = discoveryService.searchByGremlin(gremlinQuery);
+ final List<Map<String, String>> results = new ArrayList<>(); // TODO-typeSystem-removal: discoveryService.searchByGremlin(gremlinQuery);
JSONObject response = new JSONObject();
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
@@ -239,7 +235,7 @@ public class MetadataDiscoveryResource {
response.put(AtlasClient.COUNT, list.length());
return Response.ok(response).build();
- } catch (DiscoveryException | IllegalArgumentException e) {
+ } catch (IllegalArgumentException e) {
LOG.error("Unable to get entity list for gremlinQuery {}", gremlinQuery, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (WebApplicationException e) {
@@ -284,12 +280,12 @@ public class MetadataDiscoveryResource {
query = ParamChecker.notEmpty(query, "query cannot be null or empty");
QueryParams queryParams = validateQueryParams(limit, offset);
- final String jsonResultStr = discoveryService.searchByFullText(query, queryParams);
+ final String jsonResultStr = ""; // TODO-typeSystem-removal: discoveryService.searchByFullText(query, queryParams);
JSONArray rowsJsonArr = new JSONArray(jsonResultStr);
JSONObject response = new FullTextJSonResponseBuilder().results(rowsJsonArr).query(query).build();
return Response.ok(response).build();
- } catch (DiscoveryException | IllegalArgumentException e) {
+ } catch (IllegalArgumentException e) {
LOG.error("Unable to get entity list for query {}", query, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (WebApplicationException e) {
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/web/resources/TypesResource.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/TypesResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/TypesResource.java
index a9c5509..9b2d7b2 100755
--- a/webapp/src/main/java/org/apache/atlas/web/resources/TypesResource.java
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/TypesResource.java
@@ -23,10 +23,10 @@ import com.sun.jersey.api.core.ResourceContext;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.typedef.AtlasTypesDef;
+import org.apache.atlas.v1.model.typedef.TypesDef;
import org.apache.atlas.store.AtlasTypeDefStore;
+import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.TypesSerialization;
import org.apache.atlas.repository.converters.TypeConverterUtil;
import org.apache.atlas.utils.AtlasPerfTracer;
import org.apache.atlas.web.rest.TypesREST;
@@ -237,7 +237,7 @@ public class TypesResource {
try {
TypesDef typesDef = TypeConverterUtil.toTypesDef(typeRegistry.getType(typeName), typeRegistry);;
- String typeDefinition = TypesSerialization.toJson(typesDef);
+ String typeDefinition = AtlasType.toV1Json(typesDef);
response.put(AtlasClient.TYPENAME, typeName);
response.put(AtlasClient.DEFINITION, new JSONObject(typeDefinition));
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/web/service/ActiveInstanceState.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/service/ActiveInstanceState.java b/webapp/src/main/java/org/apache/atlas/web/service/ActiveInstanceState.java
index c6b4a6f..ea8b738 100644
--- a/webapp/src/main/java/org/apache/atlas/web/service/ActiveInstanceState.java
+++ b/webapp/src/main/java/org/apache/atlas/web/service/ActiveInstanceState.java
@@ -33,10 +33,10 @@ import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
-import scala.actors.threadpool.Arrays;
import javax.inject.Inject;
import java.nio.charset.Charset;
+import java.util.Arrays;
import java.util.List;
/**
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/web/util/LineageUtils.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/util/LineageUtils.java b/webapp/src/main/java/org/apache/atlas/web/util/LineageUtils.java
index ad2a697..562d9b7 100644
--- a/webapp/src/main/java/org/apache/atlas/web/util/LineageUtils.java
+++ b/webapp/src/main/java/org/apache/atlas/web/util/LineageUtils.java
@@ -23,13 +23,11 @@ import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.lineage.AtlasLineageInfo;
import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
-import org.apache.atlas.model.typedef.AtlasEntityDef;
+import org.apache.atlas.repository.Constants;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.types.TypeSystem;
+import org.apache.atlas.v1.model.instance.Struct;
import java.util.ArrayList;
import java.util.HashMap;
@@ -38,21 +36,19 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
-import static org.apache.atlas.model.typedef.AtlasBaseTypeDef.ATLAS_TYPE_ARRAY_PREFIX;
-import static org.apache.atlas.model.typedef.AtlasBaseTypeDef.ATLAS_TYPE_ARRAY_SUFFIX;
public final class LineageUtils {
private LineageUtils() {}
- private static final String VERTICES_ATTR_NAME = "vertices";
- private static final String EDGES_ATTR_NAME = "edges";
private static final String VERTEX_ID_ATTR_NAME = "vertexId";
private static final String TEMP_STRUCT_ID_RESULT = "__IdType";
private static final AtomicInteger COUNTER = new AtomicInteger();
- public static String toLineageStruct(AtlasLineageInfo lineageInfo, AtlasTypeRegistry registry) throws AtlasBaseException {
- String ret = null;
+ public static Struct toLineageStruct(AtlasLineageInfo lineageInfo, AtlasTypeRegistry registry) throws AtlasBaseException {
+ Struct ret = new Struct();
+
+ ret.setTypeName(Constants.TEMP_STRUCT_NAME_PREFIX + COUNTER.getAndIncrement());
if (lineageInfo != null) {
Map<String, AtlasEntityHeader> entities = lineageInfo.getGuidEntityMap();
@@ -66,11 +62,10 @@ public final class LineageUtils {
if (isDataSet(entityHeader.getTypeName(), registry)) {
Map<String, Object> vertexIdMap = new HashMap<>();
- TypeSystem.IdType idType = TypeSystem.getInstance().getIdType();
- vertexIdMap.put(idType.idAttrName(), guid);
- vertexIdMap.put(idType.stateAttrName(), (entityHeader.getStatus() == AtlasEntity.Status.ACTIVE) ? "ACTIVE" : "DELETED");
- vertexIdMap.put(idType.typeNameAttrName(), entityHeader.getTypeName());
+ vertexIdMap.put(Constants.ATTRIBUTE_NAME_GUID, guid);
+ vertexIdMap.put(Constants.ATTRIBUTE_NAME_STATE, (entityHeader.getStatus() == AtlasEntity.Status.ACTIVE) ? "ACTIVE" : "DELETED");
+ vertexIdMap.put(Constants.ATTRIBUTE_NAME_TYPENAME, entityHeader.getTypeName());
Object qualifiedName = entityHeader.getAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME);
if (qualifiedName == null) {
@@ -106,11 +101,8 @@ public final class LineageUtils {
}
}
- Map<String, Object> map = new HashMap<>();
- map.put(VERTICES_ATTR_NAME, verticesMap);
- map.put(EDGES_ATTR_NAME, edgesMap);
-
- ret = InstanceSerialization.toJson(constructResultStruct(map, false), false);
+ ret.set("vertices", verticesMap);
+ ret.set("edges", edgesMap);
}
return ret;
@@ -121,7 +113,7 @@ public final class LineageUtils {
return new Struct(TEMP_STRUCT_ID_RESULT, values);
}
- return new Struct(org.apache.atlas.query.TypeUtils.TEMP_STRUCT_NAME_PREFIX() + COUNTER.getAndIncrement(), values);
+ return new Struct(Constants.TEMP_STRUCT_NAME_PREFIX + COUNTER.getAndIncrement(), values);
}
private static boolean isDataSet(String typeName, AtlasTypeRegistry registry) throws AtlasBaseException {
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java b/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java
index 592c2a6..1b5e811 100644
--- a/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java
+++ b/webapp/src/test/java/org/apache/atlas/examples/QuickStartIT.java
@@ -20,8 +20,8 @@ package org.apache.atlas.examples;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.atlas.web.integration.BaseResourceIT;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/notification/AdaptiveWaiterTest.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/notification/AdaptiveWaiterTest.java b/webapp/src/test/java/org/apache/atlas/notification/AdaptiveWaiterTest.java
index 3b4ba02..e65d678 100644
--- a/webapp/src/test/java/org/apache/atlas/notification/AdaptiveWaiterTest.java
+++ b/webapp/src/test/java/org/apache/atlas/notification/AdaptiveWaiterTest.java
@@ -26,7 +26,7 @@ public class AdaptiveWaiterTest {
private final int maxDuration = 100;
private final int minDuration = 5;
- private final int increment = 5;
+ private final int increment = 5;
private NotificationHookConsumer.AdaptiveWaiter waiter;
@BeforeClass
@@ -36,11 +36,13 @@ public class AdaptiveWaiterTest {
@Test
public void basicTest() {
- for (int i = 0; i < 20; i++) {
+ int pauseCount = 10;
+
+ for (int i = 0; i < pauseCount; i++) {
waiter.pause(new IllegalStateException());
}
- assertEquals(waiter.waitDuration, 95);
+ assertEquals(waiter.waitDuration, Math.min((pauseCount + 1) * minDuration, maxDuration)); // waiter.waitDuration will be set to wait time for next pause()
}
@Test
@@ -63,6 +65,6 @@ public class AdaptiveWaiterTest {
}
waiter.pause(new IllegalArgumentException());
- assertEquals(waiter.waitDuration, 5);
+ assertEquals(waiter.waitDuration, minDuration);
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java b/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java
index 7e94330..486b30b 100644
--- a/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java
+++ b/webapp/src/test/java/org/apache/atlas/notification/EntityNotificationIT.java
@@ -18,25 +18,21 @@
package org.apache.atlas.notification;
-import com.google.common.collect.ImmutableSet;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.kafka.NotificationProvider;
-import org.apache.atlas.notification.entity.EntityNotification;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.json.TypesSerialization$;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.notification.NotificationInterface.NotificationType;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.v1.model.notification.EntityNotificationV1;
+import org.apache.atlas.v1.model.notification.EntityNotificationV1.OperationType;
+import org.apache.atlas.v1.model.typedef.*;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.apache.atlas.web.integration.BaseResourceIT;
import org.testng.annotations.BeforeClass;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
+
+import java.util.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
@@ -46,33 +42,35 @@ import static org.testng.Assert.assertTrue;
* Entity Notification Integration Tests.
*/
public class EntityNotificationIT extends BaseResourceIT {
-
- private final String DATABASE_NAME = "db" + randomString();
- private final String TABLE_NAME = "table" + randomString();
- private NotificationInterface notificationInterface = NotificationProvider.get();
- private Id tableId;
- private Id dbId;
- private String traitName;
- private NotificationConsumer notificationConsumer;
+ private final String DATABASE_NAME = "db" + randomString();
+ private final String TABLE_NAME = "table" + randomString();
+ private final NotificationInterface notificationInterface = NotificationProvider.get();
+ private Id tableId;
+ private Id dbId;
+ private String traitName;
+ private NotificationConsumer notificationConsumer;
@BeforeClass
public void setUp() throws Exception {
super.setUp();
+
createTypeDefinitionsV1();
+
Referenceable HiveDBInstance = createHiveDBInstanceBuiltIn(DATABASE_NAME);
+
dbId = createInstance(HiveDBInstance);
- notificationConsumer = notificationInterface.createConsumers(NotificationInterface.NotificationType.ENTITIES, 1).get(0);
+ notificationConsumer = notificationInterface.createConsumers(NotificationType.ENTITIES, 1).get(0);
}
public void testCreateEntity() throws Exception {
Referenceable tableInstance = createHiveTableInstanceBuiltIn(DATABASE_NAME, TABLE_NAME, dbId);
+
tableId = createInstance(tableInstance);
final String guid = tableId._getId();
- waitForNotification(notificationConsumer, MAX_WAIT_TIME,
- newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE_BUILTIN, guid));
+ waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE_BUILTIN, guid));
}
public void testUpdateEntity() throws Exception {
@@ -83,83 +81,83 @@ public class EntityNotificationIT extends BaseResourceIT {
atlasClientV1.updateEntityAttribute(guid, property, newValue);
- waitForNotification(notificationConsumer, MAX_WAIT_TIME,
- newNotificationPredicate(EntityNotification.OperationType.ENTITY_UPDATE, HIVE_TABLE_TYPE_BUILTIN, guid));
+ waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.ENTITY_UPDATE, HIVE_TABLE_TYPE_BUILTIN, guid));
}
public void testDeleteEntity() throws Exception {
- final String tableName = "table-" + randomString();
- final String dbName = "db-" + randomString();
- Referenceable HiveDBInstance = createHiveDBInstanceBuiltIn(dbName);
- Id dbId = createInstance(HiveDBInstance);
+ final String tableName = "table-" + randomString();
+ final String dbName = "db-" + randomString();
+ final Referenceable HiveDBInstance = createHiveDBInstanceBuiltIn(dbName);
+ final Id dbId = createInstance(HiveDBInstance);
+ final Referenceable tableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId);
+ final Id tableId = createInstance(tableInstance);
+ final String guid = tableId._getId();
- Referenceable tableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId);
- final Id tableId = createInstance(tableInstance);
- final String guid = tableId._getId();
-
- waitForNotification(notificationConsumer, MAX_WAIT_TIME,
- newNotificationPredicate(EntityNotification.OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE_BUILTIN, guid));
+ waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.ENTITY_CREATE, HIVE_TABLE_TYPE_BUILTIN, guid));
final String name = (String) tableInstance.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME);
atlasClientV1.deleteEntity(HIVE_TABLE_TYPE_BUILTIN, AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
- waitForNotification(notificationConsumer, MAX_WAIT_TIME,
- newNotificationPredicate(EntityNotification.OperationType.ENTITY_DELETE, HIVE_TABLE_TYPE_BUILTIN, guid));
+ waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.ENTITY_DELETE, HIVE_TABLE_TYPE_BUILTIN, guid));
}
public void testAddTrait() throws Exception {
String superSuperTraitName = "SuperTrait" + randomString();
- createTrait(superSuperTraitName);
-
- String superTraitName = "SuperTrait" + randomString();
- createTrait(superTraitName, superSuperTraitName);
+ String superTraitName = "SuperTrait" + randomString();
traitName = "Trait" + randomString();
+
+ createTrait(superSuperTraitName);
+ createTrait(superTraitName, superSuperTraitName);
createTrait(traitName, superTraitName);
- Struct traitInstance = new Struct(traitName);
- String traitInstanceJSON = InstanceSerialization.toJson(traitInstance, true);
+ Struct traitInstance = new Struct(traitName);
+ String traitInstanceJSON = AtlasType.toV1Json(traitInstance);
+
LOG.debug("Trait instance = {}", traitInstanceJSON);
final String guid = tableId._getId();
atlasClientV1.addTrait(guid, traitInstance);
- EntityNotification entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME,
- newNotificationPredicate(EntityNotification.OperationType.TRAIT_ADD, HIVE_TABLE_TYPE_BUILTIN, guid));
+ EntityNotificationV1 entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.TRAIT_ADD, HIVE_TABLE_TYPE_BUILTIN, guid));
+
+ Referenceable entity = entityNotification.getEntity();
- IReferenceableInstance entity = entityNotification.getEntity();
- assertTrue(entity.getTraits().contains(traitName));
+ assertTrue(entity.getTraitNames().contains(traitName));
- List<IStruct> allTraits = entityNotification.getAllTraits();
+ List<Struct> allTraits = entityNotification.getAllTraits();
List<String> allTraitNames = new LinkedList<>();
- for (IStruct struct : allTraits) {
+ for (Struct struct : allTraits) {
allTraitNames.add(struct.getTypeName());
}
+
assertTrue(allTraitNames.contains(traitName));
assertTrue(allTraitNames.contains(superTraitName));
assertTrue(allTraitNames.contains(superSuperTraitName));
String anotherTraitName = "Trait" + randomString();
+
createTrait(anotherTraitName, superTraitName);
- traitInstance = new Struct(anotherTraitName);
- traitInstanceJSON = InstanceSerialization.toJson(traitInstance, true);
+ traitInstance = new Struct(anotherTraitName);
+ traitInstanceJSON = AtlasType.toV1Json(traitInstance);
+
LOG.debug("Trait instance = {}", traitInstanceJSON);
atlasClientV1.addTrait(guid, traitInstance);
- entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME,
- newNotificationPredicate(EntityNotification.OperationType.TRAIT_ADD, HIVE_TABLE_TYPE_BUILTIN, guid));
+ entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME, newNotificationPredicate(OperationType.TRAIT_ADD, HIVE_TABLE_TYPE_BUILTIN, guid));
- allTraits = entityNotification.getAllTraits();
+ allTraits = entityNotification.getAllTraits();
allTraitNames = new LinkedList<>();
- for (IStruct struct : allTraits) {
+ for (Struct struct : allTraits) {
allTraitNames.add(struct.getTypeName());
}
+
assertTrue(allTraitNames.contains(traitName));
assertTrue(allTraitNames.contains(anotherTraitName));
// verify that the super type shows up twice in all traits
@@ -171,21 +169,25 @@ public class EntityNotificationIT extends BaseResourceIT {
atlasClientV1.deleteTrait(guid, traitName);
- EntityNotification entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME,
- newNotificationPredicate(EntityNotification.OperationType.TRAIT_DELETE, HIVE_TABLE_TYPE_BUILTIN, guid));
+ EntityNotificationV1 entityNotification = waitForNotification(notificationConsumer, MAX_WAIT_TIME,
+ newNotificationPredicate(EntityNotificationV1.OperationType.TRAIT_DELETE, HIVE_TABLE_TYPE_BUILTIN, guid));
- assertFalse(entityNotification.getEntity().getTraits().contains(traitName));
+ assertFalse(entityNotification.getEntity().getTraitNames().contains(traitName));
}
// ----- helper methods ---------------------------------------------------
private void createTrait(String traitName, String ... superTraitNames) throws Exception {
- HierarchicalTypeDefinition<TraitType> trait =
- TypesUtil.createTraitTypeDef(traitName, ImmutableSet.copyOf(superTraitNames));
+ TraitTypeDefinition traitDef = TypesUtil.createTraitTypeDef(traitName, null, new HashSet<>(Arrays.asList(superTraitNames)));
+ TypesDef typesDef = new TypesDef(Collections.<EnumTypeDefinition>emptyList(),
+ Collections.<StructTypeDefinition>emptyList(),
+ Collections.singletonList(traitDef),
+ Collections.<ClassTypeDefinition>emptyList());
+ String traitDefinitionJSON = AtlasType.toV1Json(typesDef);
- String traitDefinitionJSON = TypesSerialization$.MODULE$.toJson(trait, true);
LOG.debug("Trait definition = {}", traitDefinitionJSON);
+
createType(traitDefinitionJSON);
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/notification/NotificationEntityChangeListenerTest.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/notification/NotificationEntityChangeListenerTest.java b/webapp/src/test/java/org/apache/atlas/notification/NotificationEntityChangeListenerTest.java
index a988915..084ebb1 100644
--- a/webapp/src/test/java/org/apache/atlas/notification/NotificationEntityChangeListenerTest.java
+++ b/webapp/src/test/java/org/apache/atlas/notification/NotificationEntityChangeListenerTest.java
@@ -18,11 +18,10 @@
package org.apache.atlas.notification;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.type.AtlasClassificationType;
+import org.apache.atlas.type.AtlasTypeRegistry;
import org.testng.annotations.Test;
import java.util.Collections;
@@ -41,45 +40,45 @@ public class NotificationEntityChangeListenerTest {
@Test
public void testGetAllTraitsSuperTraits() throws Exception {
- TypeSystem typeSystem = mock(TypeSystem.class);
+ AtlasTypeRegistry typeSystem = mock(AtlasTypeRegistry.class);
String traitName = "MyTrait";
- IStruct myTrait = new Struct(traitName);
+ Struct myTrait = new Struct(traitName);
String superTraitName = "MySuperTrait";
- TraitType traitDef = mock(TraitType.class);
+ AtlasClassificationType traitDef = mock(AtlasClassificationType.class);
Set<String> superTypeNames = Collections.singleton(superTraitName);
- TraitType superTraitDef = mock(TraitType.class);
+ AtlasClassificationType superTraitDef = mock(AtlasClassificationType.class);
Set<String> superSuperTypeNames = Collections.emptySet();
Referenceable entity = getEntity("id", myTrait);
- when(typeSystem.getDataType(TraitType.class, traitName)).thenReturn(traitDef);
- when(typeSystem.getDataType(TraitType.class, superTraitName)).thenReturn(superTraitDef);
+ when(typeSystem.getClassificationTypeByName(traitName)).thenReturn(traitDef);
+ when(typeSystem.getClassificationTypeByName(superTraitName)).thenReturn(superTraitDef);
- when(traitDef.getAllSuperTypeNames()).thenReturn(superTypeNames);
- when(superTraitDef.getAllSuperTypeNames()).thenReturn(superSuperTypeNames);
+ when(traitDef.getAllSuperTypes()).thenReturn(superTypeNames);
+ when(superTraitDef.getAllSuperTypes()).thenReturn(superSuperTypeNames);
- List<IStruct> allTraits = NotificationEntityChangeListener.getAllTraits(entity, typeSystem);
+ List<Struct> allTraits = NotificationEntityChangeListener.getAllTraits(entity, typeSystem);
assertEquals(2, allTraits.size());
- for (IStruct trait : allTraits) {
+ for (Struct trait : allTraits) {
String typeName = trait.getTypeName();
assertTrue(typeName.equals(traitName) || typeName.equals(superTraitName));
}
}
- private Referenceable getEntity(String id, IStruct... traits) {
+ private Referenceable getEntity(String id, Struct... traits) {
String typeName = "typeName";
Map<String, Object> values = new HashMap<>();
List<String> traitNames = new LinkedList<>();
- Map<String, IStruct> traitMap = new HashMap<>();
+ Map<String, Struct> traitMap = new HashMap<>();
- for (IStruct trait : traits) {
+ for (Struct trait : traits) {
String traitName = trait.getTypeName();
traitNames.add(traitName);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java
index d41db3e..f248593 100644
--- a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java
+++ b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerIT.java
@@ -20,14 +20,13 @@ package org.apache.atlas.notification;
import org.apache.atlas.EntityAuditEvent;
import org.apache.atlas.kafka.NotificationProvider;
-import org.apache.atlas.notification.hook.HookNotification;
-import org.apache.atlas.notification.hook.HookNotification.HookNotificationMessage;
-import org.apache.atlas.notification.hook.HookNotification.EntityDeleteRequest;
-import org.apache.atlas.notification.hook.HookNotification.EntityPartialUpdateRequest;
-import org.apache.atlas.notification.hook.HookNotification.EntityCreateRequest;
-import org.apache.atlas.notification.hook.HookNotification.EntityUpdateRequest;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityDeleteRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityPartialUpdateRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityUpdateRequest;
import org.apache.atlas.web.integration.BaseResourceIT;
import org.codehaus.jettison.json.JSONArray;
import org.testng.annotations.AfterClass;
@@ -40,18 +39,19 @@ import static java.lang.Thread.sleep;
import static org.testng.Assert.assertEquals;
public class NotificationHookConsumerIT extends BaseResourceIT {
-
private static final String TEST_USER = "testuser";
- public static final String NAME = "name";
- public static final String DESCRIPTION = "description";
+
+ public static final String NAME = "name";
+ public static final String DESCRIPTION = "description";
public static final String QUALIFIED_NAME = "qualifiedName";
- public static final String CLUSTER_NAME = "clusterName";
+ public static final String CLUSTER_NAME = "clusterName";
- private NotificationInterface notificationInterface = NotificationProvider.get();
+ private final NotificationInterface notificationInterface = NotificationProvider.get();
@BeforeClass
public void setUp() throws Exception {
super.setUp();
+
createTypeDefinitionsV1();
}
@@ -60,29 +60,33 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
notificationInterface.close();
}
- private void sendHookMessage(HookNotificationMessage message) throws NotificationException, InterruptedException {
+ private void sendHookMessage(HookNotification message) throws NotificationException, InterruptedException {
notificationInterface.send(NotificationInterface.NotificationType.HOOK, message);
+
sleep(1000);
}
@Test
public void testMessageHandleFailureConsumerContinues() throws Exception {
//send invalid message - update with invalid type
- sendHookMessage(new HookNotification.EntityPartialUpdateRequest(TEST_USER, randomString(), null, null,
- new Referenceable(randomString())));
+ sendHookMessage(new EntityPartialUpdateRequest(TEST_USER, randomString(), null, null, new Referenceable(randomString())));
//send valid message
final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
- String dbName = "db" + randomString();
+ final String dbName = "db" + randomString();
+
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
entity.set(QUALIFIED_NAME, dbName);
entity.set(CLUSTER_NAME, randomString());
+
sendHookMessage(new EntityCreateRequest(TEST_USER, entity));
+
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE_BUILTIN, entity.get(NAME)));
+
return results.length() == 1;
}
});
@@ -91,24 +95,28 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
@Test
public void testCreateEntity() throws Exception {
final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
- String dbName = "db" + randomString();
+ final String dbName = "db" + randomString();
+
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
entity.set(QUALIFIED_NAME, dbName);
entity.set(CLUSTER_NAME, randomString());
sendHookMessage(new EntityCreateRequest(TEST_USER, entity));
+
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, entity.get(QUALIFIED_NAME)));
+
return results.length() == 1;
}
});
//Assert that user passed in hook message is used in audit
- Referenceable instance = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, (String) entity.get(QUALIFIED_NAME));
- List<EntityAuditEvent> events = atlasClientV1.getEntityAuditEvents(instance.getId()._getId(), (short) 1);
+ Referenceable instance = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, (String) entity.get(QUALIFIED_NAME));
+ List<EntityAuditEvent> events = atlasClientV1.getEntityAuditEvents(instance.getId()._getId(), (short) 1);
+
assertEquals(events.size(), 1);
assertEquals(events.get(0).getUser(), TEST_USER);
}
@@ -116,7 +124,8 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
@Test
public void testUpdateEntityPartial() throws Exception {
final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
- final String dbName = "db" + randomString();
+ final String dbName = "db" + randomString();
+
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
entity.set(QUALIFIED_NAME, dbName);
@@ -125,25 +134,31 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
atlasClientV1.createEntity(entity);
final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN);
+
newEntity.set("owner", randomString());
+
sendHookMessage(new EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName, newEntity));
+
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
Referenceable localEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName);
+
return (localEntity.get("owner") != null && localEntity.get("owner").equals(newEntity.get("owner")));
}
});
//Its partial update and un-set fields are not updated
Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName);
+
assertEquals(actualEntity.get(DESCRIPTION), entity.get(DESCRIPTION));
}
@Test
public void testUpdatePartialUpdatingQualifiedName() throws Exception {
final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
- final String dbName = "db" + randomString();
+ final String dbName = "db" + randomString();
+
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
entity.set(QUALIFIED_NAME, dbName);
@@ -152,28 +167,32 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
atlasClientV1.createEntity(entity);
final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN);
- final String newName = "db" + randomString();
+ final String newName = "db" + randomString();
+
newEntity.set(QUALIFIED_NAME, newName);
sendHookMessage(new EntityPartialUpdateRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName, newEntity));
+
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, newName));
+
return results.length() == 1;
}
});
//no entity with the old qualified name
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, dbName));
- assertEquals(results.length(), 0);
+ assertEquals(results.length(), 0);
}
@Test
public void testDeleteByQualifiedName() throws Exception {
- Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
- final String dbName = "db" + randomString();
+ final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
+ final String dbName = "db" + randomString();
+
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
entity.set(QUALIFIED_NAME, dbName);
@@ -182,10 +201,12 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
final String dbId = atlasClientV1.createEntity(entity).get(0);
sendHookMessage(new EntityDeleteRequest(TEST_USER, DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName));
+
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
Referenceable getEntity = atlasClientV1.getEntity(dbId);
+
return getEntity.getId().getState() == Id.EntityState.DELETED;
}
});
@@ -193,8 +214,9 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
@Test
public void testUpdateEntityFullUpdate() throws Exception {
- Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
- final String dbName = "db" + randomString();
+ final Referenceable entity = new Referenceable(DATABASE_TYPE_BUILTIN);
+ final String dbName = "db" + randomString();
+
entity.set(NAME, dbName);
entity.set(DESCRIPTION, randomString());
entity.set(QUALIFIED_NAME, dbName);
@@ -203,6 +225,7 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
atlasClientV1.createEntity(entity);
final Referenceable newEntity = new Referenceable(DATABASE_TYPE_BUILTIN);
+
newEntity.set(NAME, randomString());
newEntity.set(DESCRIPTION, randomString());
newEntity.set("owner", randomString());
@@ -211,18 +234,19 @@ public class NotificationHookConsumerIT extends BaseResourceIT {
//updating unique attribute
sendHookMessage(new EntityUpdateRequest(TEST_USER, newEntity));
+
waitFor(MAX_WAIT_TIME, new Predicate() {
@Override
public boolean evaluate() throws Exception {
JSONArray results = searchByDSL(String.format("%s where qualifiedName='%s'", DATABASE_TYPE_BUILTIN, newEntity.get(QUALIFIED_NAME)));
+
return results.length() == 1;
}
});
Referenceable actualEntity = atlasClientV1.getEntity(DATABASE_TYPE_BUILTIN, QUALIFIED_NAME, dbName);
+
assertEquals(actualEntity.get(DESCRIPTION), newEntity.get(DESCRIPTION));
assertEquals(actualEntity.get("owner"), newEntity.get("owner"));
}
-
-
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java
index eb37fa8..4ea13c7 100644
--- a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java
+++ b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerKafkaTest.java
@@ -25,14 +25,15 @@ import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.kafka.AtlasKafkaMessage;
import org.apache.atlas.kafka.KafkaNotification;
import org.apache.atlas.kafka.NotificationProvider;
-import org.apache.atlas.model.instance.AtlasEntity;
-import org.apache.atlas.notification.hook.HookNotification;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.notification.HookNotificationV1;
import org.apache.atlas.repository.converters.AtlasInstanceConverter;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.repository.store.graph.v1.EntityStream;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.web.service.ServiceState;
import org.apache.commons.lang.RandomStringUtils;
import org.mockito.Mock;
@@ -41,7 +42,7 @@ import org.testng.Assert;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
-import static org.apache.atlas.notification.hook.HookNotification.HookNotificationMessage;
+
import java.util.List;
import org.apache.atlas.kafka.AtlasKafkaConsumer;
@@ -57,11 +58,11 @@ import static org.testng.Assert.*;
public class NotificationHookConsumerKafkaTest {
-
- public static final String NAME = "name";
- public static final String DESCRIPTION = "description";
+ public static final String NAME = "name";
+ public static final String DESCRIPTION = "description";
public static final String QUALIFIED_NAME = "qualifiedName";
- private NotificationInterface notificationInterface = NotificationProvider.get();
+
+ private final NotificationInterface notificationInterface = NotificationProvider.get();
@Mock
@@ -81,10 +82,14 @@ public class NotificationHookConsumerKafkaTest {
@BeforeTest
public void setup() throws AtlasException, InterruptedException, AtlasBaseException {
MockitoAnnotations.initMocks(this);
- AtlasType mockType = mock(AtlasType.class);
+
+ AtlasType mockType = mock(AtlasType.class);
+ AtlasEntitiesWithExtInfo mockEntity = mock(AtlasEntitiesWithExtInfo.class);
+
when(typeRegistry.getType(anyString())).thenReturn(mockType);
- AtlasEntity.AtlasEntitiesWithExtInfo mockEntity = mock(AtlasEntity.AtlasEntitiesWithExtInfo.class);
+
when(instanceConverter.toAtlasEntities(anyList())).thenReturn(mockEntity);
+
kafkaNotification = startKafkaServer();
}
@@ -97,19 +102,20 @@ public class NotificationHookConsumerKafkaTest {
@Test
public void testConsumerConsumesNewMessageWithAutoCommitDisabled() throws AtlasException, InterruptedException, AtlasBaseException {
try {
- produceMessage(new HookNotification.EntityCreateRequest("test_user1", createEntity()));
+ produceMessage(new HookNotificationV1.EntityCreateRequest("test_user1", createEntity()));
- NotificationConsumer<HookNotificationMessage> consumer = createNewConsumer(kafkaNotification, false);
- NotificationHookConsumer notificationHookConsumer =
- new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
- NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
+ NotificationConsumer<HookNotification> consumer = createNewConsumer(kafkaNotification, false);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
+ NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
consumeOneMessage(consumer, hookConsumer);
+
verify(atlasEntityStore).createOrUpdate(any(EntityStream.class), anyBoolean());
// produce another message, and make sure it moves ahead. If commit succeeded, this would work.
- produceMessage(new HookNotification.EntityCreateRequest("test_user2", createEntity()));
+ produceMessage(new HookNotificationV1.EntityCreateRequest("test_user2", createEntity()));
consumeOneMessage(consumer, hookConsumer);
+
verify(atlasEntityStore,times(2)).createOrUpdate(any(EntityStream.class), anyBoolean());
reset(atlasEntityStore);
}
@@ -121,22 +127,20 @@ public class NotificationHookConsumerKafkaTest {
@Test(dependsOnMethods = "testConsumerConsumesNewMessageWithAutoCommitDisabled")
public void testConsumerRemainsAtSameMessageWithAutoCommitEnabled() throws Exception {
try {
- produceMessage(new HookNotification.EntityCreateRequest("test_user3", createEntity()));
+ produceMessage(new HookNotificationV1.EntityCreateRequest("test_user3", createEntity()));
- NotificationConsumer<HookNotificationMessage> consumer = createNewConsumer(kafkaNotification, true);
+ NotificationConsumer<HookNotification> consumer = createNewConsumer(kafkaNotification, true);
assertNotNull (consumer);
- NotificationHookConsumer notificationHookConsumer =
- new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
- NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
-
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
+ NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
consumeOneMessage(consumer, hookConsumer);
verify(atlasEntityStore).createOrUpdate(any(EntityStream.class), anyBoolean());
// produce another message, but this will not be consumed, as commit code is not executed in hook consumer.
- produceMessage(new HookNotification.EntityCreateRequest("test_user4", createEntity()));
+ produceMessage(new HookNotificationV1.EntityCreateRequest("test_user4", createEntity()));
consumeOneMessage(consumer, hookConsumer);
verify(atlasEntityStore,times(2)).createOrUpdate(any(EntityStream.class), anyBoolean());
@@ -146,18 +150,19 @@ public class NotificationHookConsumerKafkaTest {
}
}
- AtlasKafkaConsumer<HookNotificationMessage> createNewConsumer(KafkaNotification kafkaNotification, boolean autoCommitEnabled) {
+ AtlasKafkaConsumer<HookNotification> createNewConsumer(KafkaNotification kafkaNotification, boolean autoCommitEnabled) {
return (AtlasKafkaConsumer) kafkaNotification.createConsumers(NotificationInterface.NotificationType.HOOK, 1, autoCommitEnabled).get(0);
}
- void consumeOneMessage(NotificationConsumer<HookNotificationMessage> consumer,
+ void consumeOneMessage(NotificationConsumer<HookNotification> consumer,
NotificationHookConsumer.HookConsumer hookConsumer) throws InterruptedException {
try {
long startTime = System.currentTimeMillis(); //fetch starting time
+
while ((System.currentTimeMillis() - startTime) < 10000) {
- List<AtlasKafkaMessage<HookNotificationMessage>> messages = consumer.receive();
+ List<AtlasKafkaMessage<HookNotification>> messages = consumer.receive();
- for (AtlasKafkaMessage<HookNotificationMessage> msg : messages) {
+ for (AtlasKafkaMessage<HookNotification> msg : messages) {
hookConsumer.handleMessage(msg);
}
@@ -172,19 +177,25 @@ public class NotificationHookConsumerKafkaTest {
Referenceable createEntity() {
final Referenceable entity = new Referenceable(AtlasClient.DATA_SET_SUPER_TYPE);
+
entity.set(NAME, "db" + randomString());
entity.set(DESCRIPTION, randomString());
entity.set(QUALIFIED_NAME, randomString());
+
return entity;
}
KafkaNotification startKafkaServer() throws AtlasException, InterruptedException {
Configuration applicationProperties = ApplicationProperties.get();
+
applicationProperties.setProperty("atlas.kafka.data", "target/" + RandomStringUtils.randomAlphanumeric(5));
kafkaNotification = new KafkaNotification(applicationProperties);
+
kafkaNotification.start();
+
Thread.sleep(2000);
+
return kafkaNotification;
}
@@ -192,8 +203,7 @@ public class NotificationHookConsumerKafkaTest {
return RandomStringUtils.randomAlphanumeric(10);
}
- private void produceMessage(HookNotificationMessage message) throws NotificationException {
+ private void produceMessage(HookNotification message) throws NotificationException {
kafkaNotification.send(NotificationInterface.NotificationType.HOOK, message);
}
-
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerTest.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerTest.java b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerTest.java
index e2d1022..f8bd9a1 100644
--- a/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerTest.java
+++ b/webapp/src/test/java/org/apache/atlas/notification/NotificationHookConsumerTest.java
@@ -22,15 +22,17 @@ import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.ha.HAConfiguration;
import org.apache.atlas.kafka.AtlasKafkaMessage;
-import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
import org.apache.atlas.model.instance.EntityMutationResponse;
-import org.apache.atlas.notification.hook.HookNotification;
+import org.apache.atlas.model.notification.HookNotification.HookNotificationType;
+import org.apache.atlas.notification.NotificationInterface.NotificationType;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest;
import org.apache.atlas.repository.converters.AtlasInstanceConverter;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.repository.store.graph.v1.EntityStream;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.web.service.ServiceState;
import org.apache.commons.configuration.Configuration;
import org.apache.kafka.common.TopicPartition;
@@ -43,6 +45,7 @@ import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutorService;
@@ -75,20 +78,24 @@ public class NotificationHookConsumerTest {
@BeforeMethod
public void setup() throws AtlasBaseException {
MockitoAnnotations.initMocks(this);
- AtlasType mockType = mock(AtlasType.class);
+
+ AtlasType mockType = mock(AtlasType.class);
+ AtlasEntitiesWithExtInfo mockEntity = mock(AtlasEntitiesWithExtInfo.class);
+
when(typeRegistry.getType(anyString())).thenReturn(mockType);
- AtlasEntity.AtlasEntitiesWithExtInfo mockEntity = mock(AtlasEntity.AtlasEntitiesWithExtInfo.class);
when(instanceConverter.toAtlasEntities(anyList())).thenReturn(mockEntity);
+
EntityMutationResponse mutationResponse = mock(EntityMutationResponse.class);
+
when(atlasEntityStore.createOrUpdate(any(EntityStream.class), anyBoolean())).thenReturn(mutationResponse);
}
@Test
public void testConsumerCanProceedIfServerIsReady() throws Exception {
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
- NotificationHookConsumer.HookConsumer hookConsumer =
- notificationHookConsumer.new HookConsumer(mock(NotificationConsumer.class));
- NotificationHookConsumer.Timer timer = mock(NotificationHookConsumer.Timer.class);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
+ NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(mock(NotificationConsumer.class));
+ NotificationHookConsumer.Timer timer = mock(NotificationHookConsumer.Timer.class);
+
when(serviceState.getState()).thenReturn(ServiceState.ServiceStateValue.ACTIVE);
assertTrue(hookConsumer.serverAvailable(timer));
@@ -98,10 +105,9 @@ public class NotificationHookConsumerTest {
@Test
public void testConsumerWaitsNTimesIfServerIsNotReadyNTimes() throws Exception {
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
- NotificationHookConsumer.HookConsumer hookConsumer =
- notificationHookConsumer.new HookConsumer(mock(NotificationConsumer.class));
- NotificationHookConsumer.Timer timer = mock(NotificationHookConsumer.Timer.class);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
+ NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(mock(NotificationConsumer.class));
+ NotificationHookConsumer.Timer timer = mock(NotificationHookConsumer.Timer.class);
when(serviceState.getState())
.thenReturn(ServiceState.ServiceStateValue.PASSIVE)
@@ -116,35 +122,30 @@ public class NotificationHookConsumerTest {
@Test
public void testCommitIsCalledWhenMessageIsProcessed() throws AtlasServiceException, AtlasException {
- NotificationHookConsumer notificationHookConsumer =
- new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
- NotificationConsumer consumer = mock(NotificationConsumer.class);
- NotificationHookConsumer.HookConsumer hookConsumer =
- notificationHookConsumer.new HookConsumer(consumer);
- HookNotification.EntityCreateRequest message = mock(HookNotification.EntityCreateRequest.class);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
+ NotificationConsumer consumer = mock(NotificationConsumer.class);
+ NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
+ EntityCreateRequest message = mock(EntityCreateRequest.class);
+ Referenceable mock = mock(Referenceable.class);
+
when(message.getUser()).thenReturn("user");
- when(message.getType()).thenReturn(HookNotification.HookNotificationType.ENTITY_CREATE);
- Referenceable mock = mock(Referenceable.class);
+ when(message.getType()).thenReturn(HookNotificationType.ENTITY_CREATE);
when(message.getEntities()).thenReturn(Arrays.asList(mock));
hookConsumer.handleMessage(new AtlasKafkaMessage(message, -1, -1));
+
verify(consumer).commit(any(TopicPartition.class), anyInt());
}
@Test
public void testCommitIsNotCalledEvenWhenMessageProcessingFails() throws AtlasServiceException, AtlasException, AtlasBaseException {
- NotificationHookConsumer notificationHookConsumer =
- new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
- NotificationConsumer consumer = mock(NotificationConsumer.class);
- NotificationHookConsumer.HookConsumer hookConsumer =
- notificationHookConsumer.new HookConsumer(consumer);
- HookNotification.EntityCreateRequest message = new HookNotification.EntityCreateRequest("user",
- new ArrayList<Referenceable>() {
- {
- add(mock(Referenceable.class));
- }
- });
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
+ NotificationConsumer consumer = mock(NotificationConsumer.class);
+ NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(consumer);
+ EntityCreateRequest message = new EntityCreateRequest("user", Collections.singletonList(mock(Referenceable.class)));
+
when(atlasEntityStore.createOrUpdate(any(EntityStream.class), anyBoolean())).thenThrow(new RuntimeException("Simulating exception in processing message"));
+
hookConsumer.handleMessage(new AtlasKafkaMessage(message, -1, -1));
verifyZeroInteractions(consumer);
@@ -152,10 +153,10 @@ public class NotificationHookConsumerTest {
@Test
public void testConsumerProceedsWithFalseIfInterrupted() throws Exception {
- NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
- NotificationHookConsumer.HookConsumer hookConsumer =
- notificationHookConsumer.new HookConsumer(mock(NotificationConsumer.class));
- NotificationHookConsumer.Timer timer = mock(NotificationHookConsumer.Timer.class);
+ NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
+ NotificationHookConsumer.HookConsumer hookConsumer = notificationHookConsumer.new HookConsumer(mock(NotificationConsumer.class));
+ NotificationHookConsumer.Timer timer = mock(NotificationHookConsumer.Timer.class);
+
doThrow(new InterruptedException()).when(timer).sleep(NotificationHookConsumer.SERVER_READY_WAIT_TIME_MS);
when(serviceState.getState()).thenReturn(ServiceState.ServiceStateValue.PASSIVE);
@@ -164,58 +165,75 @@ public class NotificationHookConsumerTest {
@Test
public void testConsumersStartedIfHAIsDisabled() throws Exception {
+ List<NotificationConsumer<Object>> consumers = new ArrayList();
+ NotificationConsumer notificationConsumerMock = mock(NotificationConsumer.class);
+
+ consumers.add(notificationConsumerMock);
+
when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(false);
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
- List<NotificationConsumer<Object>> consumers = new ArrayList();
- consumers.add(mock(NotificationConsumer.class));
- when(notificationInterface.createConsumers(NotificationInterface.NotificationType.HOOK, 1)).
- thenReturn(consumers);
+ when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
+
NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
+
notificationHookConsumer.startInternal(configuration, executorService);
- verify(notificationInterface).createConsumers(NotificationInterface.NotificationType.HOOK, 1);
+
+ verify(notificationInterface).createConsumers(NotificationType.HOOK, 1);
verify(executorService).submit(any(NotificationHookConsumer.HookConsumer.class));
}
@Test
public void testConsumersAreNotStartedIfHAIsEnabled() throws Exception {
+ List<NotificationConsumer<Object>> consumers = new ArrayList();
+ NotificationConsumer notificationConsumerMock = mock(NotificationConsumer.class);
+
+ consumers.add(notificationConsumerMock);
+
when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
- List<NotificationConsumer<Object>> consumers = new ArrayList();
- consumers.add(mock(NotificationConsumer.class));
- when(notificationInterface.createConsumers(NotificationInterface.NotificationType.HOOK, 1)).
- thenReturn(consumers);
+ when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
+
NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
+
notificationHookConsumer.startInternal(configuration, executorService);
+
verifyZeroInteractions(notificationInterface);
}
@Test
public void testConsumersAreStartedWhenInstanceBecomesActive() throws Exception {
+ List<NotificationConsumer<Object>> consumers = new ArrayList();
+ NotificationConsumer notificationConsumerMock = mock(NotificationConsumer.class);
+
+ consumers.add(notificationConsumerMock);
+
when(configuration.containsKey(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY)).thenReturn(true);
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
- List<NotificationConsumer<Object>> consumers = new ArrayList();
- consumers.add(mock(NotificationConsumer.class));
- when(notificationInterface.createConsumers(NotificationInterface.NotificationType.HOOK, 1)).
- thenReturn(consumers);
+ when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
+
NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
+
notificationHookConsumer.startInternal(configuration, executorService);
notificationHookConsumer.instanceIsActive();
- verify(notificationInterface).createConsumers(NotificationInterface.NotificationType.HOOK, 1);
+
+ verify(notificationInterface).createConsumers(NotificationType.HOOK, 1);
verify(executorService).submit(any(NotificationHookConsumer.HookConsumer.class));
}
@Test
public void testConsumersAreStoppedWhenInstanceBecomesPassive() throws Exception {
+ List<NotificationConsumer<Object>> consumers = new ArrayList();
+ NotificationConsumer notificationConsumerMock = mock(NotificationConsumer.class);
+
+ consumers.add(notificationConsumerMock);
+
when(serviceState.getState()).thenReturn(ServiceState.ServiceStateValue.ACTIVE);
when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(true);
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
- List<NotificationConsumer<Object>> consumers = new ArrayList();
- NotificationConsumer notificationConsumerMock = mock(NotificationConsumer.class);
- consumers.add(notificationConsumerMock);
+ when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
- when(notificationInterface.createConsumers(NotificationInterface.NotificationType.HOOK, 1)).thenReturn(consumers);
final NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
doAnswer(new Answer() {
@@ -223,12 +241,14 @@ public class NotificationHookConsumerTest {
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
notificationHookConsumer.consumers.get(0).start();
Thread.sleep(500);
+
return null;
}
}).when(executorService).submit(any(NotificationHookConsumer.HookConsumer.class));
notificationHookConsumer.startInternal(configuration, executorService);
notificationHookConsumer.instanceIsPassive();
+
verify(notificationInterface).close();
verify(executorService).shutdown();
verify(notificationConsumerMock).wakeup();
@@ -236,18 +256,21 @@ public class NotificationHookConsumerTest {
@Test
public void consumersStoppedBeforeStarting() throws Exception {
+ List<NotificationConsumer<Object>> consumers = new ArrayList();
+ NotificationConsumer notificationConsumerMock = mock(NotificationConsumer.class);
+
+ consumers.add(notificationConsumerMock);
+
when(serviceState.getState()).thenReturn(ServiceState.ServiceStateValue.ACTIVE);
when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(true);
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
- List<NotificationConsumer<Object>> consumers = new ArrayList();
- NotificationConsumer notificationConsumerMock = mock(NotificationConsumer.class);
- consumers.add(notificationConsumerMock);
+ when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
- when(notificationInterface.createConsumers(NotificationInterface.NotificationType.HOOK, 1)).thenReturn(consumers);
final NotificationHookConsumer notificationHookConsumer = new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
notificationHookConsumer.startInternal(configuration, executorService);
notificationHookConsumer.instanceIsPassive();
+
verify(notificationInterface).close();
verify(executorService).shutdown();
}
@@ -261,13 +284,16 @@ public class NotificationHookConsumerTest {
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
notificationHookConsumer.consumers.get(0).start();
Thread.sleep(1000);
+
return null;
}
}).when(executorService).submit(any(NotificationHookConsumer.HookConsumer.class));
notificationHookConsumer.startInternal(configuration, executorService);
Thread.sleep(1000);
+
assertTrue(notificationHookConsumer.consumers.get(0).isAlive());
+
notificationHookConsumer.consumers.get(0).shutdown();
}
@@ -280,27 +306,32 @@ public class NotificationHookConsumerTest {
public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
notificationHookConsumer.consumers.get(0).start();
Thread.sleep(500);
+
return null;
}
}).when(executorService).submit(any(NotificationHookConsumer.HookConsumer.class));
notificationHookConsumer.startInternal(configuration, executorService);
Thread.sleep(500);
+
notificationHookConsumer.consumers.get(0).shutdown();
Thread.sleep(500);
+
assertFalse(notificationHookConsumer.consumers.get(0).isAlive());
}
private NotificationHookConsumer setupNotificationHookConsumer() throws AtlasException {
+ List<NotificationConsumer<Object>> consumers = new ArrayList();
+ NotificationConsumer notificationConsumerMock = mock(NotificationConsumer.class);
+
+ consumers.add(notificationConsumerMock);
+
when(serviceState.getState()).thenReturn(ServiceState.ServiceStateValue.ACTIVE);
when(configuration.getBoolean(HAConfiguration.ATLAS_SERVER_HA_ENABLED_KEY, false)).thenReturn(true);
when(configuration.getInt(NotificationHookConsumer.CONSUMER_THREADS_PROPERTY, 1)).thenReturn(1);
- List<NotificationConsumer<Object>> consumers = new ArrayList();
- NotificationConsumer notificationConsumerMock = mock(NotificationConsumer.class);
when(notificationConsumerMock.receive()).thenThrow(new IllegalStateException());
- consumers.add(notificationConsumerMock);
+ when(notificationInterface.createConsumers(NotificationType.HOOK, 1)).thenReturn(consumers);
- when(notificationInterface.createConsumers(NotificationInterface.NotificationType.HOOK, 1)).thenReturn(consumers);
return new NotificationHookConsumer(notificationInterface, atlasEntityStore, serviceState, instanceConverter, typeRegistry);
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/util/RestUtilsTest.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/util/RestUtilsTest.java b/webapp/src/test/java/org/apache/atlas/util/RestUtilsTest.java
index 87259df..0d4af1e 100644
--- a/webapp/src/test/java/org/apache/atlas/util/RestUtilsTest.java
+++ b/webapp/src/test/java/org/apache/atlas/util/RestUtilsTest.java
@@ -22,12 +22,10 @@ import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
import org.apache.atlas.model.typedef.AtlasEntityDef;
import org.apache.atlas.model.typedef.AtlasStructDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
@@ -44,24 +42,12 @@ import org.apache.atlas.type.AtlasStructType.AtlasAttribute;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.atlas.type.AtlasTypeRegistry.AtlasTransientTypeRegistry;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.v1.model.typedef.*;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.testng.Assert;
import org.testng.annotations.Test;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-
/**
* Validates that conversion from V1 to legacy types (and back) is consistent. This also tests
* that the conversion logic in AtlasStructDefStoreV1 is consistent with the conversion logic
@@ -76,13 +62,13 @@ public class RestUtilsTest {
// in tables attribute in "database" type is lost. See ATLAS-1528.
public void testBidirectonalCompositeMappingConsistent() throws AtlasBaseException {
- HierarchicalTypeDefinition<ClassType> dbV1Type = TypesUtil.createClassTypeDef("database",
- ImmutableSet.<String> of(), new AttributeDefinition("tables", DataTypes.arrayTypeName("table"),
- Multiplicity.OPTIONAL, true, "containingDatabase"));
+ ClassTypeDefinition dbV1Type = TypesUtil.createClassTypeDef("database", "", Collections.emptySet(),
+ new AttributeDefinition("tables", AtlasBaseTypeDef.getArrayTypeName("table"),
+ Multiplicity.OPTIONAL, true, "containingDatabase"));
- HierarchicalTypeDefinition<ClassType> tableV1Type = TypesUtil.createClassTypeDef("table",
- ImmutableSet.<String> of(),
- new AttributeDefinition("containingDatabase", "database", Multiplicity.OPTIONAL, false, "tables"));
+ ClassTypeDefinition tableV1Type = TypesUtil.createClassTypeDef("table", "", Collections.emptySet(),
+ new AttributeDefinition("containingDatabase", "database",
+ Multiplicity.OPTIONAL, false, "tables"));
testV1toV2toV1Conversion(Arrays.asList(dbV1Type, tableV1Type), new boolean[] { true, false });
}
@@ -92,121 +78,118 @@ public class RestUtilsTest {
// "containingDatabase" is lost
// in "table" attribute in "database". See ATLAS-1528.
public void testBidirectonalNonCompositeMappingConsistent() throws AtlasBaseException {
+ ClassTypeDefinition dbV1Type = TypesUtil.createClassTypeDef("database", "", Collections.emptySet(),
+ new AttributeDefinition("tables", AtlasBaseTypeDef.getArrayTypeName("table"),
+ Multiplicity.OPTIONAL, false, "containingDatabase"));
- HierarchicalTypeDefinition<ClassType> dbV1Type = TypesUtil.createClassTypeDef("database",
- ImmutableSet.<String> of(), new AttributeDefinition("tables", DataTypes.arrayTypeName("table"),
- Multiplicity.OPTIONAL, false, "containingDatabase"));
-
- HierarchicalTypeDefinition<ClassType> tableV1Type = TypesUtil.createClassTypeDef("table",
- ImmutableSet.<String> of(),
- new AttributeDefinition("containingDatabase", "database", Multiplicity.OPTIONAL, false, "tables"));
+ ClassTypeDefinition tableV1Type = TypesUtil.createClassTypeDef("table", "", Collections.emptySet(),
+ new AttributeDefinition("containingDatabase", "database",
+ Multiplicity.OPTIONAL, false, "tables"));
testV1toV2toV1Conversion(Arrays.asList(dbV1Type, tableV1Type), new boolean[] { false, false });
}
private AtlasTypeDefGraphStoreV1 makeTypeStore(AtlasTypeRegistry reg) {
-
AtlasTypeDefGraphStoreV1 result = mock(AtlasTypeDefGraphStoreV1.class);
for (AtlasEntityType type : reg.getAllEntityTypes()) {
- String typeName = type.getTypeName();
+ String typeName = type.getTypeName();
AtlasVertex typeVertex = mock(AtlasVertex.class);
+
when(result.isTypeVertex(eq(typeVertex), any(TypeCategory.class))).thenReturn(true);
- when(typeVertex.getProperty(eq(Constants.TYPE_CATEGORY_PROPERTY_KEY), eq(TypeCategory.class)))
- .thenReturn(TypeCategory.CLASS);
+ when(typeVertex.getProperty(eq(Constants.TYPE_CATEGORY_PROPERTY_KEY), eq(TypeCategory.class))).thenReturn(TypeCategory.CLASS);
String attributeListPropertyKey = AtlasGraphUtilsV1.getTypeDefPropertyKey(typeName);
- when(typeVertex.getProperty(eq(attributeListPropertyKey), eq(List.class)))
- .thenReturn(new ArrayList<>(type.getAllAttributes().keySet()));
+
+ when(typeVertex.getProperty(eq(attributeListPropertyKey), eq(List.class))).thenReturn(new ArrayList<>(type.getAllAttributes().keySet()));
+
for (AtlasAttribute attribute : type.getAllAttributes().values()) {
String attributeDefPropertyKey = AtlasGraphUtilsV1.getTypeDefPropertyKey(typeName, attribute.getName());
- String attributeJson = AtlasStructDefStoreV1.toJsonFromAttribute(attribute);
+ String attributeJson = AtlasStructDefStoreV1.toJsonFromAttribute(attribute);
+
when(typeVertex.getProperty(eq(attributeDefPropertyKey), eq(String.class))).thenReturn(attributeJson);
}
+
when(result.findTypeVertexByName(eq(typeName))).thenReturn(typeVertex);
}
+
return result;
}
- private AtlasAttributeDef convertToJsonAndBack(AtlasTypeRegistry registry, AtlasStructDef structDef,
- AtlasAttributeDef attributeDef, boolean compositeExpected) throws AtlasBaseException {
-
+ private AtlasAttributeDef convertToJsonAndBack(AtlasTypeRegistry registry, AtlasStructDef structDef, AtlasAttributeDef attributeDef, boolean compositeExpected) throws AtlasBaseException {
AtlasTypeDefGraphStoreV1 typeDefStore = makeTypeStore(registry);
- AtlasStructType structType = (AtlasStructType) registry.getType(structDef.getName());
- AtlasAttribute attribute = structType.getAttribute(attributeDef.getName());
- String attribJson = AtlasStructDefStoreV1.toJsonFromAttribute(attribute);
+ AtlasStructType structType = (AtlasStructType) registry.getType(structDef.getName());
+ AtlasAttribute attribute = structType.getAttribute(attributeDef.getName());
+ String attribJson = AtlasStructDefStoreV1.toJsonFromAttribute(attribute);
+ Map attrInfo = AtlasType.fromJson(attribJson, Map.class);
- Map attrInfo = AtlasType.fromJson(attribJson, Map.class);
Assert.assertEquals(attrInfo.get("isComposite"), compositeExpected);
+
return AtlasStructDefStoreV1.toAttributeDefFromJson(structDef, attrInfo, typeDefStore);
}
- private void testV1toV2toV1Conversion(List<HierarchicalTypeDefinition<ClassType>> typesToTest,
- boolean[] compositeExpected) throws AtlasBaseException {
-
+ private void testV1toV2toV1Conversion(List<ClassTypeDefinition> typesToTest, boolean[] compositeExpected) throws AtlasBaseException {
List<AtlasEntityDef> convertedEntityDefs = convertV1toV2(typesToTest);
+ AtlasTypeRegistry registry = createRegistry(convertedEntityDefs);
- AtlasTypeRegistry registry = createRegistry(convertedEntityDefs);
for(int i = 0 ; i < convertedEntityDefs.size(); i++) {
AtlasEntityDef def = convertedEntityDefs.get(i);
+
for (AtlasAttributeDef attrDef : def.getAttributeDefs()) {
AtlasAttributeDef converted = convertToJsonAndBack(registry, def, attrDef, compositeExpected[i]);
+
Assert.assertEquals(converted, attrDef);
}
}
- List<HierarchicalTypeDefinition<ClassType>> convertedBackTypeDefs = convertV2toV1(convertedEntityDefs);
+ List<ClassTypeDefinition> convertedBackTypeDefs = convertV2toV1(convertedEntityDefs);
for (int i = 0; i < typesToTest.size(); i++) {
+ ClassTypeDefinition convertedBack = convertedBackTypeDefs.get(i);
- HierarchicalTypeDefinition<ClassType> convertedBack = convertedBackTypeDefs.get(i);
Assert.assertEquals(convertedBack, typesToTest.get(i));
- AttributeDefinition[] attributeDefinitions = convertedBack.attributeDefinitions;
- if (attributeDefinitions.length > 0) {
- Assert.assertEquals(attributeDefinitions[0].isComposite, compositeExpected[i]);
+
+ List<AttributeDefinition> attributeDefinitions = convertedBack.getAttributeDefinitions();
+
+ if (attributeDefinitions.size() > 0) {
+ Assert.assertEquals(attributeDefinitions.get(0).getIsComposite(), compositeExpected[i]);
}
}
-
}
- private List<HierarchicalTypeDefinition<ClassType>> convertV2toV1(List<AtlasEntityDef> toConvert)
- throws AtlasBaseException {
-
- AtlasTypeRegistry reg = createRegistry(toConvert);
+ private List<ClassTypeDefinition> convertV2toV1(List<AtlasEntityDef> toConvert) throws AtlasBaseException {
+ AtlasTypeRegistry reg = createRegistry(toConvert);
+ List<ClassTypeDefinition> result = new ArrayList<>(toConvert.size());
- List<HierarchicalTypeDefinition<ClassType>> result = new ArrayList<>(toConvert.size());
for (int i = 0; i < toConvert.size(); i++) {
- AtlasEntityDef entityDef = toConvert.get(i);
- AtlasEntityType entity = reg.getEntityTypeByName(entityDef.getName());
- HierarchicalTypeDefinition<ClassType> converted = TypeConverterUtil.toTypesDef(entity, reg)
- .classTypesAsJavaList().get(0);
+ AtlasEntityDef entityDef = toConvert.get(i);
+ AtlasEntityType entity = reg.getEntityTypeByName(entityDef.getName());
+ ClassTypeDefinition converted = TypeConverterUtil.toTypesDef(entity, reg).getClassTypes().get(0);
+
result.add(converted);
}
+
return result;
}
private AtlasTypeRegistry createRegistry(List<AtlasEntityDef> toConvert) throws AtlasBaseException {
- AtlasTypeRegistry reg = new AtlasTypeRegistry();
+ AtlasTypeRegistry reg = new AtlasTypeRegistry();
AtlasTransientTypeRegistry tmp = reg.lockTypeRegistryForUpdate();
+
tmp.addTypes(toConvert);
reg.releaseTypeRegistryForUpdate(tmp, true);
+
return reg;
}
- private List<AtlasEntityDef> convertV1toV2(List<HierarchicalTypeDefinition<ClassType>> types)
- throws AtlasBaseException {
-
- ImmutableList<HierarchicalTypeDefinition<ClassType>> classTypeList = ImmutableList
- .<HierarchicalTypeDefinition<ClassType>> builder().addAll(types).build();
-
- TypesDef toConvert = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition> of(),
- ImmutableList.<StructTypeDefinition> of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>> of(),
- classTypeList);
+ private List<AtlasEntityDef> convertV1toV2(List<ClassTypeDefinition> types) throws AtlasBaseException {
+ List<ClassTypeDefinition> classTypeList = new ArrayList(types);
+ TypesDef toConvert = new TypesDef(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(), classTypeList);
+ String json = AtlasType.toV1Json(toConvert);
+ AtlasTypeRegistry emptyRegistry = new AtlasTypeRegistry();
+ AtlasTypesDef converted = TypeConverterUtil.toAtlasTypesDef(json, emptyRegistry);
+ List<AtlasEntityDef> convertedEntityDefs = converted.getEntityDefs();
- String json = TypesSerialization.toJson(toConvert);
- AtlasTypeRegistry emptyRegistry = new AtlasTypeRegistry();
- AtlasTypesDef converted = TypeConverterUtil.toAtlasTypesDef(json, emptyRegistry);
- List<AtlasEntityDef> convertedEntityDefs = converted.getEntityDefs();
return convertedEntityDefs;
}
}
[04/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/notification/NotificationEntityChangeListener.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/notification/NotificationEntityChangeListener.java b/webapp/src/main/java/org/apache/atlas/notification/NotificationEntityChangeListener.java
index 53acf56..4633de9 100644
--- a/webapp/src/main/java/org/apache/atlas/notification/NotificationEntityChangeListener.java
+++ b/webapp/src/main/java/org/apache/atlas/notification/NotificationEntityChangeListener.java
@@ -21,44 +21,34 @@ import com.google.common.annotations.VisibleForTesting;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasException;
import org.apache.atlas.listener.EntityChangeListener;
-import org.apache.atlas.notification.entity.EntityNotification;
-import org.apache.atlas.notification.entity.EntityNotificationImpl;
+import org.apache.atlas.notification.NotificationInterface.NotificationType;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.v1.model.notification.EntityNotificationV1;
+import org.apache.atlas.v1.model.notification.EntityNotificationV1.OperationType;
import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.types.FieldMapping;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.TypeSystem;
+import org.apache.atlas.type.AtlasClassificationType;
+import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.configuration.Configuration;
import org.springframework.stereotype.Component;
import javax.inject.Inject;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
/**
* Listen to the repository for entity changes and produce entity change notifications.
*/
@Component
public class NotificationEntityChangeListener implements EntityChangeListener {
+ private static final String ATLAS_ENTITY_NOTIFICATION_PROPERTY = "atlas.notification.entity";
- private final NotificationInterface notificationInterface;
- private final TypeSystem typeSystem;
+ private final NotificationInterface notificationInterface;
+ private final AtlasTypeRegistry typeRegistry;
+ private final Map<String, List<String>> notificationAttributesCache = new HashMap<>();
- private Map<String, List<String>> notificationAttributesCache = new HashMap<>();
- private static final String ATLAS_ENTITY_NOTIFICATION_PROPERTY = "atlas.notification.entity";
- static Configuration APPLICATION_PROPERTIES = null;
+ private static Configuration APPLICATION_PROPERTIES = null;
@@ -68,45 +58,45 @@ public class NotificationEntityChangeListener implements EntityChangeListener {
* Construct a NotificationEntityChangeListener.
*
* @param notificationInterface the notification framework interface
- * @param typeSystem the Atlas type system
+ * @param typeRegistry the Atlas type system
*/
@Inject
- public NotificationEntityChangeListener(NotificationInterface notificationInterface, TypeSystem typeSystem) {
+ public NotificationEntityChangeListener(NotificationInterface notificationInterface, AtlasTypeRegistry typeRegistry) {
this.notificationInterface = notificationInterface;
- this.typeSystem = typeSystem;
+ this.typeRegistry = typeRegistry;
}
// ----- EntityChangeListener ----------------------------------------------
@Override
- public void onEntitiesAdded(Collection<ITypedReferenceableInstance> entities, boolean isImport) throws AtlasException {
- notifyOfEntityEvent(entities, EntityNotification.OperationType.ENTITY_CREATE);
+ public void onEntitiesAdded(Collection<Referenceable> entities, boolean isImport) throws AtlasException {
+ notifyOfEntityEvent(entities, OperationType.ENTITY_CREATE);
}
@Override
- public void onEntitiesUpdated(Collection<ITypedReferenceableInstance> entities, boolean isImport) throws AtlasException {
- notifyOfEntityEvent(entities, EntityNotification.OperationType.ENTITY_UPDATE);
+ public void onEntitiesUpdated(Collection<Referenceable> entities, boolean isImport) throws AtlasException {
+ notifyOfEntityEvent(entities, OperationType.ENTITY_UPDATE);
}
@Override
- public void onTraitsAdded(ITypedReferenceableInstance entity, Collection<? extends IStruct> traits) throws AtlasException {
- notifyOfEntityEvent(Collections.singleton(entity), EntityNotification.OperationType.TRAIT_ADD);
+ public void onTraitsAdded(Referenceable entity, Collection<? extends Struct> traits) throws AtlasException {
+ notifyOfEntityEvent(Collections.singleton(entity), OperationType.TRAIT_ADD);
}
@Override
- public void onTraitsDeleted(ITypedReferenceableInstance entity, Collection<String> traitNames) throws AtlasException {
- notifyOfEntityEvent(Collections.singleton(entity), EntityNotification.OperationType.TRAIT_DELETE);
+ public void onTraitsDeleted(Referenceable entity, Collection<String> traitNames) throws AtlasException {
+ notifyOfEntityEvent(Collections.singleton(entity), OperationType.TRAIT_DELETE);
}
@Override
- public void onTraitsUpdated(ITypedReferenceableInstance entity, Collection<? extends IStruct> traits) throws AtlasException {
- notifyOfEntityEvent(Collections.singleton(entity), EntityNotification.OperationType.TRAIT_UPDATE);
+ public void onTraitsUpdated(Referenceable entity, Collection<? extends Struct> traits) throws AtlasException {
+ notifyOfEntityEvent(Collections.singleton(entity), OperationType.TRAIT_UPDATE);
}
@Override
- public void onEntitiesDeleted(Collection<ITypedReferenceableInstance> entities, boolean isImport) throws AtlasException {
- notifyOfEntityEvent(entities, EntityNotification.OperationType.ENTITY_DELETE);
+ public void onEntitiesDeleted(Collection<Referenceable> entities, boolean isImport) throws AtlasException {
+ notifyOfEntityEvent(entities, OperationType.ENTITY_DELETE);
}
@@ -115,57 +105,52 @@ public class NotificationEntityChangeListener implements EntityChangeListener {
// ----- helper methods ----------------------------------------------------
@VisibleForTesting
- public static List<IStruct> getAllTraits(IReferenceableInstance entityDefinition,
- TypeSystem typeSystem) throws AtlasException {
- List<IStruct> traitInfo = new LinkedList<>();
- for (String traitName : entityDefinition.getTraits()) {
- IStruct trait = entityDefinition.getTrait(traitName);
- String typeName = trait.getTypeName();
- Map<String, Object> valuesMap = trait.getValuesMap();
- traitInfo.add(new Struct(typeName, valuesMap));
- traitInfo.addAll(getSuperTraits(typeName, valuesMap, typeSystem));
- }
- return traitInfo;
- }
+ public static List<Struct> getAllTraits(Referenceable entityDefinition, AtlasTypeRegistry typeRegistry) throws AtlasException {
+ List<Struct> ret = new ArrayList<>();
- private static List<IStruct> getSuperTraits(
- String typeName, Map<String, Object> values, TypeSystem typeSystem) throws AtlasException {
+ for (String traitName : entityDefinition.getTraitNames()) {
+ Struct trait = entityDefinition.getTrait(traitName);
+ AtlasClassificationType traitType = typeRegistry.getClassificationTypeByName(traitName);
+ Set<String> superTypeNames = traitType != null ? traitType.getAllSuperTypes() : null;
- List<IStruct> superTypes = new LinkedList<>();
+ ret.add(trait);
- TraitType traitDef = typeSystem.getDataType(TraitType.class, typeName);
- Set<String> superTypeNames = traitDef.getAllSuperTypeNames();
+ if (CollectionUtils.isNotEmpty(superTypeNames)) {
+ for (String superTypeName : superTypeNames) {
+ Struct superTypeTrait = new Struct(superTypeName);
- for (String superTypeName : superTypeNames) {
- TraitType superTraitDef = typeSystem.getDataType(TraitType.class, superTypeName);
+ if (MapUtils.isNotEmpty(trait.getValues())) {
+ AtlasClassificationType superType = typeRegistry.getClassificationTypeByName(superTypeName);
- Map<String, Object> superTypeValues = new HashMap<>();
+ if (superType != null && MapUtils.isNotEmpty(superType.getAllAttributes())) {
+ Map<String, Object> superTypeTraitAttributes = new HashMap<>();
- FieldMapping fieldMapping = superTraitDef.fieldMapping();
+ for (Map.Entry<String, Object> attrEntry : trait.getValues().entrySet()) {
+ String attrName = attrEntry.getKey();
- if (fieldMapping != null) {
- Set<String> superTypeAttributeNames = fieldMapping.fields.keySet();
+ if (superType.getAllAttributes().containsKey(attrName)) {
+ superTypeTraitAttributes.put(attrName, attrEntry.getValue());
+ }
+ }
- for (String superTypeAttributeName : superTypeAttributeNames) {
- if (values.containsKey(superTypeAttributeName)) {
- superTypeValues.put(superTypeAttributeName, values.get(superTypeAttributeName));
+ superTypeTrait.setValues(superTypeTraitAttributes);
+ }
}
+
+ ret.add(superTypeTrait);
}
}
- IStruct superTrait = new Struct(superTypeName, superTypeValues);
- superTypes.add(superTrait);
- superTypes.addAll(getSuperTraits(superTypeName, values, typeSystem));
}
- return superTypes;
+ return ret;
}
// send notification of entity change
- private void notifyOfEntityEvent(Collection<ITypedReferenceableInstance> entityDefinitions,
- EntityNotification.OperationType operationType) throws AtlasException {
- List<EntityNotification> messages = new LinkedList<>();
+ private void notifyOfEntityEvent(Collection<Referenceable> entityDefinitions,
+ OperationType operationType) throws AtlasException {
+ List<EntityNotificationV1> messages = new ArrayList<>();
- for (IReferenceableInstance entityDefinition : entityDefinitions) {
+ for (Referenceable entityDefinition : entityDefinitions) {
if(GraphHelper.isInternalType(entityDefinition.getTypeName())) {
continue;
}
@@ -182,13 +167,13 @@ public class NotificationEntityChangeListener implements EntityChangeListener {
}
}
- EntityNotificationImpl notification = new EntityNotificationImpl(entity, operationType, getAllTraits(entity, typeSystem));
+ EntityNotificationV1 notification = new EntityNotificationV1(entity, operationType, getAllTraits(entity, typeRegistry));
messages.add(notification);
}
if (!messages.isEmpty()) {
- notificationInterface.send(NotificationInterface.NotificationType.ENTITIES, messages);
+ notificationInterface.send(NotificationType.ENTITIES, messages);
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java b/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java
index 4646bff..456a778 100644
--- a/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java
+++ b/webapp/src/main/java/org/apache/atlas/notification/NotificationHookConsumer.java
@@ -25,17 +25,18 @@ import org.apache.atlas.AtlasBaseClient;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
import org.apache.atlas.AtlasServiceException;
-import org.apache.atlas.RequestContext;
import org.apache.atlas.RequestContextV1;
import org.apache.atlas.ha.HAConfiguration;
import org.apache.atlas.kafka.AtlasKafkaMessage;
import org.apache.atlas.listener.ActiveStateChangeHandler;
-import org.apache.atlas.model.instance.AtlasEntity;
-import org.apache.atlas.notification.hook.HookNotification.EntityCreateRequest;
-import org.apache.atlas.notification.hook.HookNotification.EntityDeleteRequest;
-import org.apache.atlas.notification.hook.HookNotification.EntityPartialUpdateRequest;
-import org.apache.atlas.notification.hook.HookNotification.EntityUpdateRequest;
-import org.apache.atlas.notification.hook.HookNotification.HookNotificationMessage;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
+import org.apache.atlas.model.notification.HookNotification;
+import org.apache.atlas.notification.NotificationInterface.NotificationType;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityCreateRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityDeleteRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityPartialUpdateRequest;
+import org.apache.atlas.v1.model.notification.HookNotificationV1.EntityUpdateRequest;
import org.apache.atlas.repository.converters.AtlasInstanceConverter;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.repository.store.graph.v1.AtlasEntityStream;
@@ -43,7 +44,6 @@ import org.apache.atlas.repository.store.graph.v1.AtlasGraphUtilsV1;
import org.apache.atlas.service.Service;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.utils.AtlasPerfTracer;
import org.apache.atlas.web.filters.AuditFilter;
import org.apache.atlas.web.service.ServiceState;
@@ -57,10 +57,7 @@ import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import javax.inject.Inject;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
+import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
@@ -77,37 +74,37 @@ import static org.apache.atlas.AtlasClientV2.API_V2.UPDATE_ENTITY_BY_ATTRIBUTE;
@Order(4)
@DependsOn(value = {"atlasTypeDefStoreInitializer", "atlasTypeDefGraphStoreV1"})
public class NotificationHookConsumer implements Service, ActiveStateChangeHandler {
- private static final Logger LOG = LoggerFactory.getLogger(NotificationHookConsumer.class);
- private static final Logger PERF_LOG = AtlasPerfTracer.getPerfLogger(NotificationHookConsumer.class);
- private static final String LOCALHOST = "localhost";
- private static Logger FAILED_LOG = LoggerFactory.getLogger("FAILED");
+ private static final Logger LOG = LoggerFactory.getLogger(NotificationHookConsumer.class);
+ private static final Logger PERF_LOG = AtlasPerfTracer.getPerfLogger(NotificationHookConsumer.class);
+ private static final Logger FAILED_LOG = LoggerFactory.getLogger("FAILED");
+ private static final String LOCALHOST = "localhost";
private static final String THREADNAME_PREFIX = NotificationHookConsumer.class.getSimpleName();
- public static final String CONSUMER_THREADS_PROPERTY = "atlas.notification.hook.numthreads";
- public static final String CONSUMER_RETRIES_PROPERTY = "atlas.notification.hook.maxretries";
+ public static final String CONSUMER_THREADS_PROPERTY = "atlas.notification.hook.numthreads";
+ public static final String CONSUMER_RETRIES_PROPERTY = "atlas.notification.hook.maxretries";
public static final String CONSUMER_FAILEDCACHESIZE_PROPERTY = "atlas.notification.hook.failedcachesize";
- public static final String CONSUMER_RETRY_INTERVAL = "atlas.notification.consumer.retry.interval";
- public static final String CONSUMER_MIN_RETRY_INTERVAL = "atlas.notification.consumer.min.retry.interval";
- public static final String CONSUMER_MAX_RETRY_INTERVAL = "atlas.notification.consumer.max.retry.interval";
-
+ public static final String CONSUMER_RETRY_INTERVAL = "atlas.notification.consumer.retry.interval";
+ public static final String CONSUMER_MIN_RETRY_INTERVAL = "atlas.notification.consumer.min.retry.interval";
+ public static final String CONSUMER_MAX_RETRY_INTERVAL = "atlas.notification.consumer.max.retry.interval";
public static final int SERVER_READY_WAIT_TIME_MS = 1000;
- private final AtlasEntityStore atlasEntityStore;
- private final ServiceState serviceState;
+
+ private final AtlasEntityStore atlasEntityStore;
+ private final ServiceState serviceState;
private final AtlasInstanceConverter instanceConverter;
- private final AtlasTypeRegistry typeRegistry;
- private final int maxRetries;
- private final int failedMsgCacheSize;
+ private final AtlasTypeRegistry typeRegistry;
+ private final int maxRetries;
+ private final int failedMsgCacheSize;
+ private final int minWaitDuration;
+ private final int maxWaitDuration;
+
+ private NotificationInterface notificationInterface;
+ private ExecutorService executors;
+ private Configuration applicationProperties;
@VisibleForTesting
final int consumerRetryInterval;
- private final int minWaitDuration;
- private final int maxWaitDuration;
-
- private NotificationInterface notificationInterface;
- private ExecutorService executors;
- private Configuration applicationProperties;
@VisibleForTesting
List<HookConsumer> consumers;
@@ -117,18 +114,17 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
ServiceState serviceState, AtlasInstanceConverter instanceConverter,
AtlasTypeRegistry typeRegistry) throws AtlasException {
this.notificationInterface = notificationInterface;
- this.atlasEntityStore = atlasEntityStore;
- this.serviceState = serviceState;
- this.instanceConverter = instanceConverter;
- this.typeRegistry = typeRegistry;
-
+ this.atlasEntityStore = atlasEntityStore;
+ this.serviceState = serviceState;
+ this.instanceConverter = instanceConverter;
+ this.typeRegistry = typeRegistry;
this.applicationProperties = ApplicationProperties.get();
- maxRetries = applicationProperties.getInt(CONSUMER_RETRIES_PROPERTY, 3);
- failedMsgCacheSize = applicationProperties.getInt(CONSUMER_FAILEDCACHESIZE_PROPERTY, 20);
+ maxRetries = applicationProperties.getInt(CONSUMER_RETRIES_PROPERTY, 3);
+ failedMsgCacheSize = applicationProperties.getInt(CONSUMER_FAILEDCACHESIZE_PROPERTY, 20);
consumerRetryInterval = applicationProperties.getInt(CONSUMER_RETRY_INTERVAL, 500);
- minWaitDuration = applicationProperties.getInt(CONSUMER_MIN_RETRY_INTERVAL, consumerRetryInterval); // 500 ms by default
- maxWaitDuration = applicationProperties.getInt(CONSUMER_MAX_RETRY_INTERVAL, minWaitDuration * 60); // 30 sec by default
+ minWaitDuration = applicationProperties.getInt(CONSUMER_MIN_RETRY_INTERVAL, consumerRetryInterval); // 500 ms by default
+ maxWaitDuration = applicationProperties.getInt(CONSUMER_MAX_RETRY_INTERVAL, minWaitDuration * 60); // 30 sec by default
}
@Override
@@ -145,21 +141,24 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
}
if (!HAConfiguration.isHAEnabled(configuration)) {
LOG.info("HA is disabled, starting consumers inline.");
+
startConsumers(executorService);
}
}
private void startConsumers(ExecutorService executorService) {
- int numThreads = applicationProperties.getInt(CONSUMER_THREADS_PROPERTY, 1);
- List<NotificationConsumer<HookNotificationMessage>> notificationConsumers =
- notificationInterface.createConsumers(NotificationInterface.NotificationType.HOOK, numThreads);
+ int numThreads = applicationProperties.getInt(CONSUMER_THREADS_PROPERTY, 1);
+ List<NotificationConsumer<HookNotification>> notificationConsumers = notificationInterface.createConsumers(NotificationType.HOOK, numThreads);
+
if (executorService == null) {
- executorService = Executors.newFixedThreadPool(notificationConsumers.size(),
- new ThreadFactoryBuilder().setNameFormat(THREADNAME_PREFIX + " thread-%d").build());
+ executorService = Executors.newFixedThreadPool(notificationConsumers.size(), new ThreadFactoryBuilder().setNameFormat(THREADNAME_PREFIX + " thread-%d").build());
}
+
executors = executorService;
- for (final NotificationConsumer<HookNotificationMessage> consumer : notificationConsumers) {
+
+ for (final NotificationConsumer<HookNotification> consumer : notificationConsumers) {
HookConsumer hookConsumer = new HookConsumer(consumer);
+
consumers.add(hookConsumer);
executors.submit(hookConsumer);
}
@@ -172,11 +171,14 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
stopConsumerThreads();
if (executors != null) {
executors.shutdown();
+
if (!executors.awaitTermination(5000, TimeUnit.MILLISECONDS)) {
LOG.error("Timed out waiting for consumer threads to shut down, exiting uncleanly");
}
+
executors = null;
}
+
notificationInterface.close();
} catch (InterruptedException e) {
LOG.error("Failure in shutting down consumers");
@@ -190,6 +192,7 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
for (HookConsumer consumer : consumers) {
consumer.shutdown();
}
+
consumers.clear();
}
@@ -205,6 +208,7 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
@Override
public void instanceIsActive() {
LOG.info("Reacting to active state: initializing Kafka consumers");
+
startConsumers(executors);
}
@@ -217,6 +221,7 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
@Override
public void instanceIsPassive() {
LOG.info("Reacting to passive state: shutting down Kafka consumers.");
+
stop();
}
@@ -236,18 +241,17 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
private final long maxDuration;
private final long minDuration;
private final long resetInterval;
+ private long lastWaitAt;
- private long lastWaitAt;
@VisibleForTesting
long waitDuration;
public AdaptiveWaiter(long minDuration, long maxDuration, long increment) {
- this.minDuration = minDuration;
- this.maxDuration = maxDuration;
- this.increment = increment;
-
- this.waitDuration = minDuration;
- this.lastWaitAt = 0;
+ this.minDuration = minDuration;
+ this.maxDuration = maxDuration;
+ this.increment = increment;
+ this.waitDuration = minDuration;
+ this.lastWaitAt = 0;
this.resetInterval = maxDuration * 2;
}
@@ -269,7 +273,9 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
private void setWaitDurations() {
long timeSinceLastWait = (lastWaitAt == 0) ? 0 : System.currentTimeMillis() - lastWaitAt;
+
lastWaitAt = System.currentTimeMillis();
+
if (timeSinceLastWait > resetInterval) {
waitDuration = minDuration;
} else {
@@ -283,14 +289,14 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
@VisibleForTesting
class HookConsumer extends ShutdownableThread {
- private final NotificationConsumer<HookNotificationMessage> consumer;
- private final AtomicBoolean shouldRun = new AtomicBoolean(false);
- private List<HookNotificationMessage> failedMessages = new ArrayList<>();
-
- private final AdaptiveWaiter adaptiveWaiter = new AdaptiveWaiter(minWaitDuration, maxWaitDuration, minWaitDuration);
+ private final NotificationConsumer<HookNotification> consumer;
+ private final AtomicBoolean shouldRun = new AtomicBoolean(false);
+ private final List<HookNotification> failedMessages = new ArrayList<>();
+ private final AdaptiveWaiter adaptiveWaiter = new AdaptiveWaiter(minWaitDuration, maxWaitDuration, minWaitDuration);
- public HookConsumer(NotificationConsumer<HookNotificationMessage> consumer) {
+ public HookConsumer(NotificationConsumer<HookNotification> consumer) {
super("atlas-hook-consumer-thread", false);
+
this.consumer = consumer;
}
@@ -307,8 +313,9 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
try {
while (shouldRun.get()) {
try {
- List<AtlasKafkaMessage<HookNotificationMessage>> messages = consumer.receive();
- for (AtlasKafkaMessage<HookNotificationMessage> msg : messages) {
+ List<AtlasKafkaMessage<HookNotification>> messages = consumer.receive();
+
+ for (AtlasKafkaMessage<HookNotification> msg : messages) {
handleMessage(msg);
}
} catch (IllegalStateException ex) {
@@ -316,6 +323,7 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
} catch (Exception e) {
if (shouldRun.get()) {
LOG.warn("Exception in NotificationHookConsumer", e);
+
adaptiveWaiter.pause(e);
} else {
break;
@@ -325,6 +333,7 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
} finally {
if (consumer != null) {
LOG.info("closing NotificationConsumer");
+
consumer.close();
}
@@ -333,11 +342,10 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
}
@VisibleForTesting
- void handleMessage(AtlasKafkaMessage<HookNotificationMessage> kafkaMsg) throws AtlasServiceException, AtlasException {
- AtlasPerfTracer perf = null;
-
- HookNotificationMessage message = kafkaMsg.getMessage();
- String messageUser = message.getUser();
+ void handleMessage(AtlasKafkaMessage<HookNotification> kafkaMsg) throws AtlasServiceException, AtlasException {
+ AtlasPerfTracer perf = null;
+ HookNotification message = kafkaMsg.getMessage();
+ String messageUser = message.getUser();
if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, message.getType().name());
@@ -345,21 +353,25 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
try {
// Used for intermediate conversions during create and update
- AtlasEntity.AtlasEntitiesWithExtInfo entities;
+ AtlasEntitiesWithExtInfo entities = null;
+
for (int numRetries = 0; numRetries < maxRetries; numRetries++) {
if (LOG.isDebugEnabled()) {
LOG.debug("handleMessage({}): attempt {}", message.getType().name(), numRetries);
}
+
try {
- RequestContext requestContext = RequestContext.createContext();
+ RequestContextV1 requestContext = RequestContextV1.get();
+
requestContext.setUser(messageUser);
switch (message.getType()) {
case ENTITY_CREATE:
- EntityCreateRequest createRequest = (EntityCreateRequest) message;
+ final EntityCreateRequest createRequest = (EntityCreateRequest) message;
if (numRetries == 0) { // audit only on the first attempt
AtlasBaseClient.API api = AtlasClient.API_V1.CREATE_ENTITY;
+
audit(messageUser, api.getMethod(), api.getNormalizedPath());
}
@@ -373,19 +385,16 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
if (numRetries == 0) { // audit only on the first attempt
AtlasBaseClient.API api = UPDATE_ENTITY_BY_ATTRIBUTE;
- audit(messageUser, api.getMethod(),
- String.format(api.getNormalizedPath(), partialUpdateRequest.getTypeName()));
+
+ audit(messageUser, api.getMethod(), String.format(api.getNormalizedPath(), partialUpdateRequest.getTypeName()));
}
Referenceable referenceable = partialUpdateRequest.getEntity();
+
entities = instanceConverter.toAtlasEntity(referenceable);
AtlasEntityType entityType = typeRegistry.getEntityTypeByName(partialUpdateRequest.getTypeName());
- String guid = AtlasGraphUtilsV1.getGuidByUniqueAttributes(entityType, new HashMap<String, Object>() {
- {
- put(partialUpdateRequest.getAttribute(), partialUpdateRequest.getAttributeValue());
- }
- });
+ String guid = AtlasGraphUtilsV1.getGuidByUniqueAttributes(entityType, Collections.singletonMap(partialUpdateRequest.getAttribute(), (Object)partialUpdateRequest.getAttributeValue()));
// There should only be one root entity
entities.getEntities().get(0).setGuid(guid);
@@ -398,30 +407,30 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
if (numRetries == 0) { // audit only on the first attempt
AtlasBaseClient.API api = DELETE_ENTITY_BY_ATTRIBUTE;
- audit(messageUser, api.getMethod(),
- String.format(api.getNormalizedPath(), deleteRequest.getTypeName()));
+
+ audit(messageUser, api.getMethod(), String.format(api.getNormalizedPath(), deleteRequest.getTypeName()));
}
try {
AtlasEntityType type = (AtlasEntityType) typeRegistry.getType(deleteRequest.getTypeName());
- atlasEntityStore.deleteByUniqueAttributes(type,
- new HashMap<String, Object>() {{
- put(deleteRequest.getAttribute(), deleteRequest.getAttributeValue());
- }});
+
+ atlasEntityStore.deleteByUniqueAttributes(type, Collections.singletonMap(deleteRequest.getAttribute(), (Object) deleteRequest.getAttributeValue()));
} catch (ClassCastException cle) {
LOG.error("Failed to do a partial update on Entity");
}
break;
case ENTITY_FULL_UPDATE:
- EntityUpdateRequest updateRequest = (EntityUpdateRequest) message;
+ final EntityUpdateRequest updateRequest = (EntityUpdateRequest) message;
if (numRetries == 0) { // audit only on the first attempt
AtlasBaseClient.API api = UPDATE_ENTITY;
+
audit(messageUser, api.getMethod(), api.getNormalizedPath());
}
entities = instanceConverter.toAtlasEntities(updateRequest.getEntities());
+
atlasEntityStore.createOrUpdate(new AtlasEntityStream(entities), false);
break;
@@ -434,6 +443,7 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
LOG.warn("Error handling message", e);
try {
LOG.info("Sleeping for {} ms before retry", consumerRetryInterval);
+
Thread.sleep(consumerRetryInterval);
} catch (InterruptedException ie) {
LOG.error("Notification consumer thread sleep interrupted");
@@ -441,14 +451,15 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
if (numRetries == (maxRetries - 1)) {
LOG.warn("Max retries exceeded for message {}", message, e);
+
failedMessages.add(message);
+
if (failedMessages.size() >= failedMsgCacheSize) {
recordFailedMessages();
}
return;
}
} finally {
- RequestContext.clear();
RequestContextV1.clear();
}
}
@@ -460,15 +471,18 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
private void recordFailedMessages() {
//logging failed messages
- for (HookNotificationMessage message : failedMessages) {
+ for (HookNotification message : failedMessages) {
FAILED_LOG.error("[DROPPED_NOTIFICATION] {}", AbstractNotification.getMessageJson(message));
}
+
failedMessages.clear();
}
- private void commit(AtlasKafkaMessage<HookNotificationMessage> kafkaMessage) {
+ private void commit(AtlasKafkaMessage<HookNotification> kafkaMessage) {
recordFailedMessages();
+
TopicPartition partition = new TopicPartition("ATLAS_HOOK", kafkaMessage.getPartition());
+
consumer.commit(partition, kafkaMessage.getOffset() + 1);
}
@@ -476,22 +490,23 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
try {
while (serviceState.getState() != ServiceState.ServiceStateValue.ACTIVE) {
try {
- LOG.info("Atlas Server is not ready. Waiting for {} milliseconds to retry...",
- SERVER_READY_WAIT_TIME_MS);
+ LOG.info("Atlas Server is not ready. Waiting for {} milliseconds to retry...", SERVER_READY_WAIT_TIME_MS);
+
timer.sleep(SERVER_READY_WAIT_TIME_MS);
} catch (InterruptedException e) {
- LOG.info("Interrupted while waiting for Atlas Server to become ready, "
- + "exiting consumer thread.", e);
+ LOG.info("Interrupted while waiting for Atlas Server to become ready, " + "exiting consumer thread.", e);
+
return false;
}
}
} catch (Throwable e) {
- LOG.info(
- "Handled AtlasServiceException while waiting for Atlas Server to become ready, "
- + "exiting consumer thread.", e);
+ LOG.info("Handled AtlasServiceException while waiting for Atlas Server to become ready, exiting consumer thread.", e);
+
return false;
}
+
LOG.info("Atlas Server is ready, can start reading Kafka events.");
+
return true;
}
@@ -506,12 +521,15 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
}
super.initiateShutdown();
+
shouldRun.set(false);
+
if (consumer != null) {
consumer.wakeup();
}
super.awaitShutdown();
+
LOG.info("<== HookConsumer shutdown()");
}
}
@@ -521,7 +539,6 @@ public class NotificationHookConsumer implements Service, ActiveStateChangeHandl
LOG.debug("==> audit({},{}, {})", messageUser, method, path);
}
- AuditFilter.audit(messageUser, THREADNAME_PREFIX, method, LOCALHOST, path, LOCALHOST,
- DateTimeHelper.formatDateUTC(new Date()));
+ AuditFilter.audit(messageUser, THREADNAME_PREFIX, method, LOCALHOST, path, LOCALHOST, DateTimeHelper.formatDateUTC(new Date()));
}
}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/web/errors/NotFoundExceptionMapper.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/errors/NotFoundExceptionMapper.java b/webapp/src/main/java/org/apache/atlas/web/errors/NotFoundExceptionMapper.java
index a33d8d7..bc0b440 100644
--- a/webapp/src/main/java/org/apache/atlas/web/errors/NotFoundExceptionMapper.java
+++ b/webapp/src/main/java/org/apache/atlas/web/errors/NotFoundExceptionMapper.java
@@ -17,7 +17,7 @@
*/
package org.apache.atlas.web.errors;
-import org.apache.atlas.typesystem.exception.NotFoundException;
+import org.apache.atlas.exception.NotFoundException;
import org.springframework.stereotype.Component;
import javax.ws.rs.core.Response;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/web/filters/AtlasAuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/filters/AtlasAuthenticationFilter.java b/webapp/src/main/java/org/apache/atlas/web/filters/AtlasAuthenticationFilter.java
index e8020db..1d553e0 100644
--- a/webapp/src/main/java/org/apache/atlas/web/filters/AtlasAuthenticationFilter.java
+++ b/webapp/src/main/java/org/apache/atlas/web/filters/AtlasAuthenticationFilter.java
@@ -19,7 +19,7 @@
package org.apache.atlas.web.filters;
import org.apache.atlas.ApplicationProperties;
-import org.apache.atlas.RequestContext;
+import org.apache.atlas.RequestContextV1;
import org.apache.atlas.security.SecurityProperties;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.atlas.web.security.AtlasAuthenticationProvider;
@@ -311,7 +311,7 @@ public class AtlasAuthenticationFilter extends AuthenticationFilter {
try {
String requestUser = httpRequest.getRemoteUser();
NDC.push(requestUser + ":" + httpRequest.getMethod() + httpRequest.getRequestURI());
- RequestContext requestContext = RequestContext.get();
+ RequestContextV1 requestContext = RequestContextV1.get();
if (requestContext != null) {
requestContext.setUser(requestUser);
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java b/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java
index 191388a..3225b0e 100755
--- a/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java
+++ b/webapp/src/main/java/org/apache/atlas/web/filters/AuditFilter.java
@@ -20,7 +20,6 @@ package org.apache.atlas.web.filters;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasException;
-import org.apache.atlas.RequestContext;
import org.apache.atlas.RequestContextV1;
import org.apache.atlas.metrics.Metrics;
import org.apache.atlas.util.AtlasRepositoryConfiguration;
@@ -70,7 +69,8 @@ public class AuditFilter implements Filter {
try {
currentThread.setName(formatName(oldName, requestId));
- RequestContext requestContext = RequestContext.createContext();
+ RequestContextV1.clear();
+ RequestContextV1 requestContext = RequestContextV1.get();
requestContext.setUser(user);
recordAudit(httpRequest, requestTimeISO9601, user);
filterChain.doFilter(request, response);
@@ -79,7 +79,6 @@ public class AuditFilter implements Filter {
((HttpServletResponse) response).setHeader(AtlasClient.REQUEST_ID, requestId);
currentThread.setName(oldName);
recordMetrics();
- RequestContext.clear();
RequestContextV1.clear();
}
}
@@ -120,7 +119,7 @@ public class AuditFilter implements Filter {
public static void recordMetrics() {
//record metrics
- Metrics requestMetrics = RequestContext.getMetrics();
+ Metrics requestMetrics = RequestContextV1.getMetrics();
if (!requestMetrics.isEmpty()) {
METRICS_LOG.info("{}", requestMetrics);
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/web/resources/DataSetLineageResource.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/DataSetLineageResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/DataSetLineageResource.java
index 435659e..5660c5b 100644
--- a/webapp/src/main/java/org/apache/atlas/web/resources/DataSetLineageResource.java
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/DataSetLineageResource.java
@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,13 +18,21 @@
package org.apache.atlas.web.resources;
-import org.apache.atlas.AtlasClient;
-import org.apache.atlas.discovery.DiscoveryException;
-import org.apache.atlas.discovery.LineageService;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
+import org.apache.atlas.AtlasErrorCode;
+import org.apache.atlas.discovery.AtlasLineageService;
+import org.apache.atlas.exception.AtlasBaseException;
+import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.lineage.AtlasLineageInfo;
+import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageDirection;
+import org.apache.atlas.repository.store.graph.AtlasEntityStore;
+import org.apache.atlas.type.AtlasEntityType;
+import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.atlas.utils.AtlasPerfTracer;
+import org.apache.atlas.v1.model.lineage.DataSetLineageResponse;
+import org.apache.atlas.v1.model.lineage.SchemaResponse;
+import org.apache.atlas.web.util.LineageUtils;
import org.apache.atlas.web.util.Servlets;
-import org.codehaus.jettison.json.JSONObject;
+import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
@@ -32,9 +40,18 @@ import org.springframework.stereotype.Service;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.*;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.apache.atlas.v1.model.lineage.SchemaResponse.SchemaDetails;
/**
* Jersey Resource for Hive Table Lineage.
@@ -45,20 +62,18 @@ import javax.ws.rs.core.Response;
@Deprecated
public class DataSetLineageResource {
- private static final Logger LOG = LoggerFactory.getLogger(DataSetLineageResource.class);
+ private static final Logger LOG = LoggerFactory.getLogger(DataSetLineageResource.class);
private static final Logger PERF_LOG = AtlasPerfTracer.getPerfLogger("rest.DataSetLineageResource");
- private final LineageService lineageService;
+ private final AtlasLineageService atlasLineageService;
+ private final AtlasTypeRegistry typeRegistry;
+ private final AtlasEntityStore atlasEntityStore;
- /**
- * Created by the Guice ServletModule and injected with the
- * configured LineageService.
- *
- * @param lineageService lineage service handle
- */
@Inject
- public DataSetLineageResource(LineageService lineageService) {
- this.lineageService = lineageService;
+ public DataSetLineageResource(final AtlasLineageService atlasLineageService, final AtlasTypeRegistry typeRegistry, final AtlasEntityStore atlasEntityStore) {
+ this.atlasLineageService = atlasLineageService;
+ this.typeRegistry = typeRegistry;
+ this.atlasEntityStore = atlasEntityStore;
}
/**
@@ -70,30 +85,28 @@ public class DataSetLineageResource {
@Path("table/{tableName}/inputs/graph")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
- public Response inputsGraph(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
+ public DataSetLineageResponse inputsGraph(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
if (LOG.isDebugEnabled()) {
LOG.debug("==> DataSetLineageResource.inputsGraph({})", tableName);
}
- AtlasPerfTracer perf = null;
+ DataSetLineageResponse ret = new DataSetLineageResponse();
+ AtlasPerfTracer perf = null;
try {
if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "DataSetLineageResource.inputsGraph(tableName=" + tableName + ")");
}
- final String jsonResult = lineageService.getInputsGraph(tableName);
+ String guid = getGuid(tableName);
- JSONObject response = new JSONObject();
- response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
- response.put("tableName", tableName);
- response.put(AtlasClient.RESULTS, new JSONObject(jsonResult));
+ AtlasLineageInfo lineageInfo = atlasLineageService.getAtlasLineageInfo(guid, LineageDirection.INPUT, -1);
+ ret.setTableName(tableName);
+ ret.setRequestId(Servlets.getRequestId());
+ ret.setResults(LineageUtils.toLineageStruct(lineageInfo, typeRegistry));
- return Response.ok(response).build();
- } catch (EntityNotFoundException e) {
- LOG.error("table entity not found for {}", tableName);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
- } catch (DiscoveryException | IllegalArgumentException e) {
+ return ret;
+ } catch (IllegalArgumentException e) {
LOG.error("Unable to get lineage inputs graph for table {}", tableName, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (WebApplicationException e) {
@@ -116,30 +129,28 @@ public class DataSetLineageResource {
@Path("table/{tableName}/outputs/graph")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
- public Response outputsGraph(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
+ public DataSetLineageResponse outputsGraph(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
if (LOG.isDebugEnabled()) {
LOG.debug("==> DataSetLineageResource.outputsGraph({})", tableName);
}
- AtlasPerfTracer perf = null;
+ DataSetLineageResponse ret = new DataSetLineageResponse();
+ AtlasPerfTracer perf = null;
try {
if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "DataSetLineageResource.outputsGraph(tableName=" + tableName + ")");
}
- final String jsonResult = lineageService.getOutputsGraph(tableName);
+ String guid = getGuid(tableName);
- JSONObject response = new JSONObject();
- response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
- response.put("tableName", tableName);
- response.put(AtlasClient.RESULTS, new JSONObject(jsonResult));
+ AtlasLineageInfo lineageInfo = atlasLineageService.getAtlasLineageInfo(guid, LineageDirection.OUTPUT, -1);
+ ret.setTableName(tableName);
+ ret.setRequestId(Servlets.getRequestId());
+ ret.setResults(LineageUtils.toLineageStruct(lineageInfo, typeRegistry));
- return Response.ok(response).build();
- } catch (EntityNotFoundException e) {
- LOG.error("table entity not found for {}", tableName);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
- } catch (DiscoveryException | IllegalArgumentException e) {
+ return ret;
+ } catch (IllegalArgumentException e) {
LOG.error("Unable to get lineage outputs graph for table {}", tableName, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (WebApplicationException e) {
@@ -162,30 +173,26 @@ public class DataSetLineageResource {
@Path("table/{tableName}/schema")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
- public Response schema(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
+ public SchemaResponse schema(@Context HttpServletRequest request, @PathParam("tableName") String tableName) {
if (LOG.isDebugEnabled()) {
LOG.debug("==> DataSetLineageResource.schema({})", tableName);
}
AtlasPerfTracer perf = null;
+ SchemaResponse ret = new SchemaResponse();
try {
if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "DataSetLineageResource.schema(tableName=" + tableName + ")");
}
- final String jsonResult = lineageService.getSchema(tableName);
+ SchemaDetails schemaDetails = atlasLineageService.getSchemaForHiveTableByName(tableName);
- JSONObject response = new JSONObject();
- response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
- response.put("tableName", tableName);
- response.put(AtlasClient.RESULTS, new JSONObject(jsonResult));
-
- return Response.ok(response).build();
- } catch (EntityNotFoundException e) {
- LOG.error("table entity not found for {}", tableName);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
- } catch (DiscoveryException | IllegalArgumentException e) {
+ ret.setRequestId(Servlets.getRequestId());
+ ret.setTableName(tableName);
+ ret.setResults(schemaDetails);
+ return ret;
+ } catch (IllegalArgumentException e) {
LOG.error("Unable to get schema for table {}", tableName, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (WebApplicationException e) {
@@ -198,4 +205,20 @@ public class DataSetLineageResource {
AtlasPerfTracer.log(perf);
}
}
+
+ private String getGuid(String tableName) throws AtlasBaseException {
+ if (StringUtils.isEmpty(tableName)) {
+ // TODO: Fix the error code if mismatch
+ throw new AtlasBaseException(AtlasErrorCode.BAD_REQUEST);
+ }
+ Map<String, Object> lookupAttributes = new HashMap<>();
+ lookupAttributes.put("qualifiedName", tableName);
+ AtlasEntityType entityType = typeRegistry.getEntityTypeByName("hive_table");
+ AtlasEntity.AtlasEntityWithExtInfo hive_table = atlasEntityStore.getByUniqueAttributes(entityType, lookupAttributes);
+ if (hive_table != null) {
+ return hive_table.getEntity().getGuid();
+ } else {
+ throw new AtlasBaseException(AtlasErrorCode.INSTANCE_NOT_FOUND, tableName);
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java
index 8b56507..11879e6 100755
--- a/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/EntityResource.java
@@ -20,39 +20,34 @@ package org.apache.atlas.web.resources;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
-import com.sun.jersey.api.core.ResourceContext;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasConstants;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
import org.apache.atlas.CreateUpdateEntitiesResult;
import org.apache.atlas.EntityAuditEvent;
+import org.apache.atlas.discovery.AtlasDiscoveryService;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.instance.AtlasClassification;
-import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
import org.apache.atlas.model.instance.EntityMutationResponse;
import org.apache.atlas.model.instance.GuidMapping;
import org.apache.atlas.model.legacy.EntityResult;
+import org.apache.atlas.repository.audit.EntityAuditRepository;
import org.apache.atlas.repository.converters.AtlasInstanceConverter;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.repository.store.graph.v1.AtlasEntityStream;
import org.apache.atlas.repository.store.graph.v1.AtlasGraphUtilsV1;
-import org.apache.atlas.services.MetadataService;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.exception.EntityExistsException;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.ValueConversionException;
+import org.apache.atlas.type.AtlasTypeUtil;
import org.apache.atlas.utils.AtlasPerfTracer;
import org.apache.atlas.utils.ParamChecker;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
import org.apache.atlas.web.rest.EntityREST;
import org.apache.atlas.web.util.Servlets;
import org.apache.commons.collections.CollectionUtils;
@@ -98,32 +93,29 @@ public class EntityResource {
private static final String TRAIT_NAME = "traitName";
- private final MetadataService metadataService;
private final AtlasInstanceConverter restAdapters;
private final AtlasEntityStore entitiesStore;
private final AtlasTypeRegistry typeRegistry;
- private final EntityREST entityREST;
+ private final EntityREST entityREST;
+ private final EntityAuditRepository entityAuditRepository;
+ private final AtlasDiscoveryService atlasDiscoveryService;
@Context
UriInfo uriInfo;
- @Context
- private ResourceContext resourceContext;
-
- /**
- * Created by the Guice ServletModule and injected with the
- * configured MetadataService.
- *
- * @param metadataService metadata service handle
- */
@Inject
- public EntityResource(MetadataService metadataService, AtlasInstanceConverter restAdapters,
- AtlasEntityStore entitiesStore, AtlasTypeRegistry typeRegistry, EntityREST entityREST) {
- this.metadataService = metadataService;
- this.restAdapters = restAdapters;
- this.entitiesStore = entitiesStore;
- this.typeRegistry = typeRegistry;
+ public EntityResource(final AtlasInstanceConverter restAdapters,
+ final AtlasEntityStore entitiesStore,
+ final AtlasTypeRegistry typeRegistry,
+ final EntityREST entityREST,
+ final EntityAuditRepository entityAuditRepository,
+ final AtlasDiscoveryService atlasDiscoveryService) {
+ this.restAdapters = restAdapters;
+ this.entitiesStore = entitiesStore;
+ this.typeRegistry = typeRegistry;
this.entityREST = entityREST;
+ this.entityAuditRepository = entityAuditRepository;
+ this.atlasDiscoveryService = atlasDiscoveryService;
}
/**
@@ -149,22 +141,28 @@ public class EntityResource {
String entities = Servlets.getRequestPayload(request);
//Handle backward compatibility - if entities is not JSONArray, convert to JSONArray
+ JSONArray jsonEntities = null;
+
try {
- new JSONArray(entities);
+ jsonEntities = new JSONArray(entities);
} catch (JSONException e) {
final String finalEntities = entities;
- entities = new JSONArray() {{
+ jsonEntities = new JSONArray() {{
put(finalEntities);
- }}.toString();
+ }};
}
- entityJson = AtlasClient.toString(new JSONArray(entities));
+ String[] jsonStrings = new String[jsonEntities.length()];
+
+ for (int i = 0; i < jsonEntities.length(); i++) {
+ jsonStrings[i] = jsonEntities.getString(i);
+ }
if (LOG.isDebugEnabled()) {
- LOG.debug("submitting entities {} ", entityJson);
+ LOG.debug("submitting entities {} ", jsonEntities);
}
- AtlasEntitiesWithExtInfo entitiesInfo = restAdapters.toAtlasEntities(entities);
+ AtlasEntitiesWithExtInfo entitiesInfo = restAdapters.toAtlasEntities(jsonStrings);
EntityMutationResponse mutationResponse = entityREST.createOrUpdate(entitiesInfo);
final List<String> guids = restAdapters.getGuids(mutationResponse.getCreatedEntities());
@@ -183,12 +181,6 @@ public class EntityResource {
} catch (AtlasBaseException e) {
LOG.error("Unable to persist entity instance entityDef={}", entityJson, e);
throw toWebApplicationException(e);
- } catch(EntityExistsException e) {
- LOG.error("Unique constraint violation for entity entityDef={}", entityJson, e);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.CONFLICT));
- } catch (ValueConversionException ve) {
- LOG.error("Unable to persist entity instance due to a deserialization error entityDef={}", entityJson, ve);
- throw new WebApplicationException(Servlets.getErrorResponse(ve.getCause() != null ? ve.getCause() : ve, Response.Status.BAD_REQUEST));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to persist entity instance entityDef={}", entityJson, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
@@ -224,13 +216,13 @@ public class EntityResource {
return locationURI;
}
- private JSONObject getResponse(EntityResult entityResult) throws AtlasException, JSONException {
+ private JSONObject getResponse(EntityResult entityResult) throws AtlasBaseException, AtlasException, JSONException {
CreateUpdateEntitiesResult result = new CreateUpdateEntitiesResult();
result.setEntityResult(entityResult);
return getResponse(result);
}
- private JSONObject getResponse(CreateUpdateEntitiesResult result) throws AtlasException, JSONException {
+ private JSONObject getResponse(CreateUpdateEntitiesResult result) throws AtlasBaseException, AtlasException, JSONException {
JSONObject response = new JSONObject();
EntityResult entityResult = result.getEntityResult();
GuidMapping mapping = result.getGuidMapping();
@@ -239,12 +231,12 @@ public class EntityResource {
response.put(AtlasClient.ENTITIES, new JSONObject(entityResult.toString()).get(AtlasClient.ENTITIES));
String sampleEntityId = getSample(result.getEntityResult());
if (sampleEntityId != null) {
- String entityDefinition = metadataService.getEntityDefinitionJson(sampleEntityId);
+ String entityDefinition = getEntityJson(sampleEntityId);
response.put(AtlasClient.DEFINITION, new JSONObject(entityDefinition));
}
}
if(mapping != null) {
- response.put(AtlasClient.GUID_ASSIGNMENTS, new JSONObject(AtlasType.toJson(mapping)).get(AtlasClient.GUID_ASSIGNMENTS));
+ response.put(AtlasClient.GUID_ASSIGNMENTS, new JSONObject(AtlasType.toV1Json(mapping)).get(AtlasClient.GUID_ASSIGNMENTS));
}
return response;
}
@@ -270,14 +262,18 @@ public class EntityResource {
}
final String entities = Servlets.getRequestPayload(request);
+ JSONArray jsonEntities = new JSONArray(entities);
+ String[] jsonStrings = new String[jsonEntities.length()];
- entityJson = AtlasClient.toString(new JSONArray(entities));
+ for (int i = 0; i < jsonEntities.length(); i++) {
+ jsonStrings[i] = jsonEntities.getString(i);
+ }
if (LOG.isDebugEnabled()) {
LOG.info("updating entities {} ", entityJson);
}
- AtlasEntitiesWithExtInfo entitiesInfo = restAdapters.toAtlasEntities(entities);
+ AtlasEntitiesWithExtInfo entitiesInfo = restAdapters.toAtlasEntities(jsonStrings);
EntityMutationResponse mutationResponse = entityREST.createOrUpdate(entitiesInfo);
CreateUpdateEntitiesResult result = restAdapters.toCreateUpdateEntitiesResult(mutationResponse);
@@ -290,12 +286,6 @@ public class EntityResource {
} catch (AtlasBaseException e) {
LOG.error("Unable to persist entity instance entityDef={}", entityJson, e);
throw toWebApplicationException(e);
- } catch(EntityExistsException e) {
- LOG.error("Unique constraint violation for entityDef={}", entityJson, e);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.CONFLICT));
- } catch (ValueConversionException ve) {
- LOG.error("Unable to persist entity instance due to a deserialization error entityDef={}", entityJson, ve);
- throw new WebApplicationException(Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to persist entity instance entityDef={}", entityJson, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
@@ -367,7 +357,7 @@ public class EntityResource {
LOG.debug("Partially updating entity by unique attribute {} {} {} {} ", entityType, attribute, value, entityJson);
}
- Referenceable updatedEntity = InstanceSerialization.fromJsonReferenceable(entityJson, true);
+ Referenceable updatedEntity = AtlasType.fromV1Json(entityJson, Referenceable.class);
entityType = ParamChecker.notEmpty(entityType, "Entity type cannot be null");
attribute = ParamChecker.notEmpty(attribute, "attribute name cannot be null");
@@ -379,10 +369,10 @@ public class EntityResource {
// update referenceable with Id if not specified in payload
Id updateId = updatedEntity.getId();
- if (updateId != null && !updateId.isAssigned()) {
+ if (updateId != null && !AtlasTypeUtil.isAssignedGuid(updateId.getId())) {
String guid = AtlasGraphUtilsV1.getGuidByUniqueAttributes(getEntityType(entityType), attributes);
- updatedEntity.replaceWithNewId(new Id(guid, 0, updatedEntity.getTypeName()));
+ updatedEntity.setId(new Id(guid, 0, updatedEntity.getTypeName()));
}
AtlasEntitiesWithExtInfo entitiesInfo = restAdapters.toAtlasEntity(updatedEntity);
@@ -398,15 +388,6 @@ public class EntityResource {
} catch (AtlasBaseException e) {
LOG.error("Unable to partially update entity {} {}:{}.{}", entityJson, entityType, attribute, value, e);
throw toWebApplicationException(e);
- } catch (ValueConversionException ve) {
- LOG.error("Unable to persist entity instance due to a deserialization error {} ", entityJson, ve);
- throw new WebApplicationException(Servlets.getErrorResponse(ve.getCause(), Response.Status.BAD_REQUEST));
- } catch(EntityExistsException e) {
- LOG.error("Unique constraint violation for entity {} ", entityJson, e);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.CONFLICT));
- } catch (EntityNotFoundException e) {
- LOG.error("An entity with type={} and qualifiedName={} does not exist {} ", entityType, value, entityJson, e);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to partially update entity {} {}:{}.{}", entityJson, entityType, attribute, value, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
@@ -474,13 +455,13 @@ public class EntityResource {
LOG.debug("partially updating entity for guid {} : {} ", guid, entityJson);
}
- Referenceable updatedEntity = InstanceSerialization.fromJsonReferenceable(entityJson, true);
+ Referenceable updatedEntity = AtlasType.fromV1Json(entityJson, Referenceable.class);
// update referenceable with Id if not specified in payload
Id updateId = updatedEntity.getId();
- if (updateId != null && !updateId.isAssigned()) {
- updatedEntity.replaceWithNewId(new Id(guid, 0, updatedEntity.getTypeName()));
+ if (updateId != null && !AtlasTypeUtil.isAssignedGuid(updateId.getId())) {
+ updatedEntity.setId(new Id(guid, 0, updatedEntity.getTypeName()));
}
AtlasEntitiesWithExtInfo entitiesInfo = restAdapters.toAtlasEntity(updatedEntity);
@@ -496,9 +477,6 @@ public class EntityResource {
} catch (AtlasBaseException e) {
LOG.error("Unable to update entity by GUID {} {} ", guid, entityJson, e);
throw toWebApplicationException(e);
- } catch (EntityNotFoundException e) {
- LOG.error("An entity with GUID={} does not exist {} ", guid, entityJson, e);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to update entity by GUID {} {}", guid, entityJson, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
@@ -544,9 +522,6 @@ public class EntityResource {
} catch (AtlasBaseException e) {
LOG.error("Unable to add property {} to entity id {} {} ", property, guid, value, e);
throw toWebApplicationException(e);
- } catch (EntityNotFoundException e) {
- LOG.error("An entity with GUID={} does not exist {} ", guid, value, e);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to add property {} to entity id {} {} ", property, guid, value, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
@@ -617,13 +592,6 @@ public class EntityResource {
} catch (AtlasBaseException e) {
LOG.error("Unable to delete entities {} {} {} {} ", guids, entityType, attribute, value, e);
throw toWebApplicationException(e);
- } catch (EntityNotFoundException e) {
- if(guids != null && !guids.isEmpty()) {
- LOG.error("An entity with GUID={} does not exist ", guids, e);
- } else {
- LOG.error("An entity with qualifiedName {}-{}-{} does not exist", entityType, attribute, value, e);
- }
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
} catch (AtlasException | IllegalArgumentException e) {
LOG.error("Unable to delete entities {} {} {} {} ", guids, entityType, attribute, value, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
@@ -666,7 +634,8 @@ public class EntityResource {
}
guid = ParamChecker.notEmpty(guid, "guid cannot be null");
- final String entityDefinition = metadataService.getEntityDefinitionJson(guid);
+
+ String entityDefinition = getEntityJson(guid);
JSONObject response = new JSONObject();
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
@@ -681,11 +650,7 @@ public class EntityResource {
}
return Response.status(status).entity(response).build();
-
- } catch (EntityNotFoundException e) {
- LOG.error("An entity with GUID={} does not exist ", guid, e);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
- } catch (AtlasException | IllegalArgumentException e) {
+ } catch (IllegalArgumentException e) {
LOG.error("Bad GUID={} ", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (WebApplicationException e) {
@@ -716,19 +681,19 @@ public class EntityResource {
LOG.debug("Fetching entity list for type={} ", entityType);
}
- final List<String> entityList = metadataService.getEntityList(entityType);
+ List<String> entityGUIDS = entitiesStore.getEntityGUIDS(entityType);
JSONObject response = new JSONObject();
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.TYPENAME, entityType);
- response.put(AtlasClient.RESULTS, new JSONArray(entityList));
- response.put(AtlasClient.COUNT, entityList.size());
+ response.put(AtlasClient.RESULTS, new JSONArray(entityGUIDS));
+ response.put(AtlasClient.COUNT, entityGUIDS.size());
return Response.ok(response).build();
} catch (NullPointerException e) {
LOG.error("Entity type cannot be null", e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
- } catch (AtlasException | IllegalArgumentException e) {
+ } catch (IllegalArgumentException e) {
LOG.error("Unable to get entity list for type {}", entityType, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (WebApplicationException e) {
@@ -804,10 +769,9 @@ public class EntityResource {
String entityDefinition = null;
if (entityInfo != null) {
- AtlasEntity entity = entityInfo.getEntity();
- final ITypedReferenceableInstance instance = restAdapters.getITypedReferenceable(entity);
+ Referenceable instance = restAdapters.getReferenceable(entityInfo);
- entityDefinition = InstanceSerialization.toJson(instance, true);
+ entityDefinition = AtlasType.toV1Json(instance);
}
JSONObject response = new JSONObject();
@@ -926,8 +890,8 @@ public class EntityResource {
JSONArray traits = new JSONArray();
for (AtlasClassification classification : classifications) {
- IStruct trait = restAdapters.getTrait(classification);
- traits.put(new JSONObject(InstanceSerialization.toJson(trait, true)));
+ Struct trait = restAdapters.getTrait(classification);
+ traits.put(new JSONObject(AtlasType.toV1Json(trait)));
}
JSONObject response = new JSONObject();
@@ -984,11 +948,11 @@ public class EntityResource {
final AtlasClassification classification = entitiesStore.getClassification(guid, traitName);
- IStruct traitDefinition = restAdapters.getTrait(classification);
+ Struct traitDefinition = restAdapters.getTrait(classification);
JSONObject response = new JSONObject();
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
- response.put(AtlasClient.RESULTS, new JSONObject(InstanceSerialization.toJson(traitDefinition, true)));
+ response.put(AtlasClient.RESULTS, new JSONObject(AtlasType.toV1Json(traitDefinition)));
return Response.ok(response).build();
@@ -1044,7 +1008,7 @@ public class EntityResource {
add(guid);
}};
- entitiesStore.addClassification(guids, restAdapters.getClassification(InstanceSerialization.fromJsonStruct(traitDefinition, true)));
+ entitiesStore.addClassification(guids, restAdapters.toAtlasClassification(AtlasType.fromV1Json(traitDefinition, Struct.class)));
URI locationURI = getLocationURI(new ArrayList<String>() {{
add(guid);
@@ -1160,13 +1124,13 @@ public class EntityResource {
perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "EntityResource.getAuditEvents(" + guid + ", " + startKey + ", " + count + ")");
}
- List<EntityAuditEvent> events = metadataService.getAuditEvents(guid, startKey, count);
+ List<EntityAuditEvent> events = entityAuditRepository.listEvents(guid, startKey, count);
JSONObject response = new JSONObject();
response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
response.put(AtlasClient.EVENTS, getJSONArray(events));
return Response.ok(response).build();
- } catch (AtlasException | IllegalArgumentException e) {
+ } catch (IllegalArgumentException e) {
LOG.error("Unable to get audit events for entity guid={} startKey={}", guid, startKey, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (WebApplicationException e) {
@@ -1216,4 +1180,12 @@ public class EntityResource {
return new WebApplicationException(Servlets.getErrorResponse(e, e.getAtlasErrorCode().getHttpCode()));
}
+
+ private String getEntityJson(String guid) throws AtlasBaseException {
+ AtlasEntityWithExtInfo entity = entitiesStore.getById(guid);
+ Referenceable referenceable = restAdapters.getReferenceable(entity);
+ String entityJson = AtlasType.toV1Json(referenceable);
+
+ return entityJson;
+ }
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/main/java/org/apache/atlas/web/resources/LineageResource.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/LineageResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/LineageResource.java
index cba8ccf..891e4d7 100644
--- a/webapp/src/main/java/org/apache/atlas/web/resources/LineageResource.java
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/LineageResource.java
@@ -18,21 +18,16 @@
package org.apache.atlas.web.resources;
-import org.apache.atlas.AtlasClient;
import org.apache.atlas.discovery.AtlasLineageService;
-import org.apache.atlas.discovery.DiscoveryException;
-import org.apache.atlas.discovery.LineageService;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.lineage.AtlasLineageInfo;
import org.apache.atlas.model.lineage.AtlasLineageInfo.LineageDirection;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.SchemaNotFoundException;
import org.apache.atlas.utils.AtlasPerfTracer;
+import org.apache.atlas.v1.model.lineage.LineageResponse;
+import org.apache.atlas.v1.model.lineage.SchemaResponse;
import org.apache.atlas.web.util.LineageUtils;
import org.apache.atlas.web.util.Servlets;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
@@ -56,18 +51,15 @@ public class LineageResource {
private static final Logger PERF_LOG = AtlasPerfTracer.getPerfLogger("rest.LineageResource");
private final AtlasLineageService atlasLineageService;
- private final LineageService lineageService;
private final AtlasTypeRegistry typeRegistry;
/**
* Created by the Guice ServletModule and injected with the
* configured LineageService.
*
- * @param lineageService lineage service handle
*/
@Inject
- public LineageResource(LineageService lineageService, AtlasLineageService atlasLineageService, AtlasTypeRegistry typeRegistry) {
- this.lineageService = lineageService;
+ public LineageResource(AtlasLineageService atlasLineageService, AtlasTypeRegistry typeRegistry) {
this.atlasLineageService = atlasLineageService;
this.typeRegistry = typeRegistry;
}
@@ -81,11 +73,13 @@ public class LineageResource {
@Path("{guid}/inputs/graph")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
- public Response inputsGraph(@PathParam("guid") String guid) {
+ public LineageResponse inputsGraph(@PathParam("guid") String guid) {
if (LOG.isDebugEnabled()) {
LOG.debug("==> LineageResource.inputsGraph({})", guid);
}
+ LineageResponse ret = new LineageResponse();
+
AtlasPerfTracer perf = null;
try {
if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
@@ -93,22 +87,16 @@ public class LineageResource {
}
AtlasLineageInfo lineageInfo = atlasLineageService.getAtlasLineageInfo(guid, LineageDirection.INPUT, -1);
- final String result = LineageUtils.toLineageStruct(lineageInfo, typeRegistry);
-
- JSONObject response = new JSONObject();
- response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
- response.put(AtlasClient.RESULTS, new JSONObject(result));
+ ret.setRequestId(Servlets.getRequestId());
+ ret.setResults(LineageUtils.toLineageStruct(lineageInfo, typeRegistry));
- return Response.ok(response).build();
+ return ret;
} catch (AtlasBaseException e) {
LOG.error("Unable to get lineage inputs graph for entity guid={}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e));
} catch (WebApplicationException e) {
LOG.error("Unable to get lineage inputs graph for entity guid={}", guid, e);
throw e;
- } catch (JSONException e) {
- LOG.error("Unable to get lineage inputs graph for entity guid={}", guid, e);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
} finally {
AtlasPerfTracer.log(perf);
@@ -127,11 +115,13 @@ public class LineageResource {
@Path("{guid}/outputs/graph")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
- public Response outputsGraph(@PathParam("guid") String guid) {
+ public LineageResponse outputsGraph(@PathParam("guid") String guid) {
if (LOG.isDebugEnabled()) {
LOG.debug("==> LineageResource.outputsGraph({})", guid);
}
+ LineageResponse ret = new LineageResponse();
+
AtlasPerfTracer perf = null;
try {
@@ -140,22 +130,16 @@ public class LineageResource {
}
AtlasLineageInfo lineageInfo = atlasLineageService.getAtlasLineageInfo(guid, LineageDirection.OUTPUT, -1);
- final String result = LineageUtils.toLineageStruct(lineageInfo, typeRegistry);
+ ret.setRequestId(Servlets.getRequestId());
+ ret.setResults(LineageUtils.toLineageStruct(lineageInfo, typeRegistry));
- JSONObject response = new JSONObject();
- response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
- response.put(AtlasClient.RESULTS, new JSONObject(result));
-
- return Response.ok(response).build();
+ return ret;
} catch (AtlasBaseException e) {
LOG.error("Unable to get lineage outputs graph for entity guid={}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e));
} catch (WebApplicationException e) {
LOG.error("Unable to get lineage outputs graph for entity guid={}", guid, e);
throw e;
- } catch (JSONException e) {
- LOG.error("Unable to get lineage outputs graph for entity guid={}", guid, e);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
} finally {
AtlasPerfTracer.log(perf);
@@ -174,31 +158,26 @@ public class LineageResource {
@Path("{guid}/schema")
@Consumes(Servlets.JSON_MEDIA_TYPE)
@Produces(Servlets.JSON_MEDIA_TYPE)
- public Response schema(@PathParam("guid") String guid) {
+ public SchemaResponse schema(@PathParam("guid") String guid) {
if (LOG.isDebugEnabled()) {
LOG.debug("==> LineageResource.schema({})", guid);
}
AtlasPerfTracer perf = null;
+ SchemaResponse ret = new SchemaResponse();
+
try {
if (AtlasPerfTracer.isPerfTraceEnabled(PERF_LOG)) {
perf = AtlasPerfTracer.getPerfTracer(PERF_LOG, "LineageResource.schema(" + guid + ")");
}
- final String jsonResult = lineageService.getSchemaForEntity(guid);
+ SchemaResponse.SchemaDetails schemaDetails = atlasLineageService.getSchemaForHiveTableByGuid(guid);
- JSONObject response = new JSONObject();
- response.put(AtlasClient.REQUEST_ID, Servlets.getRequestId());
- response.put(AtlasClient.RESULTS, new JSONObject(jsonResult));
- return Response.ok(response).build();
- } catch (SchemaNotFoundException e) {
- LOG.error("schema not found for {}", guid);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
- } catch (EntityNotFoundException e) {
- LOG.error("table entity not found for {}", guid);
- throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
- } catch (DiscoveryException | IllegalArgumentException e) {
+ ret.setRequestId(Servlets.getRequestId());
+ ret.setResults(schemaDetails);
+ return ret;
+ } catch (IllegalArgumentException e) {
LOG.error("Unable to get schema for entity guid={}", guid, e);
throw new WebApplicationException(Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
} catch (WebApplicationException e) {
[29/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/TypedInstanceToGraphMapper.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/TypedInstanceToGraphMapper.java b/repository/src/main/java/org/apache/atlas/repository/graph/TypedInstanceToGraphMapper.java
deleted file mode 100644
index 1fb4ee9..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/graph/TypedInstanceToGraphMapper.java
+++ /dev/null
@@ -1,941 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas.repository.graph;
-
-import com.google.common.base.Function;
-import com.google.common.collect.Lists;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.RequestContext;
-import org.apache.atlas.model.instance.GuidMapping;
-import org.apache.atlas.repository.Constants;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.repository.graphdb.AtlasEdge;
-import org.apache.atlas.repository.graphdb.AtlasSchemaViolationException;
-import org.apache.atlas.repository.graphdb.AtlasVertex;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.exception.EntityExistsException;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
-import org.apache.atlas.typesystem.types.*;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.apache.atlas.util.AtlasRepositoryConfiguration;
-import org.apache.atlas.utils.SHA256Utils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.stereotype.Component;
-
-import javax.inject.Inject;
-import java.security.MessageDigest;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import static org.apache.atlas.repository.graph.GraphHelper.string;
-
-@Component
-@Deprecated
-public final class TypedInstanceToGraphMapper {
-
- private static final Logger LOG = LoggerFactory.getLogger(TypedInstanceToGraphMapper.class);
- private final Map<Id, AtlasVertex> idToVertexMap = new HashMap<>();
- private final TypeSystem typeSystem = TypeSystem.getInstance();
- private static final GraphHelper graphHelper = GraphHelper.getInstance();
-
- private DeleteHandler deleteHandler;
- private GraphToTypedInstanceMapper graphToTypedInstanceMapper;
-
- @Inject
- public TypedInstanceToGraphMapper(GraphToTypedInstanceMapper graphToTypedInstanceMapper, DeleteHandler deleteHandler) {
- this.graphToTypedInstanceMapper = graphToTypedInstanceMapper;
- this.deleteHandler = deleteHandler;
- }
-
- private final String SIGNATURE_HASH_PROPERTY_KEY = Constants.INTERNAL_PROPERTY_KEY_PREFIX + "signature";
-
- public enum Operation {
- CREATE,
- UPDATE_PARTIAL,
- UPDATE_FULL
- }
-
- void mapTypedInstanceToGraph(Operation operation, ITypedReferenceableInstance... typedInstances)
- throws AtlasException {
-
- RequestContext requestContext = RequestContext.get();
- Collection<IReferenceableInstance> allNewInstances = new ArrayList<>();
- for (ITypedReferenceableInstance typedInstance : typedInstances) {
- allNewInstances.addAll(walkClassInstances(typedInstance));
- }
-
- TypeUtils.Pair<List<ITypedReferenceableInstance>, List<ITypedReferenceableInstance>> instancesPair =
- createVerticesAndDiscoverInstances(allNewInstances);
-
- List<ITypedReferenceableInstance> entitiesToCreate = instancesPair.left;
- List<ITypedReferenceableInstance> entitiesToUpdate = instancesPair.right;
-
- FullTextMapper fulltextMapper = new FullTextMapper(this, graphToTypedInstanceMapper);
- switch (operation) {
- case CREATE:
- List<String> ids = addOrUpdateAttributesAndTraits(operation, entitiesToCreate);
- addFullTextProperty(entitiesToCreate, fulltextMapper);
- requestContext.recordEntityCreate(ids);
- break;
-
- case UPDATE_FULL:
- case UPDATE_PARTIAL:
- ids = addOrUpdateAttributesAndTraits(Operation.CREATE, entitiesToCreate);
- requestContext.recordEntityCreate(ids);
- ids = addOrUpdateAttributesAndTraits(operation, entitiesToUpdate);
- requestContext.recordEntityUpdate(ids);
-
- addFullTextProperty(entitiesToCreate, fulltextMapper);
- addFullTextProperty(entitiesToUpdate, fulltextMapper);
- break;
-
- default:
- throw new UnsupportedOperationException("Not handled - " + operation);
- }
-
- for(ITypedReferenceableInstance instance : typedInstances) {
- addToEntityCache(requestContext, instance);
- }
- }
-
- private Collection<IReferenceableInstance> walkClassInstances(ITypedReferenceableInstance typedInstance)
- throws RepositoryException {
-
- EntityProcessor entityProcessor = new EntityProcessor();
- try {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Walking the object graph for instance {}", typedInstance.toShortString());
- }
-
- new ObjectGraphWalker(typeSystem, entityProcessor, typedInstance).walk();
- } catch (AtlasException me) {
- throw new RepositoryException("TypeSystem error when walking the ObjectGraph", me);
- }
-
- entityProcessor.addInstanceIfNotExists(typedInstance);
- return entityProcessor.getInstances();
- }
-
- private List<String> addOrUpdateAttributesAndTraits(Operation operation, List<ITypedReferenceableInstance> instances) throws AtlasException {
- List<String> guids = new ArrayList<>();
- for (ITypedReferenceableInstance instance : instances) {
- try {
- //new vertex, set all the properties
- String guid = addOrUpdateAttributesAndTraits(operation, instance);
- guids.add(guid);
- } catch (AtlasSchemaViolationException e) {
- throw new EntityExistsException(instance, e);
- }
- }
- return guids;
- }
-
- private String addOrUpdateAttributesAndTraits(Operation operation, ITypedReferenceableInstance typedInstance)
- throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Adding/Updating typed instance {}", typedInstance.toShortString());
- }
-
- Id id = typedInstance.getId();
- if (id == null) { // oops
- throw new RepositoryException("id cannot be null");
- }
-
- AtlasVertex instanceVertex = idToVertexMap.get(id);
-
- // add the attributes for the instance
- ClassType classType = typeSystem.getDataType(ClassType.class, typedInstance.getTypeName());
- final Map<String, AttributeInfo> fields = classType.fieldMapping().fields;
-
- mapInstanceToVertex(typedInstance, instanceVertex, fields, false, operation);
-
- if (Operation.CREATE.equals(operation)) {
- //TODO - Handle Trait updates
- addTraits(typedInstance, instanceVertex, classType);
- }
- return getId(typedInstance)._getId();
- }
-
- void mapInstanceToVertex(ITypedInstance typedInstance, AtlasVertex instanceVertex,
- Map<String, AttributeInfo> fields, boolean mapOnlyUniqueAttributes, Operation operation)
- throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Mapping instance {} to vertex {}", typedInstance.toShortString(), string(instanceVertex));
- }
-
- for (AttributeInfo attributeInfo : fields.values()) {
- if (mapOnlyUniqueAttributes && !attributeInfo.isUnique) {
- continue;
- }
- mapAttributeToVertex(typedInstance, instanceVertex, attributeInfo, operation);
- }
- GraphHelper.setProperty(instanceVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY,
- RequestContext.get().getRequestTime());
- GraphHelper.setProperty(instanceVertex, Constants.MODIFIED_BY_KEY, RequestContext.get().getUser());
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Setting modifiedBy: {} and modifiedTime: {}", RequestContext.get().getUser(), RequestContext.get().getRequestTime());
- }
- }
-
- void mapAttributeToVertex(ITypedInstance typedInstance, AtlasVertex instanceVertex,
- AttributeInfo attributeInfo, Operation operation) throws AtlasException {
-
- if ( typedInstance.isValueSet(attributeInfo.name) || operation == Operation.CREATE ) {
-
- Object attrValue = typedInstance.get(attributeInfo.name);
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Mapping attribute {} = {}", attributeInfo.name, attrValue);
- }
-
- switch (attributeInfo.dataType().getTypeCategory()) {
- case PRIMITIVE:
- case ENUM:
- mapPrimitiveOrEnumToVertex(typedInstance, instanceVertex, attributeInfo);
- break;
-
- case ARRAY:
- mapArrayCollectionToVertex(typedInstance, instanceVertex, attributeInfo, operation);
- break;
-
- case MAP:
- mapMapCollectionToVertex(typedInstance, instanceVertex, attributeInfo, operation);
- break;
-
- case STRUCT:
- case CLASS:
- String edgeLabel = graphHelper.getEdgeLabel(typedInstance, attributeInfo);
-
- AtlasEdge currentEdge = graphHelper.getEdgeForLabel(instanceVertex, edgeLabel);
- AtlasEdge newEdge = addOrUpdateReference(instanceVertex, attributeInfo, attributeInfo.dataType(),
- attrValue, currentEdge, edgeLabel, operation);
-
- if (currentEdge != null && !currentEdge.equals(newEdge)) {
- deleteHandler.deleteEdgeReference(currentEdge, attributeInfo.dataType().getTypeCategory(),
- attributeInfo.isComposite, true);
- }
- if (attributeInfo.reverseAttributeName != null && newEdge != null) {
- addReverseReference(instanceVertex, attributeInfo.reverseAttributeName, newEdge);
- }
- break;
-
- case TRAIT:
- // do NOTHING - this is taken care of earlier
- break;
-
- default:
- throw new IllegalArgumentException("Unknown type category: " + attributeInfo.dataType().getTypeCategory());
- }
- }
- }
-
- private TypeUtils.Pair<List<ITypedReferenceableInstance>, List<ITypedReferenceableInstance>> createVerticesAndDiscoverInstances(
- Collection<IReferenceableInstance> instances) throws AtlasException {
-
- List<ITypedReferenceableInstance> instancesToCreate = new ArrayList<>();
- List<ITypedReferenceableInstance> instancesToUpdate = new ArrayList<>();
-
- Map<Id,AtlasVertex> foundVertices = findExistingVertices(instances);
- //cache all the ids
- idToVertexMap.putAll(foundVertices);
-
- Set<Id> processedIds = new HashSet<>();
- for(IReferenceableInstance instance : instances) {
- Id id = instance.getId();
- if(processedIds.contains(id)) {
- continue;
- }
-
- AtlasVertex instanceVertex = foundVertices.get(id);
- ClassType classType = typeSystem.getDataType(ClassType.class, instance.getTypeName());
-
- if(instanceVertex == null) {
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("Creating new vertex for instance {}", instance.toShortString());
- }
-
- ITypedReferenceableInstance newInstance = classType.convert(instance, Multiplicity.REQUIRED);
- instanceVertex = graphHelper.createVertexWithIdentity(newInstance, classType.getAllSuperTypeNames());
- instancesToCreate.add(newInstance);
-
- //Map only unique attributes for cases of circular references
- mapInstanceToVertex(newInstance, instanceVertex, classType.fieldMapping().fields, true, Operation.CREATE);
- idToVertexMap.put(id, instanceVertex);
-
- }
- else {
-
- if(LOG.isDebugEnabled()) {
- LOG.debug("Re-using existing vertex {} for instance {}", string(instanceVertex), instance.toShortString());
- }
-
- if (!(instance instanceof ITypedReferenceableInstance)) {
- throw new IllegalStateException(
- String.format("%s is not of type ITypedReferenceableInstance", instance.toShortString()));
- }
- ITypedReferenceableInstance existingInstance = (ITypedReferenceableInstance) instance;
- instancesToUpdate.add(existingInstance);
- }
- processedIds.add(id);
-
- }
- return TypeUtils.Pair.of(instancesToCreate, instancesToUpdate);
- }
-
- private Map<Id,AtlasVertex> findExistingVertices(Collection<IReferenceableInstance> instances) throws AtlasException {
-
- VertexLookupContext context = new VertexLookupContext(this);
- Map<Id,AtlasVertex> result = new HashMap<>();
-
- for(IReferenceableInstance instance : instances) {
- context.addInstance(instance);
- }
-
- List<Id> instancesToLoad = new ArrayList<>(context.getInstancesToLoadByGuid());
- List<String> guidsToLoad = Lists.transform(instancesToLoad, new Function<Id,String>() {
-
- @Override
- public String apply(Id instance) {
- Id id = getExistingId(instance);
- return id.id;
- }
-
- });
-
- Map<String, AtlasVertex> instanceVertices = graphHelper.getVerticesForGUIDs(guidsToLoad);
-
- List<String> missingGuids = new ArrayList<>();
- for(int i = 0 ; i < instancesToLoad.size(); i++) {
-
- String guid = guidsToLoad.get(i);
- AtlasVertex instanceVertex = instanceVertices.get(guid);
- if(instanceVertex == null) {
- missingGuids.add(guid);
- continue;
- }
-
- Id instance = instancesToLoad.get(i);
- if(LOG.isDebugEnabled()) {
- LOG.debug("Found vertex {} for instance {}", string(instanceVertex), instance);
- }
- result.put(instance, instanceVertex);
- }
-
- if(missingGuids.size() > 0) {
- throw new EntityNotFoundException("Could not find entities in the repository with the following GUIDs: " + missingGuids);
- }
-
- for(Map.Entry<ClassType,List<IReferenceableInstance>> entry : context.getInstancesToLoadByUniqueAttribute().entrySet()) {
- ClassType type = entry.getKey();
- List<IReferenceableInstance> instancesForClass = entry.getValue();
- List<AtlasVertex> correspondingVertices = graphHelper.getVerticesForInstancesByUniqueAttribute(type, instancesForClass);
- for(int i = 0; i < instancesForClass.size(); i++) {
- IReferenceableInstance inst = instancesForClass.get(i);
- AtlasVertex vertex = correspondingVertices.get(i);
- result.put(getExistingId(inst), vertex);
- }
- }
-
- return result;
- }
-
-
- private void addFullTextProperty(List<ITypedReferenceableInstance> instances, FullTextMapper fulltextMapper) throws AtlasException {
-
- if(! AtlasRepositoryConfiguration.isFullTextSearchEnabled()) {
- return;
- }
-
- for (ITypedReferenceableInstance typedInstance : instances) { // Traverse
- AtlasVertex instanceVertex = getClassVertex(typedInstance);
- String fullText = fulltextMapper.mapRecursive(instanceVertex, true);
- GraphHelper.setProperty(instanceVertex, Constants.ENTITY_TEXT_PROPERTY_KEY, fullText);
- }
- }
-
- private void addTraits(ITypedReferenceableInstance typedInstance, AtlasVertex instanceVertex, ClassType classType)
- throws AtlasException {
- for (String traitName : typedInstance.getTraits()) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("mapping trait {}", traitName);
- }
-
- GraphHelper.addProperty(instanceVertex, Constants.TRAIT_NAMES_PROPERTY_KEY, traitName);
- ITypedStruct traitInstance = (ITypedStruct) typedInstance.getTrait(traitName);
-
- // add the attributes for the trait instance
- mapTraitInstanceToVertex(traitInstance, classType, instanceVertex);
- }
- }
-
- /******************************************** ARRAY **************************************************/
-
- private void mapArrayCollectionToVertex(ITypedInstance typedInstance, AtlasVertex instanceVertex,
- AttributeInfo attributeInfo, Operation operation) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Mapping instance {} for array attribute {} vertex {}", typedInstance.toShortString(),
- attributeInfo.name, string(instanceVertex));
- }
-
- List newElements = (List) typedInstance.get(attributeInfo.name);
- boolean newAttributeEmpty = (newElements == null || newElements.isEmpty());
-
- IDataType elementType = ((DataTypes.ArrayType) attributeInfo.dataType()).getElemType();
- String propertyName = GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo);
-
- List<Object> currentElements = GraphHelper.getArrayElementsProperty(elementType, instanceVertex, propertyName);
-
- List<Object> newElementsCreated = new ArrayList<>();
-
- if (!newAttributeEmpty) {
- int index = 0;
- for (; index < newElements.size(); index++) {
- Object currentElement = (currentElements != null && index < currentElements.size()) ?
- currentElements.get(index) : null;
- if (LOG.isDebugEnabled()) {
- LOG.debug("Adding/updating element at position {}, current element {}, new element {}", index,
- currentElement, newElements.get(index));
- }
-
- Object newEntry = addOrUpdateCollectionEntry(instanceVertex, attributeInfo, elementType,
- newElements.get(index), currentElement, propertyName, operation);
- newElementsCreated.add(newEntry);
- }
- }
-
- if(GraphHelper.isReference(elementType)) {
- if (attributeInfo.reverseAttributeName != null && newElementsCreated.size() > 0) {
- // Set/add the new reference value(s) on the reverse reference.
- for (Object newElement : newElementsCreated) {
- if ((newElement instanceof AtlasEdge)) {
- AtlasEdge newEdge = (AtlasEdge) newElement;
- addReverseReference(instanceVertex, attributeInfo.reverseAttributeName, newEdge);
- }
- else {
- throw new AtlasException("Invalid array element type " + newElement.getClass().getName() + " - expected " + AtlasEdge.class.getName() +
- " for reference " + GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo) + " on vertex " + GraphHelper.getVertexDetails(instanceVertex));
- }
- }
- }
-
- List<AtlasEdge> additionalEdges = removeUnusedEntries(instanceVertex, propertyName, (List)currentElements,
- (List)newElementsCreated, elementType, attributeInfo);
- newElementsCreated.addAll(additionalEdges);
- }
-
- // for dereference on way out
- GraphHelper.setArrayElementsProperty(elementType, instanceVertex, propertyName, newElementsCreated);
- }
-
- //Removes unused edges from the old collection, compared to the new collection
- private List<AtlasEdge> removeUnusedEntries(AtlasVertex instanceVertex, String edgeLabel,
- Collection<AtlasEdge> currentEntries,
- Collection<AtlasEdge> newEntries,
- IDataType entryType, AttributeInfo attributeInfo) throws AtlasException {
- if (currentEntries != null && !currentEntries.isEmpty()) {
- LOG.debug("Removing unused entries from the old collection");
- if (entryType.getTypeCategory() == TypeCategory.STRUCT
- || entryType.getTypeCategory() == TypeCategory.CLASS) {
-
- //Remove the edges for (current edges - new edges)
- List<AtlasEdge> cloneElements = new ArrayList<>(currentEntries);
- cloneElements.removeAll(newEntries);
- List<AtlasEdge> additionalElements = new ArrayList<>();
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Removing unused entries from the old collection - {}", cloneElements);
- }
-
- if (!cloneElements.isEmpty()) {
- for (AtlasEdge edge : cloneElements) {
- boolean deleted = deleteHandler.deleteEdgeReference(edge, entryType.getTypeCategory(),
- attributeInfo.isComposite, true);
- if (!deleted) {
- additionalElements.add(edge);
- }
- }
- }
- return additionalElements;
- }
- }
- return new ArrayList<>();
- }
-
- /******************************************** MAP **************************************************/
-
- private void mapMapCollectionToVertex(ITypedInstance typedInstance, AtlasVertex instanceVertex,
- AttributeInfo attributeInfo, Operation operation) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Mapping instance {} to vertex {} for attribute {}", typedInstance.toShortString(), string(instanceVertex),
- attributeInfo.name);
- }
-
- @SuppressWarnings("unchecked") Map<Object, Object> newAttribute =
- (Map<Object, Object>) typedInstance.get(attributeInfo.name);
-
- boolean newAttributeEmpty = (newAttribute == null || newAttribute.isEmpty());
-
- IDataType elementType = ((DataTypes.MapType) attributeInfo.dataType()).getValueType();
- String propertyName = GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo);
-
- Map<String, Object> currentMap = new HashMap<>();
- Map<String, Object> newMap = new HashMap<>();
-
- List<String> currentKeys = GraphHelper.getListProperty(instanceVertex, propertyName);
- if (currentKeys != null && !currentKeys.isEmpty()) {
- for (String key : currentKeys) {
- String propertyNameForKey = GraphHelper.getQualifiedNameForMapKey(propertyName, key);
- Object propertyValueForKey = GraphHelper.getMapValueProperty(elementType, instanceVertex, propertyNameForKey);
- currentMap.put(key, propertyValueForKey);
- }
- }
-
- if (!newAttributeEmpty) {
- for (Map.Entry<Object,Object> entry : newAttribute.entrySet()) {
- String keyStr = entry.getKey().toString();
- String propertyNameForKey = GraphHelper.getQualifiedNameForMapKey(propertyName, keyStr);
-
- Object newEntry = addOrUpdateCollectionEntry(instanceVertex, attributeInfo, elementType,
- entry.getValue(), currentMap.get(keyStr), propertyNameForKey, operation);
-
- //Add/Update/Remove property value
- GraphHelper.setMapValueProperty(elementType, instanceVertex, propertyNameForKey, newEntry);
- newMap.put(keyStr, newEntry);
- }
- }
-
- Map<String, Object> additionalMap =
- removeUnusedMapEntries(instanceVertex, propertyName, currentMap, newMap, elementType, attributeInfo);
-
- Set<String> newKeys = new HashSet<>(newMap.keySet());
- newKeys.addAll(additionalMap.keySet());
-
-
- // for dereference on way out
- GraphHelper.setListProperty(instanceVertex, propertyName, new ArrayList<>(newKeys));
- }
-
- //Remove unused entries from map
- private Map<String, Object> removeUnusedMapEntries(
- AtlasVertex instanceVertex, String propertyName,
- Map<String, Object> currentMap,
- Map<String, Object> newMap, IDataType elementType,
- AttributeInfo attributeInfo)
- throws AtlasException {
-
- Map<String, Object> additionalMap = new HashMap<>();
- for (String currentKey : currentMap.keySet()) {
-
- boolean shouldDeleteKey = !newMap.containsKey(currentKey);
- if (GraphHelper.isReference(elementType)) {
-
- //Delete the edge reference if its not part of new edges created/updated
- AtlasEdge currentEdge = (AtlasEdge)currentMap.get(currentKey);
-
- if (!newMap.values().contains(currentEdge)) {
-
- boolean deleted =
- deleteHandler.deleteEdgeReference(currentEdge, elementType.getTypeCategory(), attributeInfo.isComposite, true);
- if (!deleted) {
- additionalMap.put(currentKey, currentEdge);
- shouldDeleteKey = false;
- }
- }
- }
-
- if (shouldDeleteKey) {
- String propertyNameForKey = GraphHelper.getQualifiedNameForMapKey(propertyName, currentKey);
- GraphHelper.setProperty(instanceVertex, propertyNameForKey, null);
- }
- }
- return additionalMap;
- }
-
- /******************************************** ARRAY & MAP **************************************************/
-
- private Object addOrUpdateCollectionEntry(AtlasVertex instanceVertex, AttributeInfo attributeInfo,
- IDataType elementType, Object newAttributeValue, Object currentValue,
- String propertyName, Operation operation)
- throws AtlasException {
-
- switch (elementType.getTypeCategory()) {
- case PRIMITIVE:
- case ENUM:
- return newAttributeValue != null ? newAttributeValue : null;
-
- case ARRAY:
- case MAP:
- case TRAIT:
- // do nothing
- return null;
-
- case STRUCT:
- case CLASS:
- final String edgeLabel = GraphHelper.EDGE_LABEL_PREFIX + propertyName;
- return addOrUpdateReference(instanceVertex, attributeInfo, elementType, newAttributeValue, (AtlasEdge)currentValue,
- edgeLabel, operation);
-
- default:
- throw new IllegalArgumentException("Unknown type category: " + elementType.getTypeCategory());
- }
- }
-
- private AtlasEdge addOrUpdateReference(AtlasVertex instanceVertex, AttributeInfo attributeInfo,
- IDataType attributeType, Object newAttributeValue, AtlasEdge currentEdge,
- String edgeLabel, Operation operation) throws AtlasException {
- switch (attributeType.getTypeCategory()) {
- case STRUCT:
- return addOrUpdateStruct(instanceVertex, attributeInfo, (ITypedStruct) newAttributeValue, currentEdge,
- edgeLabel, operation);
-
- case CLASS:
- return addOrUpdateClassVertex(instanceVertex, currentEdge,
- (ITypedReferenceableInstance) newAttributeValue, attributeInfo, edgeLabel);
-
- default:
- throw new IllegalArgumentException("Unknown type category: " + attributeType.getTypeCategory());
- }
- }
- /******************************************** STRUCT **************************************************/
-
-
- private AtlasEdge addOrUpdateStruct(AtlasVertex instanceVertex, AttributeInfo attributeInfo,
- ITypedStruct newAttributeValue, AtlasEdge currentEdge,
- String edgeLabel, Operation operation) throws AtlasException {
- AtlasEdge newEdge = null;
- if (GraphHelper.elementExists(currentEdge) && newAttributeValue != null) {
- //update
- updateStructVertex(newAttributeValue, currentEdge, operation);
- newEdge = currentEdge;
- } else if (! GraphHelper.elementExists(currentEdge) && newAttributeValue != null) {
- //add
- newEdge = addStructVertex(newAttributeValue, instanceVertex, attributeInfo, edgeLabel);
- }
- return newEdge;
- }
-
- private AtlasEdge addStructVertex(ITypedStruct structInstance, AtlasVertex instanceVertex,
- AttributeInfo attributeInfo, String edgeLabel) throws AtlasException {
- // add a new vertex for the struct or trait instance
- AtlasVertex structInstanceVertex = graphHelper.createVertexWithoutIdentity(structInstance.getTypeName(), null,
- Collections.<String>emptySet()); // no super types for struct type
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("created vertex {} for struct {} value {}", string(structInstanceVertex), attributeInfo.name,
- structInstance.toShortString());
- }
-
- // map all the attributes to this new vertex
- mapInstanceToVertex(structInstance, structInstanceVertex, structInstance.fieldMapping().fields, false,
- Operation.CREATE);
- // add an edge to the newly created vertex from the parent
- AtlasEdge newEdge = graphHelper.getOrCreateEdge(instanceVertex, structInstanceVertex, edgeLabel);
-
- return newEdge;
- }
-
- private void updateStructVertex(ITypedStruct newAttributeValue, AtlasEdge currentEdge,
- Operation operation) throws AtlasException {
- //Already existing vertex. Update
- AtlasVertex structInstanceVertex = currentEdge.getInVertex();
-
- if (LOG.isDebugEnabled()) {
- LOG.debug("Updating struct vertex {} with struct {}", string(structInstanceVertex), newAttributeValue.toShortString());
- }
-
- // Update attributes
- final MessageDigest digester = SHA256Utils.getDigester();
- String newSignature = newAttributeValue.getSignatureHash(digester);
- String curSignature = GraphHelper.getSingleValuedProperty(structInstanceVertex, SIGNATURE_HASH_PROPERTY_KEY, String.class);
-
- if (!newSignature.equals(curSignature)) {
- //Update struct vertex instance only if there is a change
- if (LOG.isDebugEnabled()) {
- LOG.debug("Updating struct {} since signature has changed {} {} ", newAttributeValue, curSignature, newSignature);
- }
-
- mapInstanceToVertex(newAttributeValue, structInstanceVertex, newAttributeValue.fieldMapping().fields, false, operation);
- GraphHelper.setProperty(structInstanceVertex, SIGNATURE_HASH_PROPERTY_KEY, String.valueOf(newSignature));
- }
- }
-
- /******************************************** CLASS **************************************************/
-
- private AtlasEdge addOrUpdateClassVertex(AtlasVertex instanceVertex, AtlasEdge currentEdge,
- ITypedReferenceableInstance newAttributeValue, AttributeInfo attributeInfo,
- String edgeLabel) throws AtlasException {
- AtlasVertex newReferenceVertex = getClassVertex(newAttributeValue);
- if( ! GraphHelper.elementExists(newReferenceVertex) && newAttributeValue != null) {
- LOG.error("Could not find vertex for Class Reference {}", newAttributeValue);
- throw new EntityNotFoundException("Could not find vertex for Class Reference " + newAttributeValue);
- }
-
- AtlasEdge newEdge = null;
- if (GraphHelper.elementExists(currentEdge) && newAttributeValue != null) {
- newEdge = updateClassEdge(instanceVertex, currentEdge, newAttributeValue, newReferenceVertex,
- attributeInfo, edgeLabel);
- } else if (! GraphHelper.elementExists(currentEdge) && newAttributeValue != null){
- newEdge = addClassEdge(instanceVertex, newReferenceVertex, edgeLabel);
-
- }
- return newEdge;
- }
-
-
- private AtlasEdge addClassEdge(AtlasVertex instanceVertex, AtlasVertex toVertex, String edgeLabel) throws AtlasException {
- // add an edge to the class vertex from the instance
- return graphHelper.getOrCreateEdge(instanceVertex, toVertex, edgeLabel);
- }
-
- private <V,E> AtlasVertex<V,E> getClassVertex(ITypedReferenceableInstance typedReference) throws EntityNotFoundException {
- AtlasVertex<V,E> referenceVertex = null;
- Id id = null;
- if (typedReference != null) {
- id = getExistingId(typedReference);
- referenceVertex = idToVertexMap.get(id);
- if(referenceVertex == null && id.isAssigned()) {
- referenceVertex = graphHelper.getVertexForGUID(id.id);
- }
- }
-
- return referenceVertex;
- }
-
- Id getExistingId(IReferenceableInstance instance) {
- return instance instanceof Id ? (Id) instance : instance.getId();
- }
-
- private Id getId(ITypedReferenceableInstance typedReference) throws EntityNotFoundException {
- if (typedReference == null) {
- throw new IllegalArgumentException("typedReference must be non-null");
- }
- Id id = typedReference instanceof Id ? (Id) typedReference : typedReference.getId();
-
- if (id.isUnassigned()) {
- AtlasVertex classVertex = idToVertexMap.get(id);
- String guid = GraphHelper.getGuid(classVertex);
- id = new Id(guid, 0, typedReference.getTypeName());
- }
- return id;
- }
-
-
- private AtlasEdge updateClassEdge(AtlasVertex instanceVertex, AtlasEdge currentEdge,
- ITypedReferenceableInstance newAttributeValue,
- AtlasVertex newVertex, AttributeInfo attributeInfo,
- String edgeLabel) throws AtlasException {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Updating {} for reference attribute {}", string(currentEdge), attributeInfo.name);
- }
-
- // Update edge if it exists
- AtlasVertex currentVertex = currentEdge.getInVertex();
- String currentEntityId = GraphHelper.getGuid(currentVertex);
- String newEntityId = getId(newAttributeValue).id;
- AtlasEdge newEdge = currentEdge;
- if (!currentEntityId.equals(newEntityId)) {
- // add an edge to the class vertex from the instance
- if (newVertex != null) {
- newEdge = graphHelper.getOrCreateEdge(instanceVertex, newVertex, edgeLabel);
-
- }
- }
-
- return newEdge;
- }
-
- /******************************************** TRAITS ****************************************************/
-
- void mapTraitInstanceToVertex(ITypedStruct traitInstance, IDataType entityType, AtlasVertex parentInstanceVertex)
- throws AtlasException {
- // add a new AtlasVertex for the struct or trait instance
- final String traitName = traitInstance.getTypeName();
- AtlasVertex traitInstanceVertex = graphHelper.createVertexWithoutIdentity(traitInstance.getTypeName(), null,
- typeSystem.getDataType(TraitType.class, traitName).getAllSuperTypeNames());
- if (LOG.isDebugEnabled()) {
- LOG.debug("created vertex {} for trait {}", string(traitInstanceVertex), traitName);
- }
-
- // map all the attributes to this newly created AtlasVertex
- mapInstanceToVertex(traitInstance, traitInstanceVertex, traitInstance.fieldMapping().fields, false, Operation.CREATE);
-
- // add an edge to the newly created AtlasVertex from the parent
- String relationshipLabel = GraphHelper.getTraitLabel(entityType.getName(), traitName);
- graphHelper.getOrCreateEdge(parentInstanceVertex, traitInstanceVertex, relationshipLabel);
- }
-
- /******************************************** PRIMITIVES **************************************************/
-
- private void mapPrimitiveOrEnumToVertex(ITypedInstance typedInstance, AtlasVertex instanceVertex,
- AttributeInfo attributeInfo) throws AtlasException {
- Object attrValue = typedInstance.get(attributeInfo.name);
-
- final String vertexPropertyName = GraphHelper.getQualifiedFieldName(typedInstance, attributeInfo);
- Object propertyValue = null;
-
- if (attrValue == null) {
- propertyValue = null;
- } else if (attributeInfo.dataType() == DataTypes.STRING_TYPE) {
- propertyValue = typedInstance.getString(attributeInfo.name);
- } else if (attributeInfo.dataType() == DataTypes.SHORT_TYPE) {
- propertyValue = typedInstance.getShort(attributeInfo.name);
- } else if (attributeInfo.dataType() == DataTypes.INT_TYPE) {
- propertyValue = typedInstance.getInt(attributeInfo.name);
- } else if (attributeInfo.dataType() == DataTypes.BIGINTEGER_TYPE) {
- propertyValue = typedInstance.getBigInt(attributeInfo.name);
- } else if (attributeInfo.dataType() == DataTypes.BOOLEAN_TYPE) {
- propertyValue = typedInstance.getBoolean(attributeInfo.name);
- } else if (attributeInfo.dataType() == DataTypes.BYTE_TYPE) {
- propertyValue = typedInstance.getByte(attributeInfo.name);
- } else if (attributeInfo.dataType() == DataTypes.LONG_TYPE) {
- propertyValue = typedInstance.getLong(attributeInfo.name);
- } else if (attributeInfo.dataType() == DataTypes.FLOAT_TYPE) {
- propertyValue = typedInstance.getFloat(attributeInfo.name);
- } else if (attributeInfo.dataType() == DataTypes.DOUBLE_TYPE) {
- propertyValue = typedInstance.getDouble(attributeInfo.name);
- } else if (attributeInfo.dataType() == DataTypes.BIGDECIMAL_TYPE) {
- propertyValue = typedInstance.getBigDecimal(attributeInfo.name);
- } else if (attributeInfo.dataType() == DataTypes.DATE_TYPE) {
- final Date dateVal = typedInstance.getDate(attributeInfo.name);
- //Convert Property value to Long while persisting
- if (dateVal != null) {
- propertyValue = dateVal.getTime();
- }
- } else if (attributeInfo.dataType().getTypeCategory() == TypeCategory.ENUM) {
- if (attrValue != null) {
- propertyValue = ((EnumValue) attrValue).value;
- }
- }
-
- GraphHelper.setProperty(instanceVertex, vertexPropertyName, propertyValue);
- }
-
- public AtlasVertex lookupVertex(Id refId) {
- return idToVertexMap.get(refId);
- }
-
- private void addToEntityCache(RequestContext context, ITypedReferenceableInstance instance)
- throws EntityNotFoundException {
-
- Id instanceId = instance.getId();
- if(instanceId.isUnassigned()) {
- if(instance instanceof ReferenceableInstance) {
- //When the id is unassigned, we can only cache the instance of it is
- //an instance of ReferenceableInstance, since replaceWithNewId is not
- //currently in the ITypedReferenceableInstance interface.
- Id id = getId(instance);
- ((ReferenceableInstance)instance).replaceWithNewId(id);
- context.cache(instance);
- }
- }
- else {
- context.cache(instance);
- }
- }
-
- public GuidMapping createGuidMapping() {
- Map<String,String> mapping = new HashMap<>(idToVertexMap.size());
- for(Map.Entry<Id, AtlasVertex> entry : idToVertexMap.entrySet()) {
- Id id = entry.getKey();
- if (id.isUnassigned()) {
- AtlasVertex classVertex = entry.getValue();
- mapping.put(id._getId(), GraphHelper.getGuid(classVertex));
- }
- }
- return new GuidMapping(mapping);
- }
-
-
- private <V,E> void addReverseReference(AtlasVertex<V,E> vertex, String reverseAttributeName, AtlasEdge<V,E> edge)
- throws AtlasException {
-
- String typeName = GraphHelper.getTypeName(vertex);
- Id id = GraphHelper.getIdFromVertex(typeName, vertex);
-
- AtlasVertex<V, E> reverseVertex = edge.getInVertex();
- String reverseTypeName = GraphHelper.getTypeName(reverseVertex);
- Id reverseId = GraphHelper.getIdFromVertex(reverseTypeName, reverseVertex);
- IDataType reverseType = typeSystem.getDataType(IDataType.class, reverseTypeName);
- AttributeInfo reverseAttrInfo = TypesUtil.getFieldMapping(reverseType).fields.get(reverseAttributeName);
- if (reverseAttrInfo.dataType().getTypeCategory() == TypeCategory.MAP) {
- // If the reverse reference is a map, what would be used as the key?
- // Not supporting automatic update of reverse map references.
- LOG.debug("Automatic update of reverse map reference is not supported - reference = {}",
- GraphHelper.getQualifiedFieldName(reverseType, reverseAttributeName));
- return;
- }
-
- String propertyName = GraphHelper.getQualifiedFieldName(reverseType, reverseAttributeName);
- String reverseEdgeLabel = GraphHelper.EDGE_LABEL_PREFIX + propertyName;
- AtlasEdge<V, E> reverseEdge = graphHelper.getEdgeForLabel(reverseVertex, reverseEdgeLabel);
-
- AtlasEdge<V, E> newEdge = null;
- if (reverseEdge != null) {
- newEdge = updateClassEdge(reverseVertex, reverseEdge, id, vertex, reverseAttrInfo, reverseEdgeLabel);
- }
- else {
- newEdge = addClassEdge(reverseVertex, vertex, reverseEdgeLabel);
- }
-
- switch (reverseAttrInfo.dataType().getTypeCategory()) {
- case CLASS:
- if (reverseEdge != null && !reverseEdge.getId().toString().equals(newEdge.getId().toString())) {
- // Disconnect old reference
- deleteHandler.deleteEdgeReference(reverseEdge, reverseAttrInfo.dataType().getTypeCategory(),
- reverseAttrInfo.isComposite, true);
- }
- break;
- case ARRAY:
- // Add edge ID to property value
- List<String> elements = reverseVertex.getProperty(propertyName, List.class);
- if (elements == null) {
- elements = new ArrayList<>();
- elements.add(newEdge.getId().toString());
- reverseVertex.setProperty(propertyName, elements);
- }
- else {
- if (!elements.contains(newEdge.getId().toString())) {
- elements.add(newEdge.getId().toString());
- reverseVertex.setProperty(propertyName, elements);
- }
- }
- break;
- }
-
- RequestContext requestContext = RequestContext.get();
- GraphHelper.setProperty(reverseVertex, Constants.MODIFICATION_TIMESTAMP_PROPERTY_KEY,
- requestContext.getRequestTime());
- requestContext.recordEntityUpdate(reverseId._getId());
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/graph/VertexLookupContext.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/graph/VertexLookupContext.java b/repository/src/main/java/org/apache/atlas/repository/graph/VertexLookupContext.java
deleted file mode 100644
index dd90be6..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/graph/VertexLookupContext.java
+++ /dev/null
@@ -1,166 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- * <p>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.graph;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.IDataType;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-
-/**
- * Helper class for TypedInstanceGraphMapper. Determines which instances
- * should be loaded by GUID and which ones should be loaded by unique attribute.
- * In addition, it sorts the instances that should be loaded by unique
- * attribute by class.
- *
- */
-public class VertexLookupContext {
-
- private final TypedInstanceToGraphMapper mapper;
-
- private static final TypeSystem typeSystem = TypeSystem.getInstance();
-
- private Map<ClassType,List<IReferenceableInstance>> instancesWithoutGuids = new HashMap<>();
- private Set<Id> guidsToLookup = new HashSet<>();
-
-
- /**
- * @param typedInstanceToGraphMapper
- */
- VertexLookupContext(TypedInstanceToGraphMapper typedInstanceToGraphMapper) {
- mapper = typedInstanceToGraphMapper;
- }
-
- /**
- * Adds an instance to be loaded.
- *
- */
- public void addInstance(IReferenceableInstance instance) throws AtlasException {
-
- ClassType classType = typeSystem.getDataType(ClassType.class, instance.getTypeName());
- ITypedReferenceableInstance newInstance = classType.convert(instance, Multiplicity.REQUIRED);
- findReferencedInstancesToPreLoad(newInstance);
- Id id = instance.getId();
- if(mapper.lookupVertex(id) == null) {
- if(id.isAssigned()) {
- guidsToLookup.add(id);
- }
- else {
- addToClassMap(classType, instance);
- }
- }
- }
-
- /**
- * Returns the instances that should be loaded by unique attribute, sorted by
- * class.
- *
- */
- public Map<ClassType,List<IReferenceableInstance>> getInstancesToLoadByUniqueAttribute() {
- return instancesWithoutGuids;
- }
-
- /**
- * Returns the Ids of the instance that should be loaded by GUID
- *
- * @return
- */
- public Set<Id> getInstancesToLoadByGuid() {
- return guidsToLookup;
- }
-
- private void addToClassMap(ClassType classType, IReferenceableInstance instance) throws AtlasException {
-
- List<IReferenceableInstance> toUpdate = instancesWithoutGuids.get(classType);
- if(toUpdate == null) {
- toUpdate = new ArrayList<>();
- instancesWithoutGuids.put(classType, toUpdate);
- }
- toUpdate.add(instance);
- }
-
- private void findReferencedInstancesToPreLoad(ITypedReferenceableInstance newInstance) throws AtlasException {
- //pre-load vertices for reference fields
- for(AttributeInfo info : newInstance.fieldMapping().fields.values()) {
-
- if(info.dataType().getTypeCategory() == TypeCategory.CLASS) {
- ITypedReferenceableInstance newAttributeValue = (ITypedReferenceableInstance)newInstance.get(info.name);
- addAdditionalInstance(newAttributeValue);
- }
-
- if(info.dataType().getTypeCategory() == TypeCategory.ARRAY) {
- IDataType elementType = ((DataTypes.ArrayType) info.dataType()).getElemType();
- if(elementType.getTypeCategory() == TypeCategory.CLASS) {
- List<ITypedReferenceableInstance> newElements = (List) newInstance.get(info.name);
- addAdditionalInstances(newElements);
- }
- }
-
- if(info.dataType().getTypeCategory() == TypeCategory.MAP) {
- IDataType elementType = ((DataTypes.MapType) info.dataType()).getValueType();
- if(elementType.getTypeCategory() == TypeCategory.CLASS) {
- Map<Object, ITypedReferenceableInstance> newAttribute =
- (Map<Object, ITypedReferenceableInstance>) newInstance.get(info.name);
-
- if(newAttribute != null) {
- addAdditionalInstances(newAttribute.values());
- }
- }
- }
- }
- }
-
- private void addAdditionalInstance(ITypedReferenceableInstance instance) {
-
- if(instance == null) {
- return;
- }
-
- Id id = mapper.getExistingId(instance);
- if(! id.isAssigned()) {
- return;
- }
- guidsToLookup.add(id);
- }
-
-
-
- private void addAdditionalInstances(Collection<ITypedReferenceableInstance> newElements) {
- if(newElements != null) {
- for(ITypedReferenceableInstance instance: newElements) {
- addAdditionalInstance(instance);
- }
- }
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/memory/AttributeStores.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/memory/AttributeStores.java b/repository/src/main/java/org/apache/atlas/repository/memory/AttributeStores.java
deleted file mode 100755
index 1fb9d69..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/memory/AttributeStores.java
+++ /dev/null
@@ -1,632 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.memory;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import it.unimi.dsi.fastutil.booleans.BooleanArrayList;
-import it.unimi.dsi.fastutil.bytes.ByteArrayList;
-import it.unimi.dsi.fastutil.doubles.DoubleArrayList;
-import it.unimi.dsi.fastutil.floats.FloatArrayList;
-import it.unimi.dsi.fastutil.ints.IntArrayList;
-import it.unimi.dsi.fastutil.longs.LongArrayList;
-import it.unimi.dsi.fastutil.shorts.ShortArrayList;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.StructInstance;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.IConstructableType;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-@Deprecated
-public class AttributeStores {
-
- private static final Object NULL_VAL = new Object();
-
- static IAttributeStore createStore(AttributeInfo i) throws RepositoryException {
- switch (i.dataType().getTypeCategory()) {
- case PRIMITIVE:
- if (i.dataType() == DataTypes.BOOLEAN_TYPE) {
- return new BooleanAttributeStore(i);
- } else if (i.dataType() == DataTypes.BYTE_TYPE) {
- return new ByteAttributeStore(i);
- } else if (i.dataType() == DataTypes.SHORT_TYPE) {
- return new ShortAttributeStore(i);
- } else if (i.dataType() == DataTypes.INT_TYPE) {
- return new IntAttributeStore(i);
- } else if (i.dataType() == DataTypes.LONG_TYPE) {
- return new LongAttributeStore(i);
- } else if (i.dataType() == DataTypes.FLOAT_TYPE) {
- return new FloatAttributeStore(i);
- } else if (i.dataType() == DataTypes.DOUBLE_TYPE) {
- return new DoubleAttributeStore(i);
- } else if (i.dataType() == DataTypes.BIGINTEGER_TYPE) {
- return new BigIntStore(i);
- } else if (i.dataType() == DataTypes.BIGDECIMAL_TYPE) {
- return new BigDecimalStore(i);
- } else if (i.dataType() == DataTypes.DATE_TYPE) {
- return new DateStore(i);
- } else if (i.dataType() == DataTypes.STRING_TYPE) {
- return new StringStore(i);
- } else if (i.dataType() == DataTypes.STRING_TYPE) {
- return new StringStore(i);
- } else {
- throw new RepositoryException(String.format("Unknown datatype %s", i.dataType()));
- }
- case ENUM:
- return new IntAttributeStore(i);
- case ARRAY:
- return new ImmutableListStore(i);
- case MAP:
- return new ImmutableMapStore(i);
- case STRUCT:
- return new StructStore(i);
- case CLASS:
- return new IdStore(i);
- default:
- throw new RepositoryException(String.format("Unknown Category for datatype %s", i.dataType()));
- }
- }
-
- static abstract class AbstractAttributeStore implements IAttributeStore {
- final BooleanArrayList nullList;
- final Map<Integer, Map<String, Object>> hiddenVals;
- AttributeInfo attrInfo;
-
- AbstractAttributeStore(AttributeInfo attrInfo) {
- this.attrInfo = attrInfo;
- this.nullList = new BooleanArrayList();
- hiddenVals = new HashMap<>();
- }
-
- final void setNull(int pos, boolean flag) {
- nullList.set(pos, flag);
- }
-
- final boolean getNull(int pos) {
- return nullList.get(pos);
- }
-
- void storeHiddenVals(int pos, IConstructableType type, StructInstance instance) throws RepositoryException {
- List<String> attrNames = type.getNames(attrInfo);
- Map<String, Object> m = hiddenVals.get(pos);
- if (m == null) {
- m = new HashMap<>();
- hiddenVals.put(pos, m);
- }
- for (int i = 2; i < attrNames.size(); i++) {
- String attrName = attrNames.get(i);
- int nullPos = instance.fieldMapping().fieldNullPos.get(attrName);
- int colPos = instance.fieldMapping().fieldPos.get(attrName);
- if (instance.nullFlags[nullPos]) {
- m.put(attrName, NULL_VAL);
- } else {
- //m.put(attrName, instance.bools[colPos]);
- store(instance, colPos, attrName, m);
- }
- }
- }
-
- void loadHiddenVals(int pos, IConstructableType type, StructInstance instance) throws RepositoryException {
- List<String> attrNames = type.getNames(attrInfo);
- Map<String, Object> m = hiddenVals.get(pos);
- for (int i = 2; i < attrNames.size(); i++) {
- String attrName = attrNames.get(i);
- int nullPos = instance.fieldMapping().fieldNullPos.get(attrName);
- int colPos = instance.fieldMapping().fieldPos.get(attrName);
- Object val = m == null ? NULL_VAL : m.get(attrName);
- if (val == NULL_VAL) {
- instance.nullFlags[nullPos] = true;
- } else {
- instance.nullFlags[nullPos] = false;
- load(instance, colPos, val);
- }
- }
- }
-
- @Override
- public void store(int pos, IConstructableType type, StructInstance instance) throws RepositoryException {
- List<String> attrNames = type.getNames(attrInfo);
- String attrName = attrNames.get(0);
- int nullPos = instance.fieldMapping().fieldNullPos.get(attrName);
- int colPos = instance.fieldMapping().fieldPos.get(attrName);
- nullList.set(pos, instance.nullFlags[nullPos]);
-
- if (pos == nullList.size()) {
- nullList.add(instance.nullFlags[nullPos]);
- } else {
- nullList.set(pos, instance.nullFlags[nullPos]);
- }
- //list.set(pos, instance.bools[colPos]);
- store(instance, colPos, pos);
-
- if (attrNames.size() > 1) {
- storeHiddenVals(pos, type, instance);
- }
- }
-
- @Override
- public void load(int pos, IConstructableType type, StructInstance instance) throws RepositoryException {
- List<String> attrNames = type.getNames(attrInfo);
- String attrName = attrNames.get(0);
- int nullPos = instance.fieldMapping().fieldNullPos.get(attrName);
- int colPos = instance.fieldMapping().fieldPos.get(attrName);
-
- if (nullList.get(pos)) {
- instance.nullFlags[nullPos] = true;
- } else {
- instance.nullFlags[nullPos] = false;
- load(instance, colPos, pos);
- }
-
- if (attrNames.size() > 1) {
- loadHiddenVals(pos, type, instance);
- }
- }
-
- /*
- * store the value from colPos in instance into the list.
- */
- protected abstract void store(StructInstance instance, int colPos, int pos) throws RepositoryException;
-
- /*
- * load the value from pos in list into colPos in instance.
- */
- protected abstract void load(StructInstance instance, int colPos, int pos) throws RepositoryException;
-
- /*
- * store the value from colPos in map as attrName
- */
- protected abstract void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m);
-
- /*
- * load the val into colPos in instance.
- */
- protected abstract void load(StructInstance instance, int colPos, Object val);
-
- }
-
- static abstract class PrimitiveAttributeStore extends AbstractAttributeStore implements IAttributeStore {
-
-
- public PrimitiveAttributeStore(AttributeInfo attrInfo) {
- super(attrInfo);
- }
-
- }
-
- static class BooleanAttributeStore extends PrimitiveAttributeStore {
-
- final BooleanArrayList list;
-
- BooleanAttributeStore(AttributeInfo attrInfo) {
- super(attrInfo);
- this.list = new BooleanArrayList();
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.bools[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.bools[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.bools[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.bools[colPos] = (Boolean) val;
- }
-
- @Override
- public void ensureCapacity(int pos) throws RepositoryException {
- list.size(pos + 1);
- nullList.size(pos + 1);
- }
- }
-
- static class ByteAttributeStore extends PrimitiveAttributeStore {
-
- final ByteArrayList list;
-
- ByteAttributeStore(AttributeInfo attrInfo) {
- super(attrInfo);
- this.list = new ByteArrayList();
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.bytes[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.bytes[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.bytes[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.bytes[colPos] = (Byte) val;
- }
-
- @Override
- public void ensureCapacity(int pos) throws RepositoryException {
- list.size(pos + 1);
- nullList.size(pos + 1);
- }
- }
-
- static class ShortAttributeStore extends PrimitiveAttributeStore {
-
- final ShortArrayList list;
-
- ShortAttributeStore(AttributeInfo attrInfo) {
- super(attrInfo);
- this.list = new ShortArrayList();
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.shorts[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.shorts[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.shorts[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.shorts[colPos] = (Short) val;
- }
-
- @Override
- public void ensureCapacity(int pos) throws RepositoryException {
- list.size(pos + 1);
- nullList.size(pos + 1);
- }
- }
-
- static class IntAttributeStore extends PrimitiveAttributeStore {
-
- final IntArrayList list;
-
- IntAttributeStore(AttributeInfo attrInfo) {
- super(attrInfo);
- this.list = new IntArrayList();
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.ints[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.ints[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.ints[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.ints[colPos] = (Integer) val;
- }
-
- @Override
- public void ensureCapacity(int pos) throws RepositoryException {
- list.size(pos + 1);
- nullList.size(pos + 1);
- }
- }
-
- static class LongAttributeStore extends PrimitiveAttributeStore {
-
- final LongArrayList list;
-
- LongAttributeStore(AttributeInfo attrInfo) {
- super(attrInfo);
- this.list = new LongArrayList();
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.longs[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.longs[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.longs[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.longs[colPos] = (Long) val;
- }
-
- @Override
- public void ensureCapacity(int pos) throws RepositoryException {
- list.size(pos + 1);
- nullList.size(pos + 1);
- }
- }
-
- static class FloatAttributeStore extends PrimitiveAttributeStore {
-
- final FloatArrayList list;
-
- FloatAttributeStore(AttributeInfo attrInfo) {
- super(attrInfo);
- this.list = new FloatArrayList();
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.floats[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.floats[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.floats[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.floats[colPos] = (Float) val;
- }
-
- @Override
- public void ensureCapacity(int pos) throws RepositoryException {
- list.size(pos + 1);
- nullList.size(pos + 1);
- }
- }
-
- static class DoubleAttributeStore extends PrimitiveAttributeStore {
-
- final DoubleArrayList list;
-
- DoubleAttributeStore(AttributeInfo attrInfo) {
- super(attrInfo);
- this.list = new DoubleArrayList();
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.doubles[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.doubles[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.doubles[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.doubles[colPos] = (Double) val;
- }
-
- @Override
- public void ensureCapacity(int pos) throws RepositoryException {
- list.size(pos + 1);
- nullList.size(pos + 1);
- }
- }
-
- static abstract class ObjectAttributeStore<T> extends AbstractAttributeStore {
-
- final ArrayList<T> list;
-
- ObjectAttributeStore(Class<T> cls, AttributeInfo attrInfo) {
- super(attrInfo);
- this.list = Lists.newArrayList((T) null);
- }
-
- @Override
- public void ensureCapacity(int pos) throws RepositoryException {
- while (list.size() < pos + 1) {
- list.add(null);
- }
- nullList.size(pos + 1);
- }
- }
-
- static class BigIntStore extends ObjectAttributeStore<BigInteger> {
-
- public BigIntStore(AttributeInfo attrInfo) {
- super(BigInteger.class, attrInfo);
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.bigIntegers[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.bigIntegers[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.bigIntegers[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.bigIntegers[colPos] = (BigInteger) val;
- }
-
- }
-
- static class BigDecimalStore extends ObjectAttributeStore<BigDecimal> {
-
- public BigDecimalStore(AttributeInfo attrInfo) {
- super(BigDecimal.class, attrInfo);
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.bigDecimals[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.bigDecimals[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.bigDecimals[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.bigDecimals[colPos] = (BigDecimal) val;
- }
-
- }
-
- static class DateStore extends ObjectAttributeStore<Date> {
-
- public DateStore(AttributeInfo attrInfo) {
- super(Date.class, attrInfo);
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.dates[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.dates[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.dates[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.dates[colPos] = (Date) val;
- }
-
- }
-
- static class StringStore extends ObjectAttributeStore<String> {
-
- public StringStore(AttributeInfo attrInfo) {
- super(String.class, attrInfo);
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.strings[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.strings[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.strings[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.strings[colPos] = (String) val;
- }
-
- }
-
- static class IdStore extends ObjectAttributeStore<Id> {
-
- public IdStore(AttributeInfo attrInfo) {
- super(Id.class, attrInfo);
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.ids[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.ids[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.ids[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.ids[colPos] = (Id) val;
- }
-
- }
-
- static class ImmutableListStore extends ObjectAttributeStore<ImmutableList> {
-
- public ImmutableListStore(AttributeInfo attrInfo) {
- super(ImmutableList.class, attrInfo);
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.arrays[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.arrays[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.arrays[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.arrays[colPos] = (ImmutableList) val;
- }
-
- }
-
- static class ImmutableMapStore extends ObjectAttributeStore<ImmutableMap> {
-
- public ImmutableMapStore(AttributeInfo attrInfo) {
- super(ImmutableMap.class, attrInfo);
- }
-
- protected void store(StructInstance instance, int colPos, int pos) {
- list.set(pos, instance.maps[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, int pos) {
- instance.maps[colPos] = list.get(pos);
- }
-
- protected void store(StructInstance instance, int colPos, String attrName, Map<String, Object> m) {
- m.put(attrName, instance.maps[colPos]);
- }
-
- protected void load(StructInstance instance, int colPos, Object val) {
- instance.maps[colPos] = (ImmutableMap) val;
- }
-
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/memory/ClassStore.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/memory/ClassStore.java b/repository/src/main/java/org/apache/atlas/repository/memory/ClassStore.java
deleted file mode 100755
index a74c79e..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/memory/ClassStore.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.memory;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.atlas.AtlasException;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
-import org.apache.atlas.typesystem.types.ClassType;
-
-import java.util.ArrayList;
-import java.util.Objects;
-
-@Deprecated
-public class ClassStore extends HierarchicalTypeStore {
-
- final ArrayList<ImmutableList<String>> traitNamesStore;
- final ClassType classType;
-
- public ClassStore(MemRepository repository, ClassType hierarchicalType) throws RepositoryException {
- super(repository, hierarchicalType);
- classType = hierarchicalType;
- traitNamesStore = new ArrayList<>();
- }
-
- void store(ReferenceableInstance i) throws RepositoryException {
- super.store(i);
- int pos = idPosMap.get(i.getId());
- traitNamesStore.set(pos, i.getTraits());
- }
-
- public void ensureCapacity(int pos) throws RepositoryException {
- super.ensureCapacity(pos);
- while (traitNamesStore.size() < pos + 1) {
- traitNamesStore.add(null);
- }
- }
-
- boolean validate(MemRepository repo, Id id) throws RepositoryException {
- if (id.isUnassigned()) {
- throw new RepositoryException(String.format("Invalid Id (unassigned) : %s", id));
- }
- Integer pos = idPosMap.get(id);
- if (pos == null) {
- throw new RepositoryException(String.format("Invalid Id (unknown) : %s", id));
- }
-
- String typeName = typeNameList.get(pos);
- if (!Objects.equals(typeName, hierarchicalType.getName())) {
- throw new RepositoryException(
- String.format("Invalid Id (incorrect typeName, type is %s) : %s", typeName, id));
- }
-
- return true;
- }
-
- /*
- * - assumes id is already validated
- */
- ReferenceableInstance createInstance(MemRepository repo, Id id) throws RepositoryException {
- Integer pos = idPosMap.get(id);
- String typeName = typeNameList.get(pos);
- if (!Objects.equals(typeName, hierarchicalType.getName())) {
- return repo.getClassStore(typeName).createInstance(repo, id);
- }
-
- ImmutableList<String> traitNames = traitNamesStore.get(pos);
- String[] tNs = traitNames.toArray(new String[]{});
-
- try {
- return (ReferenceableInstance) classType.createInstance(id, tNs);
- } catch (AtlasException me) {
- throw new RepositoryException(me);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/memory/HierarchicalTypeStore.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/memory/HierarchicalTypeStore.java b/repository/src/main/java/org/apache/atlas/repository/memory/HierarchicalTypeStore.java
deleted file mode 100755
index 429730c..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/memory/HierarchicalTypeStore.java
+++ /dev/null
@@ -1,207 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.memory;
-
-import com.google.common.collect.ImmutableBiMap;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
-import org.apache.atlas.typesystem.persistence.StructInstance;
-import org.apache.atlas.typesystem.types.AttributeInfo;
-import org.apache.atlas.typesystem.types.HierarchicalType;
-import org.apache.atlas.typesystem.types.IConstructableType;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-@Deprecated
-public abstract class HierarchicalTypeStore {
-
- final MemRepository repository;
- final IConstructableType hierarchicalType;
- final ArrayList<String> typeNameList;
- final ImmutableMap<AttributeInfo, IAttributeStore> attrStores;
- final ImmutableList<HierarchicalTypeStore> superTypeStores;
-
-
- /**
- * Map Id to position in storage lists.
- */
- Map<Id, Integer> idPosMap;
-
- List<Integer> freePositions;
-
- int nextPos;
-
- /**
- * Lock for each Class/Trait.
- */
- ReentrantReadWriteLock lock;
-
- HierarchicalTypeStore(MemRepository repository, HierarchicalType hierarchicalType) throws RepositoryException {
- this.hierarchicalType = (IConstructableType) hierarchicalType;
- this.repository = repository;
- ImmutableMap.Builder<AttributeInfo, IAttributeStore> b =
- new ImmutableBiMap.Builder<>();
- typeNameList = Lists.newArrayList((String) null);
- ImmutableList<AttributeInfo> l = hierarchicalType.immediateAttrs;
- for (AttributeInfo i : l) {
- b.put(i, AttributeStores.createStore(i));
- }
- attrStores = b.build();
-
- ImmutableList.Builder<HierarchicalTypeStore> b1 = new ImmutableList.Builder<>();
- Set<String> allSuperTypeNames = hierarchicalType.getAllSuperTypeNames();
- for (String s : allSuperTypeNames) {
- b1.add(repository.getStore(s));
- }
- superTypeStores = b1.build();
-
- nextPos = 0;
- idPosMap = new HashMap<>();
- freePositions = new ArrayList<>();
-
- lock = new ReentrantReadWriteLock();
- }
-
- /**
- * Assign a storage position to an Id.
- * - try to assign from freePositions
- * - ensure storage capacity.
- * - add entry in idPosMap.
- * @param id
- * @return
- * @throws RepositoryException
- */
- int assignPosition(Id id) throws RepositoryException {
-
- int pos = -1;
- if (!freePositions.isEmpty()) {
- pos = freePositions.remove(0);
- } else {
- pos = nextPos++;
- ensureCapacity(pos);
- }
-
- idPosMap.put(id, pos);
-
- for (HierarchicalTypeStore s : superTypeStores) {
- s.assignPosition(id);
- }
-
- return pos;
- }
-
- /**
- * - remove from idPosMap
- * - add to freePositions.
- * @throws RepositoryException
- */
- void releaseId(Id id) {
-
- Integer pos = idPosMap.get(id);
- if (pos != null) {
- idPosMap.remove(id);
- freePositions.add(pos);
-
- for (HierarchicalTypeStore s : superTypeStores) {
- s.releaseId(id);
- }
- }
- }
-
- void acquireReadLock() {
- lock.readLock().lock();
- }
-
- void acquireWriteLock() {
- lock.writeLock().lock();
- }
-
- void releaseReadLock() {
- lock.readLock().unlock();
- }
-
- void releaseWriteLock() {
- lock.writeLock().unlock();
- }
-
- protected void storeFields(int pos, StructInstance s) throws RepositoryException {
- for (Map.Entry<AttributeInfo, IAttributeStore> e : attrStores.entrySet()) {
- IAttributeStore attributeStore = e.getValue();
- attributeStore.store(pos, hierarchicalType, s);
- }
- }
-
- protected void loadFields(int pos, StructInstance s) throws RepositoryException {
- for (Map.Entry<AttributeInfo, IAttributeStore> e : attrStores.entrySet()) {
- IAttributeStore attributeStore = e.getValue();
- attributeStore.load(pos, hierarchicalType, s);
- }
- }
-
- /**
- * - store the typeName
- * - store the immediate attributes in the respective IAttributeStore
- * - call store on each SuperType.
- * @param i
- * @throws RepositoryException
- */
- void store(ReferenceableInstance i) throws RepositoryException {
- int pos = idPosMap.get(i.getId());
- typeNameList.set(pos, i.getTypeName());
- storeFields(pos, i);
-
- for (HierarchicalTypeStore s : superTypeStores) {
- s.store(i);
- }
- }
-
- /**
- * - copy over the immediate attribute values from the respective IAttributeStore
- * - call load on each SuperType.
- * @param i
- * @throws RepositoryException
- */
- void load(ReferenceableInstance i) throws RepositoryException {
- int pos = idPosMap.get(i.getId());
- loadFields(pos, i);
-
- for (HierarchicalTypeStore s : superTypeStores) {
- s.load(i);
- }
- }
-
- public void ensureCapacity(int pos) throws RepositoryException {
- while (typeNameList.size() < pos + 1) {
- typeNameList.add(null);
- }
- for (Map.Entry<AttributeInfo, IAttributeStore> e : attrStores.entrySet()) {
- IAttributeStore attributeStore = e.getValue();
- attributeStore.ensureCapacity(pos);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/memory/IAttributeStore.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/memory/IAttributeStore.java b/repository/src/main/java/org/apache/atlas/repository/memory/IAttributeStore.java
deleted file mode 100755
index b8cbe05..0000000
--- a/repository/src/main/java/org/apache/atlas/repository/memory/IAttributeStore.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.repository.memory;
-
-import org.apache.atlas.repository.RepositoryException;
-import org.apache.atlas.typesystem.persistence.StructInstance;
-import org.apache.atlas.typesystem.types.IConstructableType;
-
-@Deprecated
-public interface IAttributeStore {
- /**
- * Store the attribute's value from the 'instance' into this store.
- * @param pos
- * @param instance
- * @throws RepositoryException
- */
- void store(int pos, IConstructableType type, StructInstance instance) throws RepositoryException;
-
- /**
- * load the Instance with the value from position 'pos' for the attribute.
- * @param pos
- * @param instance
- * @throws RepositoryException
- */
- void load(int pos, IConstructableType type, StructInstance instance) throws RepositoryException;
-
- /**
- * Ensure store have space for the given pos.
- * @param pos
- * @throws RepositoryException
- */
- void ensureCapacity(int pos) throws RepositoryException;
-}
[02/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java b/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java
index e753881..98df12b 100644
--- a/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java
+++ b/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntitiesREST.java
@@ -18,7 +18,6 @@
package org.apache.atlas.web.adapters;
import org.apache.atlas.AtlasClient;
-import org.apache.atlas.RequestContext;
import org.apache.atlas.RequestContextV1;
import org.apache.atlas.TestModules;
import org.apache.atlas.TestUtilsV2;
@@ -99,7 +98,6 @@ public class TestEntitiesREST {
@AfterMethod
public void cleanup() throws Exception {
- RequestContext.clear();
RequestContextV1.clear();
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntityREST.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntityREST.java b/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntityREST.java
index b90ea64..ea6fe31 100644
--- a/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntityREST.java
+++ b/webapp/src/test/java/org/apache/atlas/web/adapters/TestEntityREST.java
@@ -18,7 +18,6 @@
package org.apache.atlas.web.adapters;
import org.apache.atlas.TestModules;
-import org.apache.atlas.RequestContext;
import org.apache.atlas.RequestContextV1;
import org.apache.atlas.TestUtilsV2;
import org.apache.atlas.model.instance.AtlasClassification;
@@ -77,7 +76,6 @@ public class TestEntityREST {
@AfterMethod
public void cleanup() throws Exception {
- RequestContext.clear();
RequestContextV1.clear();
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/filters/AtlasAuthenticationKerberosFilterTest.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/filters/AtlasAuthenticationKerberosFilterTest.java b/webapp/src/test/java/org/apache/atlas/web/filters/AtlasAuthenticationKerberosFilterTest.java
index 5628b17..a3f5601 100644
--- a/webapp/src/test/java/org/apache/atlas/web/filters/AtlasAuthenticationKerberosFilterTest.java
+++ b/webapp/src/test/java/org/apache/atlas/web/filters/AtlasAuthenticationKerberosFilterTest.java
@@ -16,7 +16,7 @@
*/
package org.apache.atlas.web.filters;
-import org.apache.atlas.RequestContext;
+import org.apache.atlas.RequestContextV1;
import org.apache.atlas.web.security.BaseSecurityTest;
import org.apache.atlas.web.service.EmbeddedServer;
import org.apache.commons.configuration.PropertiesConfiguration;
@@ -111,7 +111,7 @@ public class AtlasAuthenticationKerberosFilterTest extends BaseSecurityTest {
connection.connect();
assertEquals(connection.getResponseCode(), 200);
- assertEquals(RequestContext.get().getUser(), TESTUSER);
+ assertEquals(RequestContextV1.get().getUser(), TESTUSER);
return null;
}
});
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java
index 512750f..ab27612 100755
--- a/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/BaseResourceIT.java
@@ -19,8 +19,6 @@
package org.apache.atlas.web.integration;
import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
import org.apache.atlas.ApplicationProperties;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasClientV2;
@@ -32,26 +30,21 @@ import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.instance.AtlasStruct;
import org.apache.atlas.model.instance.EntityMutationResponse;
import org.apache.atlas.model.instance.EntityMutations;
-import org.apache.atlas.model.typedef.AtlasClassificationDef;
-import org.apache.atlas.model.typedef.AtlasEntityDef;
-import org.apache.atlas.model.typedef.AtlasEnumDef;
-import org.apache.atlas.model.typedef.AtlasStructDef;
+import org.apache.atlas.model.typedef.*;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality;
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef;
-import org.apache.atlas.model.typedef.AtlasTypesDef;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.v1.model.typedef.*;
+import org.apache.atlas.v1.model.typedef.EnumTypeDefinition.EnumValue;
import org.apache.atlas.notification.NotificationConsumer;
import org.apache.atlas.kafka.*;
-import org.apache.atlas.notification.entity.EntityNotification;
-import org.apache.atlas.notification.hook.HookNotification;
+import org.apache.atlas.v1.model.notification.EntityNotificationV1;
+import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeUtil;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.*;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.atlas.utils.ParamChecker;
import org.apache.commons.configuration.Configuration;
@@ -80,31 +73,30 @@ import static org.testng.Assert.assertTrue;
* Sets up the web resource and has helper methods to created type and entity.
*/
public abstract class BaseResourceIT {
+ public static final Logger LOG = LoggerFactory.getLogger(BaseResourceIT.class);
public static final String ATLAS_REST_ADDRESS = "atlas.rest.address";
- public static final String NAME = "name";
- public static final String QUALIFIED_NAME = "qualifiedName";
- public static final String CLUSTER_NAME = "clusterName";
- public static final String DESCRIPTION = "description";
- public static final String PII_TAG = "pii_Tag";
- public static final String PHI_TAG = "phi_Tag";
- public static final String PCI_TAG = "pci_Tag";
- public static final String SOX_TAG = "sox_Tag";
- public static final String SEC_TAG = "sec_Tag";
- public static final String FINANCE_TAG = "finance_Tag";
- public static final String CLASSIFICATION = "classification";
+ public static final String NAME = "name";
+ public static final String QUALIFIED_NAME = "qualifiedName";
+ public static final String CLUSTER_NAME = "clusterName";
+ public static final String DESCRIPTION = "description";
+ public static final String PII_TAG = "pii_Tag";
+ public static final String PHI_TAG = "phi_Tag";
+ public static final String PCI_TAG = "pci_Tag";
+ public static final String SOX_TAG = "sox_Tag";
+ public static final String SEC_TAG = "sec_Tag";
+ public static final String FINANCE_TAG = "finance_Tag";
+ public static final String CLASSIFICATION = "classification";
+
+ protected static final int MAX_WAIT_TIME = 60000;
// All service clients
- protected AtlasClient atlasClientV1;
+ protected AtlasClient atlasClientV1;
protected AtlasClientV2 atlasClientV2;
-
- public static final Logger LOG = LoggerFactory.getLogger(BaseResourceIT.class);
- protected static final int MAX_WAIT_TIME = 60000;
- protected String[] atlasUrls;
+ protected String[] atlasUrls;
@BeforeClass
public void setUp() throws Exception {
-
//set high timeouts so that tests do not fail due to read timeouts while you
//are stepping through the code in a debugger
ApplicationProperties.get().setProperty("atlas.client.readTimeoutMSecs", "100000000");
@@ -112,6 +104,7 @@ public abstract class BaseResourceIT {
Configuration configuration = ApplicationProperties.get();
+
atlasUrls = configuration.getStringArray(ATLAS_REST_ADDRESS);
if (atlasUrls == null || atlasUrls.length == 0) {
@@ -129,6 +122,7 @@ public abstract class BaseResourceIT {
protected void batchCreateTypes(AtlasTypesDef typesDef) throws AtlasServiceException {
AtlasTypesDef toCreate = new AtlasTypesDef();
+
for (AtlasEnumDef enumDef : typesDef.getEnumDefs()) {
if (atlasClientV2.typeWithNameExists(enumDef.getName())) {
LOG.warn("Type with name {} already exists. Skipping", enumDef.getName());
@@ -170,103 +164,107 @@ public abstract class BaseResourceIT {
}
protected List<String> createType(TypesDef typesDef) throws Exception {
- List<EnumTypeDefinition> enumTypes = new ArrayList<>();
+ List<EnumTypeDefinition> enumTypes = new ArrayList<>();
List<StructTypeDefinition> structTypes = new ArrayList<>();
- List<HierarchicalTypeDefinition<TraitType>> traitTypes = new ArrayList<>();
- List<HierarchicalTypeDefinition<ClassType>> classTypes = new ArrayList<>();
+ List<TraitTypeDefinition> traitTypes = new ArrayList<>();
+ List<ClassTypeDefinition> classTypes = new ArrayList<>();
- for (EnumTypeDefinition enumTypeDefinition : typesDef.enumTypesAsJavaList()) {
- if (atlasClientV2.typeWithNameExists(enumTypeDefinition.name)) {
- LOG.warn("Type with name {} already exists. Skipping", enumTypeDefinition.name);
+ for (EnumTypeDefinition enumTypeDefinition : typesDef.getEnumTypes()) {
+ if (atlasClientV2.typeWithNameExists(enumTypeDefinition.getName())) {
+ LOG.warn("Type with name {} already exists. Skipping", enumTypeDefinition.getName());
} else {
enumTypes.add(enumTypeDefinition);
}
}
- for (StructTypeDefinition structTypeDefinition : typesDef.structTypesAsJavaList()) {
- if (atlasClientV2.typeWithNameExists(structTypeDefinition.typeName)) {
- LOG.warn("Type with name {} already exists. Skipping", structTypeDefinition.typeName);
+
+ for (StructTypeDefinition structTypeDefinition : typesDef.getStructTypes()) {
+ if (atlasClientV2.typeWithNameExists(structTypeDefinition.getTypeName())) {
+ LOG.warn("Type with name {} already exists. Skipping", structTypeDefinition.getTypeName());
} else {
structTypes.add(structTypeDefinition);
}
}
- for (HierarchicalTypeDefinition<TraitType> hierarchicalTypeDefinition : typesDef.traitTypesAsJavaList()) {
- if (atlasClientV2.typeWithNameExists(hierarchicalTypeDefinition.typeName)) {
- LOG.warn("Type with name {} already exists. Skipping", hierarchicalTypeDefinition.typeName);
+
+ for (TraitTypeDefinition hierarchicalTypeDefinition : typesDef.getTraitTypes()) {
+ if (atlasClientV2.typeWithNameExists(hierarchicalTypeDefinition.getTypeName())) {
+ LOG.warn("Type with name {} already exists. Skipping", hierarchicalTypeDefinition.getTypeName());
} else {
traitTypes.add(hierarchicalTypeDefinition);
}
}
- for (HierarchicalTypeDefinition<ClassType> hierarchicalTypeDefinition : typesDef.classTypesAsJavaList()) {
- if (atlasClientV2.typeWithNameExists(hierarchicalTypeDefinition.typeName)) {
- LOG.warn("Type with name {} already exists. Skipping", hierarchicalTypeDefinition.typeName);
+
+ for (ClassTypeDefinition hierarchicalTypeDefinition : typesDef.getClassTypes()) {
+ if (atlasClientV2.typeWithNameExists(hierarchicalTypeDefinition.getTypeName())) {
+ LOG.warn("Type with name {} already exists. Skipping", hierarchicalTypeDefinition.getTypeName());
} else {
classTypes.add(hierarchicalTypeDefinition);
}
}
- TypesDef toCreate = TypesUtil.getTypesDef(ImmutableList.copyOf(enumTypes),
- ImmutableList.copyOf(structTypes),
- ImmutableList.copyOf(traitTypes),
- ImmutableList.copyOf(classTypes));
+ TypesDef toCreate = new TypesDef(enumTypes, structTypes, traitTypes, classTypes);
+
return atlasClientV1.createType(toCreate);
}
protected List<String> createType(String typesAsJSON) throws Exception {
- return createType(TypesSerialization.fromJson(typesAsJSON));
+ return createType(AtlasType.fromV1Json(typesAsJSON, TypesDef.class));
}
protected Id createInstance(Referenceable referenceable) throws Exception {
String typeName = referenceable.getTypeName();
+
System.out.println("creating instance of type " + typeName);
List<String> guids = atlasClientV1.createEntity(referenceable);
+
System.out.println("created instance for type " + typeName + ", guid: " + guids);
// return the reference to created instance with guid
if (guids.size() > 0) {
return new Id(guids.get(guids.size() - 1), 0, referenceable.getTypeName());
}
+
return null;
}
- protected TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums,
- ImmutableList<StructTypeDefinition> structs,
- ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
- ImmutableList<HierarchicalTypeDefinition<ClassType>> classes){
- enums = (enums != null) ? enums : ImmutableList
- .<EnumTypeDefinition>of();
- structs =
- (structs != null) ? structs : ImmutableList.<StructTypeDefinition>of();
-
- traits = (traits != null) ? traits : ImmutableList
- .<HierarchicalTypeDefinition<TraitType>>of();
-
- classes = (classes != null) ? classes : ImmutableList
- .<HierarchicalTypeDefinition<ClassType>>of();
- return TypesUtil.getTypesDef(enums, structs, traits, classes);
+ protected TypesDef getTypesDef(List<EnumTypeDefinition> enums,
+ List<StructTypeDefinition> structs,
+ List<TraitTypeDefinition> traits,
+ List<ClassTypeDefinition> classes){
+ enums = (enums != null) ? enums : Collections.<EnumTypeDefinition>emptyList();
+ structs = (structs != null) ? structs : Collections.<StructTypeDefinition>emptyList();
+ traits = (traits != null) ? traits : Collections.<TraitTypeDefinition>emptyList();
+ classes = (classes != null) ? classes : Collections.<ClassTypeDefinition>emptyList();
+ return new TypesDef(enums, structs, traits, classes);
}
protected AtlasEntityHeader modifyEntity(AtlasEntity atlasEntity, boolean update) {
EntityMutationResponse entity = null;
+
try {
if (!update) {
entity = atlasClientV2.createEntity(new AtlasEntityWithExtInfo(atlasEntity));
+
assertNotNull(entity);
assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE));
assertTrue(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).size() > 0);
+
return entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).get(0);
} else {
entity = atlasClientV2.updateEntity(new AtlasEntityWithExtInfo(atlasEntity));
+
assertNotNull(entity);
assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE));
assertTrue(entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).size() > 0);
+
return entity.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).get(0);
}
} catch (AtlasServiceException e) {
LOG.error("Entity {} failed", update ? "update" : "creation", entity);
}
+
return null;
}
@@ -278,101 +276,89 @@ public abstract class BaseResourceIT {
return modifyEntity(atlasEntity, true);
}
- protected static final String DATABASE_TYPE_V2 = "hive_db_v2";
- protected static final String HIVE_TABLE_TYPE_V2 = "hive_table_v2";
- protected static final String COLUMN_TYPE_V2 = "hive_column_v2";
+ protected static final String DATABASE_TYPE_V2 = "hive_db_v2";
+ protected static final String HIVE_TABLE_TYPE_V2 = "hive_table_v2";
+ protected static final String COLUMN_TYPE_V2 = "hive_column_v2";
protected static final String HIVE_PROCESS_TYPE_V2 = "hive_process_v2";
- protected static final String DATABASE_TYPE = "hive_db_v1";
- protected static final String HIVE_TABLE_TYPE = "hive_table_v1";
- protected static final String COLUMN_TYPE = "hive_column_v1";
+ protected static final String DATABASE_TYPE = "hive_db_v1";
+ protected static final String HIVE_TABLE_TYPE = "hive_table_v1";
+ protected static final String COLUMN_TYPE = "hive_column_v1";
protected static final String HIVE_PROCESS_TYPE = "hive_process_v1";
- protected static final String DATABASE_TYPE_BUILTIN = "hive_db";
- protected static final String HIVE_TABLE_TYPE_BUILTIN = "hive_table";
- protected static final String COLUMN_TYPE_BUILTIN = "hive_column";
+ protected static final String DATABASE_TYPE_BUILTIN = "hive_db";
+ protected static final String HIVE_TABLE_TYPE_BUILTIN = "hive_table";
+ protected static final String COLUMN_TYPE_BUILTIN = "hive_column";
protected static final String HIVE_PROCESS_TYPE_BUILTIN = "hive_process";
protected void createTypeDefinitionsV1() throws Exception {
- HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil
- .createClassTypeDef(DATABASE_TYPE, null,
- TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef(DESCRIPTION, DataTypes.STRING_TYPE),
- attrDef("locationUri", DataTypes.STRING_TYPE),
- attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.INT_TYPE),
- new AttributeDefinition("tables", DataTypes.arrayTypeName(HIVE_TABLE_TYPE),
+ ClassTypeDefinition dbClsDef = TypesUtil
+ .createClassTypeDef(DATABASE_TYPE, null, null,
+ TypesUtil.createUniqueRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ TypesUtil.createRequiredAttrDef(DESCRIPTION, AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ attrDef("locationUri", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ attrDef("owner", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("createTime", AtlasBaseTypeDef.ATLAS_TYPE_INT),
+ new AttributeDefinition("tables", AtlasBaseTypeDef.getArrayTypeName(HIVE_TABLE_TYPE),
Multiplicity.OPTIONAL, false, "db")
);
- HierarchicalTypeDefinition<ClassType> columnClsDef = TypesUtil
- .createClassTypeDef(COLUMN_TYPE, null, attrDef(NAME, DataTypes.STRING_TYPE),
- attrDef("dataType", DataTypes.STRING_TYPE), attrDef("comment", DataTypes.STRING_TYPE));
+ ClassTypeDefinition columnClsDef = TypesUtil
+ .createClassTypeDef(COLUMN_TYPE, null, null, attrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ attrDef("dataType", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("comment", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
- StructTypeDefinition structTypeDefinition = new StructTypeDefinition("serdeType",
- new AttributeDefinition[]{TypesUtil.createRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef("serde", DataTypes.STRING_TYPE)});
+ StructTypeDefinition structTypeDefinition = new StructTypeDefinition("serdeType", null,
+ Arrays.asList(new AttributeDefinition[]{TypesUtil.createRequiredAttrDef(NAME, AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ TypesUtil.createRequiredAttrDef("serde", AtlasBaseTypeDef.ATLAS_TYPE_STRING)}));
EnumValue values[] = {new EnumValue("MANAGED", 1), new EnumValue("EXTERNAL", 2),};
- EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("tableType", values);
+ EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("tableType", null, null, Arrays.asList(values));
- HierarchicalTypeDefinition<ClassType> tblClsDef = TypesUtil
- .createClassTypeDef(HIVE_TABLE_TYPE, ImmutableSet.of("DataSet"),
- attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE),
- attrDef("lastAccessTime", DataTypes.DATE_TYPE),
- attrDef("temporary", DataTypes.BOOLEAN_TYPE),
+ ClassTypeDefinition tblClsDef = TypesUtil
+ .createClassTypeDef(HIVE_TABLE_TYPE, null, Collections.singleton("DataSet"),
+ attrDef("owner", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("createTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG),
+ attrDef("lastAccessTime", AtlasBaseTypeDef.ATLAS_TYPE_DATE),
+ attrDef("temporary", AtlasBaseTypeDef.ATLAS_TYPE_BOOLEAN),
new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.OPTIONAL, true, "tables"),
- new AttributeDefinition("columns", DataTypes.arrayTypeName(COLUMN_TYPE),
+ new AttributeDefinition("columns", AtlasBaseTypeDef.getArrayTypeName(COLUMN_TYPE),
Multiplicity.OPTIONAL, true, null),
new AttributeDefinition("tableType", "tableType", Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("serde1", "serdeType", Multiplicity.OPTIONAL, false, null),
new AttributeDefinition("serde2", "serdeType", Multiplicity.OPTIONAL, false, null));
- HierarchicalTypeDefinition<ClassType> loadProcessClsDef = TypesUtil
- .createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableSet.of("Process"),
- attrDef("userName", DataTypes.STRING_TYPE), attrDef("startTime", DataTypes.INT_TYPE),
- attrDef("endTime", DataTypes.LONG_TYPE),
- attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
- attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
- attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
- attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED));
-
- HierarchicalTypeDefinition<TraitType> classificationTrait = TypesUtil
- .createTraitTypeDef("classification", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<TraitType> piiTrait =
- TypesUtil.createTraitTypeDef(PII_TAG, ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> phiTrait =
- TypesUtil.createTraitTypeDef(PHI_TAG, ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> pciTrait =
- TypesUtil.createTraitTypeDef(PCI_TAG, ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> soxTrait =
- TypesUtil.createTraitTypeDef(SOX_TAG, ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> secTrait =
- TypesUtil.createTraitTypeDef(SEC_TAG, ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> financeTrait =
- TypesUtil.createTraitTypeDef(FINANCE_TAG, ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> factTrait =
- TypesUtil.createTraitTypeDef("Fact" + randomString(), ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> etlTrait =
- TypesUtil.createTraitTypeDef("ETL" + randomString(), ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> dimensionTrait =
- TypesUtil.createTraitTypeDef("Dimension" + randomString(), ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> metricTrait =
- TypesUtil.createTraitTypeDef("Metric" + randomString(), ImmutableSet.<String>of());
-
- createType(getTypesDef(ImmutableList.of(enumTypeDefinition), ImmutableList.of(structTypeDefinition),
- ImmutableList.of(classificationTrait, piiTrait, phiTrait, pciTrait,
- soxTrait, secTrait, financeTrait, factTrait, etlTrait, dimensionTrait, metricTrait),
- ImmutableList.of(dbClsDef, columnClsDef, tblClsDef, loadProcessClsDef)));
+ ClassTypeDefinition loadProcessClsDef = TypesUtil
+ .createClassTypeDef(HIVE_PROCESS_TYPE, null, Collections.singleton("Process"),
+ attrDef("userName", AtlasBaseTypeDef.ATLAS_TYPE_STRING), attrDef("startTime", AtlasBaseTypeDef.ATLAS_TYPE_INT),
+ attrDef("endTime", AtlasBaseTypeDef.ATLAS_TYPE_LONG),
+ attrDef("queryText", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED),
+ attrDef("queryPlan", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED),
+ attrDef("queryId", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED),
+ attrDef("queryGraph", AtlasBaseTypeDef.ATLAS_TYPE_STRING, Multiplicity.REQUIRED));
+
+ TraitTypeDefinition classificationTrait = TypesUtil
+ .createTraitTypeDef("classification", null, Collections.<String>emptySet(),
+ TypesUtil.createRequiredAttrDef("tag", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+
+ TraitTypeDefinition piiTrait = TypesUtil.createTraitTypeDef(PII_TAG, null, Collections.<String>emptySet());
+ TraitTypeDefinition phiTrait = TypesUtil.createTraitTypeDef(PHI_TAG, null, Collections.<String>emptySet());
+ TraitTypeDefinition pciTrait = TypesUtil.createTraitTypeDef(PCI_TAG, null, Collections.<String>emptySet());
+ TraitTypeDefinition soxTrait = TypesUtil.createTraitTypeDef(SOX_TAG, null, Collections.<String>emptySet());
+ TraitTypeDefinition secTrait = TypesUtil.createTraitTypeDef(SEC_TAG, null, Collections.<String>emptySet());
+ TraitTypeDefinition financeTrait = TypesUtil.createTraitTypeDef(FINANCE_TAG, null, Collections.<String>emptySet());
+ TraitTypeDefinition factTrait = TypesUtil.createTraitTypeDef("Fact" + randomString(), null, Collections.<String>emptySet());
+ TraitTypeDefinition etlTrait = TypesUtil.createTraitTypeDef("ETL" + randomString(), null, Collections.<String>emptySet());
+ TraitTypeDefinition dimensionTrait = TypesUtil.createTraitTypeDef("Dimension" + randomString(), null, Collections.<String>emptySet());
+ TraitTypeDefinition metricTrait = TypesUtil.createTraitTypeDef("Metric" + randomString(), null, Collections.<String>emptySet());
+
+ createType(getTypesDef(Collections.singletonList(enumTypeDefinition),
+ Collections.singletonList(structTypeDefinition),
+ Arrays.asList(classificationTrait, piiTrait, phiTrait, pciTrait, soxTrait, secTrait, financeTrait, factTrait, etlTrait, dimensionTrait, metricTrait),
+ Arrays.asList(dbClsDef, columnClsDef, tblClsDef, loadProcessClsDef)));
}
protected void createTypeDefinitionsV2() throws Exception {
-
AtlasConstraintDef isCompositeSourceConstraint = new AtlasConstraintDef(CONSTRAINT_TYPE_OWNED_REF);
-
- AtlasConstraintDef isCompositeTargetConstraint = new AtlasConstraintDef(CONSTRAINT_TYPE_INVERSE_REF,
- Collections.<String, Object>singletonMap(CONSTRAINT_PARAM_ATTRIBUTE, "randomTable"));
+ AtlasConstraintDef isCompositeTargetConstraint = new AtlasConstraintDef(CONSTRAINT_TYPE_INVERSE_REF, Collections.<String, Object>singletonMap(CONSTRAINT_PARAM_ATTRIBUTE, "randomTable"));
AtlasEntityDef dbClsTypeDef = AtlasTypeUtil.createClassTypeDef(
DATABASE_TYPE_V2,
@@ -385,7 +371,7 @@ public abstract class BaseResourceIT {
AtlasTypeUtil.createOptionalAttrDef("createTime", "int"),
//there is a serializ
new AtlasAttributeDef("randomTable",
- DataTypes.arrayTypeName(HIVE_TABLE_TYPE_V2),
+ AtlasBaseTypeDef.getArrayTypeName(HIVE_TABLE_TYPE_V2),
true,
Cardinality.SET,
0, -1, false, true, Collections.singletonList(isCompositeSourceConstraint))
@@ -409,7 +395,7 @@ public abstract class BaseResourceIT {
AtlasEntityDef tblClsDef = AtlasTypeUtil
.createClassTypeDef(HIVE_TABLE_TYPE_V2,
- ImmutableSet.of("DataSet"),
+ Collections.singleton("DataSet"),
AtlasTypeUtil.createOptionalAttrDef("owner", "string"),
AtlasTypeUtil.createOptionalAttrDef("createTime", "long"),
AtlasTypeUtil.createOptionalAttrDef("lastAccessTime", "date"),
@@ -421,14 +407,14 @@ public abstract class BaseResourceIT {
0, 1, false, true, Collections.singletonList(isCompositeTargetConstraint)),
//some tests don't set the columns field or set it to null...
- AtlasTypeUtil.createOptionalAttrDef("columns", DataTypes.arrayTypeName(COLUMN_TYPE_V2)),
+ AtlasTypeUtil.createOptionalAttrDef("columns", AtlasBaseTypeDef.getArrayTypeName(COLUMN_TYPE_V2)),
AtlasTypeUtil.createOptionalAttrDef("tableType", "tableType"),
AtlasTypeUtil.createOptionalAttrDef("serde1", "serdeType"),
AtlasTypeUtil.createOptionalAttrDef("serde2", "serdeType"));
AtlasEntityDef loadProcessClsDef = AtlasTypeUtil
.createClassTypeDef(HIVE_PROCESS_TYPE_V2,
- ImmutableSet.of("Process"),
+ Collections.singleton("Process"),
AtlasTypeUtil.createOptionalAttrDef("userName", "string"),
AtlasTypeUtil.createOptionalAttrDef("startTime", "int"),
AtlasTypeUtil.createOptionalAttrDef("endTime", "long"),
@@ -438,42 +424,36 @@ public abstract class BaseResourceIT {
AtlasTypeUtil.createRequiredAttrDef("queryGraph", "string"));
AtlasClassificationDef classificationTrait = AtlasTypeUtil
- .createTraitTypeDef("classification",ImmutableSet.<String>of(),
+ .createTraitTypeDef("classification", Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("tag", "string"));
- AtlasClassificationDef piiTrait =
- AtlasTypeUtil.createTraitTypeDef(PII_TAG, ImmutableSet.<String>of());
- AtlasClassificationDef phiTrait =
- AtlasTypeUtil.createTraitTypeDef(PHI_TAG, ImmutableSet.<String>of());
- AtlasClassificationDef pciTrait =
- AtlasTypeUtil.createTraitTypeDef(PCI_TAG, ImmutableSet.<String>of());
- AtlasClassificationDef soxTrait =
- AtlasTypeUtil.createTraitTypeDef(SOX_TAG, ImmutableSet.<String>of());
- AtlasClassificationDef secTrait =
- AtlasTypeUtil.createTraitTypeDef(SEC_TAG, ImmutableSet.<String>of());
- AtlasClassificationDef financeTrait =
- AtlasTypeUtil.createTraitTypeDef(FINANCE_TAG, ImmutableSet.<String>of());
-
- AtlasTypesDef typesDef = new AtlasTypesDef(ImmutableList.of(enumDef),
- ImmutableList.of(structTypeDef),
- ImmutableList.of(classificationTrait, piiTrait, phiTrait, pciTrait, soxTrait, secTrait, financeTrait),
- ImmutableList.of(dbClsTypeDef, columnClsDef, tblClsDef, loadProcessClsDef));
+ AtlasClassificationDef piiTrait = AtlasTypeUtil.createTraitTypeDef(PII_TAG, Collections.<String>emptySet());
+ AtlasClassificationDef phiTrait = AtlasTypeUtil.createTraitTypeDef(PHI_TAG, Collections.<String>emptySet());
+ AtlasClassificationDef pciTrait = AtlasTypeUtil.createTraitTypeDef(PCI_TAG, Collections.<String>emptySet());
+ AtlasClassificationDef soxTrait = AtlasTypeUtil.createTraitTypeDef(SOX_TAG, Collections.<String>emptySet());
+ AtlasClassificationDef secTrait = AtlasTypeUtil.createTraitTypeDef(SEC_TAG, Collections.<String>emptySet());
+ AtlasClassificationDef financeTrait = AtlasTypeUtil.createTraitTypeDef(FINANCE_TAG, Collections.<String>emptySet());
+
+ AtlasTypesDef typesDef = new AtlasTypesDef(Collections.singletonList(enumDef),
+ Collections.singletonList(structTypeDef),
+ Arrays.asList(classificationTrait, piiTrait, phiTrait, pciTrait, soxTrait, secTrait, financeTrait),
+ Arrays.asList(dbClsTypeDef, columnClsDef, tblClsDef, loadProcessClsDef));
batchCreateTypes(typesDef);
}
- AttributeDefinition attrDef(String name, IDataType dT) {
+ AttributeDefinition attrDef(String name, String dT) {
return attrDef(name, dT, Multiplicity.OPTIONAL, false, null);
}
- AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m) {
+ AttributeDefinition attrDef(String name, String dT, Multiplicity m) {
return attrDef(name, dT, m, false, null);
}
- AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m, boolean isComposite,
- String reverseAttributeName) {
+ AttributeDefinition attrDef(String name, String dT, Multiplicity m, boolean isComposite, String reverseAttributeName) {
Preconditions.checkNotNull(name);
Preconditions.checkNotNull(dT);
- return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName);
+
+ return new AttributeDefinition(name, dT, m, isComposite, reverseAttributeName);
}
protected String randomString() {
@@ -490,9 +470,9 @@ public abstract class BaseResourceIT {
values.put(CLUSTER_NAME, "cl1");
values.put("parameters", Collections.EMPTY_MAP);
values.put("location", "/tmp");
+
Referenceable databaseInstance = new Referenceable(dbId._getId(), dbId.getTypeName(), values);
- Referenceable tableInstance =
- new Referenceable(HIVE_TABLE_TYPE_BUILTIN, CLASSIFICATION, PII_TAG, PHI_TAG, PCI_TAG, SOX_TAG, SEC_TAG, FINANCE_TAG);
+ Referenceable tableInstance = new Referenceable(HIVE_TABLE_TYPE_BUILTIN, CLASSIFICATION, PII_TAG, PHI_TAG, PCI_TAG, SOX_TAG, SEC_TAG, FINANCE_TAG);
tableInstance.set(NAME, tableName);
tableInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName);
tableInstance.set("db", databaseInstance);
@@ -516,7 +496,7 @@ public abstract class BaseResourceIT {
serde2Instance.set("serde", "serde2");
tableInstance.set("serde2", serde2Instance);
- List<String> traits = tableInstance.getTraits();
+ List<String> traits = tableInstance.getTraitNames();
Assert.assertEquals(traits.size(), 7);
return tableInstance;
@@ -564,30 +544,36 @@ public abstract class BaseResourceIT {
}
protected Referenceable createHiveDBInstanceBuiltIn(String dbName) {
Referenceable databaseInstance = new Referenceable(DATABASE_TYPE_BUILTIN);
+
databaseInstance.set(NAME, dbName);
databaseInstance.set(QUALIFIED_NAME, dbName);
databaseInstance.set(CLUSTER_NAME, randomString());
databaseInstance.set(DESCRIPTION, "foo database");
+
return databaseInstance;
}
protected Referenceable createHiveDBInstanceV1(String dbName) {
Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
+
databaseInstance.set(NAME, dbName);
databaseInstance.set(DESCRIPTION, "foo database");
databaseInstance.set(CLUSTER_NAME, "fooCluster");
+
return databaseInstance;
}
protected AtlasEntity createHiveDBInstanceV2(String dbName) {
AtlasEntity atlasEntity = new AtlasEntity(DATABASE_TYPE_V2);
+
atlasEntity.setAttribute(NAME, dbName);
atlasEntity.setAttribute(DESCRIPTION, "foo database");
atlasEntity.setAttribute(CLUSTER_NAME, "fooCluster");
atlasEntity.setAttribute("owner", "user1");
atlasEntity.setAttribute("locationUri", "/tmp");
atlasEntity.setAttribute("createTime",1000);
+
return atlasEntity;
}
@@ -611,7 +597,7 @@ public abstract class BaseResourceIT {
* @return the boolean result of the evaluation.
* @throws Exception thrown if the predicate evaluation could not evaluate.
*/
- boolean evaluate(EntityNotification notification) throws Exception;
+ boolean evaluate(EntityNotificationV1 notification) throws Exception;
}
/**
@@ -622,54 +608,62 @@ public abstract class BaseResourceIT {
*/
protected void waitFor(int timeout, Predicate predicate) throws Exception {
ParamChecker.notNull(predicate, "predicate");
- long mustEnd = System.currentTimeMillis() + timeout;
+ long mustEnd = System.currentTimeMillis() + timeout;
boolean eval;
+
while (!(eval = predicate.evaluate()) && System.currentTimeMillis() < mustEnd) {
LOG.info("Waiting up to {} msec", mustEnd - System.currentTimeMillis());
+
Thread.sleep(100);
}
+
if (!eval) {
throw new Exception("Waiting timed out after " + timeout + " msec");
}
}
- protected EntityNotification waitForNotification(final NotificationConsumer<EntityNotification> consumer, int maxWait,
- final NotificationPredicate predicate) throws Exception {
- final TypeUtils.Pair<EntityNotification, String> pair = TypeUtils.Pair.of(null, null);
- final long maxCurrentTime = System.currentTimeMillis() + maxWait;
+ protected EntityNotificationV1 waitForNotification(final NotificationConsumer<EntityNotificationV1> consumer, int maxWait,
+ final NotificationPredicate predicate) throws Exception {
+ final TypesUtil.Pair<EntityNotificationV1, String> pair = TypesUtil.Pair.of(null, null);
+ final long maxCurrentTime = System.currentTimeMillis() + maxWait;
+
waitFor(maxWait, new Predicate() {
@Override
public boolean evaluate() throws Exception {
try {
-
while (System.currentTimeMillis() < maxCurrentTime) {
- List<AtlasKafkaMessage<EntityNotification>> messageList = consumer.receive();
- if(messageList.size() > 0) {
- EntityNotification notification = messageList.get(0).getMessage();
- if (predicate.evaluate(notification)) {
- pair.left = notification;
- return true;
- }
- }else{
- LOG.info( System.currentTimeMillis()+ " messageList no records" +maxCurrentTime );
+ List<AtlasKafkaMessage<EntityNotificationV1>> messageList = consumer.receive();
+
+ if(messageList.size() > 0) {
+ EntityNotificationV1 notification = messageList.get(0).getMessage();
+
+ if (predicate.evaluate(notification)) {
+ pair.left = notification;
+
+ return true;
}
+ } else {
+ LOG.info( System.currentTimeMillis()+ " messageList no records" +maxCurrentTime );
+ }
}
} catch(Exception e) {
LOG.error(" waitForNotification", e);
//ignore
}
+
return false;
}
});
+
return pair.left;
}
- protected NotificationPredicate newNotificationPredicate(final EntityNotification.OperationType operationType,
+ protected NotificationPredicate newNotificationPredicate(final EntityNotificationV1.OperationType operationType,
final String typeName, final String guid) {
return new NotificationPredicate() {
@Override
- public boolean evaluate(EntityNotification notification) throws Exception {
+ public boolean evaluate(EntityNotificationV1 notification) throws Exception {
return notification != null &&
notification.getOperationType() == operationType &&
notification.getEntity().getTypeName().equals(typeName) &&
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/integration/BasicSearchIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/BasicSearchIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/BasicSearchIT.java
index 5806a10..658be49 100644
--- a/webapp/src/test/java/org/apache/atlas/web/integration/BasicSearchIT.java
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/BasicSearchIT.java
@@ -17,7 +17,6 @@
*/
package org.apache.atlas.web.integration;
-import com.google.common.collect.ImmutableSet;
import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.model.discovery.AtlasSearchResult;
import org.apache.atlas.model.discovery.SearchParameters;
@@ -39,6 +38,7 @@ import org.testng.annotations.Test;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.HashMap;
import static org.codehaus.jackson.annotate.JsonAutoDetect.Visibility.NONE;
@@ -61,7 +61,7 @@ public class BasicSearchIT extends BaseResourceIT {
// Create a test tag
if (!atlasClientV2.typeWithNameExists("fooTag")) {
- AtlasClassificationDef testClassificationDef = AtlasTypeUtil.createTraitTypeDef("fooTag", "Test tag", "1.0", ImmutableSet.<String>of());
+ AtlasClassificationDef testClassificationDef = AtlasTypeUtil.createTraitTypeDef("fooTag", "Test tag", "1.0", Collections.<String>emptySet());
AtlasTypesDef typesDef = new AtlasTypesDef();
typesDef.getClassificationDefs().add(testClassificationDef);
atlasClientV2.createAtlasTypeDefs(typesDef);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java
index 9a0d0dc..7e50480 100644
--- a/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/DataSetLineageJerseyResourceIT.java
@@ -18,23 +18,22 @@
package org.apache.atlas.web.integration;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.v1.model.typedef.TraitTypeDefinition;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
+import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
@@ -73,7 +72,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
JSONObject results = response.getJSONObject(AtlasClient.RESULTS);
Assert.assertNotNull(results);
- Struct resultsInstance = InstanceSerialization.fromJsonStruct(results.toString(), true);
+ Struct resultsInstance = AtlasType.fromV1Json(results.toString(), Struct.class);
Map<String, Struct> vertices = (Map<String, Struct>) resultsInstance.get("vertices");
Assert.assertEquals(vertices.size(), 4);
@@ -88,7 +87,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
JSONObject results = atlasClientV1.getInputGraphForEntity(tableId);
Assert.assertNotNull(results);
- Struct resultsInstance = InstanceSerialization.fromJsonStruct(results.toString(), true);
+ Struct resultsInstance = AtlasType.fromV1Json(results.toString(), Struct.class);
Map<String, Struct> vertices = (Map<String, Struct>) resultsInstance.get("vertices");
Assert.assertEquals(vertices.size(), 4);
Struct vertex = vertices.get(tableId);
@@ -109,7 +108,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
JSONObject results = response.getJSONObject(AtlasClient.RESULTS);
Assert.assertNotNull(results);
- Struct resultsInstance = InstanceSerialization.fromJsonStruct(results.toString(), true);
+ Struct resultsInstance = AtlasType.fromV1Json(results.toString(), Struct.class);
Map<String, Struct> vertices = (Map<String, Struct>) resultsInstance.get("vertices");
Assert.assertEquals(vertices.size(), 3);
@@ -124,7 +123,7 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
JSONObject results = atlasClientV1.getOutputGraphForEntity(tableId);
Assert.assertNotNull(results);
- Struct resultsInstance = InstanceSerialization.fromJsonStruct(results.toString(), true);
+ Struct resultsInstance = AtlasType.fromV1Json(results.toString(), Struct.class);
Map<String, Struct> vertices = (Map<String, Struct>) resultsInstance.get("vertices");
Assert.assertEquals(vertices.size(), 3);
Struct vertex = vertices.get(tableId);
@@ -187,31 +186,25 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
}
private void setupInstances() throws Exception {
- HierarchicalTypeDefinition<TraitType> factTrait =
- TypesUtil.createTraitTypeDef(FACT, ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> etlTrait =
- TypesUtil.createTraitTypeDef(ETL, ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> dimensionTrait =
- TypesUtil.createTraitTypeDef(DIMENSION, ImmutableSet.<String>of());
- HierarchicalTypeDefinition<TraitType> metricTrait =
- TypesUtil.createTraitTypeDef(METRIC, ImmutableSet.<String>of());
- createType(getTypesDef(null, null,
- ImmutableList.of(factTrait, etlTrait, dimensionTrait, metricTrait), null));
+ TraitTypeDefinition factTrait = TypesUtil.createTraitTypeDef(FACT, null, Collections.<String>emptySet());
+ TraitTypeDefinition etlTrait = TypesUtil.createTraitTypeDef(ETL, null, Collections.<String>emptySet());
+ TraitTypeDefinition dimensionTrait = TypesUtil.createTraitTypeDef(DIMENSION, null, Collections.<String>emptySet());
+ TraitTypeDefinition metricTrait = TypesUtil.createTraitTypeDef(METRIC, null, Collections.<String>emptySet());
+
+ createType(getTypesDef(null, null, Arrays.asList(factTrait, etlTrait, dimensionTrait, metricTrait), null));
salesDBName = "Sales" + randomString();
Id salesDB = database(salesDBName, "Sales Database", "John ETL",
"hdfs://host:8000/apps/warehouse/sales");
- List<Referenceable> salesFactColumns = ImmutableList
- .of(column("time_id", "int", "time id"), column("product_id", "int", "product id"),
+ List<Referenceable> salesFactColumns = Arrays.asList(column("time_id", "int", "time id"), column("product_id", "int", "product id"),
column("customer_id", "int", "customer id", PII),
column("sales", "double", "product id", METRIC));
salesFactTable = "sales_fact" + randomString();
Id salesFact = table(salesFactTable, "sales fact table", salesDB, "Joe", "MANAGED", salesFactColumns, FACT);
- List<Referenceable> timeDimColumns = ImmutableList
- .of(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"),
+ List<Referenceable> timeDimColumns = Arrays.asList(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"),
column("weekDay", "int", "week Day"));
Id timeDim =
@@ -226,16 +219,16 @@ public class DataSetLineageJerseyResourceIT extends BaseResourceIT {
table("sales_fact_daily_mv" + randomString(), "sales fact daily materialized view", reportingDB,
"Joe BI", "MANAGED", salesFactColumns, METRIC);
- loadProcess("loadSalesDaily" + randomString(), "John ETL", ImmutableList.of(salesFact, timeDim),
- ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", ETL);
+ loadProcess("loadSalesDaily" + randomString(), "John ETL", Arrays.asList(salesFact, timeDim),
+ Collections.singletonList(salesFactDaily), "create table as select ", "plan", "id", "graph", ETL);
salesMonthlyTable = "sales_fact_monthly_mv" + randomString();
Id salesFactMonthly =
table(salesMonthlyTable, "sales fact monthly materialized view", reportingDB, "Jane BI",
"MANAGED", salesFactColumns, METRIC);
- loadProcess("loadSalesMonthly" + randomString(), "John ETL", ImmutableList.of(salesFactDaily),
- ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph", ETL);
+ loadProcess("loadSalesMonthly" + randomString(), "John ETL", Collections.singletonList(salesFactDaily),
+ Collections.singletonList(salesFactMonthly), "create table as select ", "plan", "id", "graph", ETL);
}
Id database(String name, String description, String owner, String locationUri, String... traitNames)
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/integration/EntityDiscoveryJerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/EntityDiscoveryJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/EntityDiscoveryJerseyResourceIT.java
index 157bf97..75d3872 100755
--- a/webapp/src/test/java/org/apache/atlas/web/integration/EntityDiscoveryJerseyResourceIT.java
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/EntityDiscoveryJerseyResourceIT.java
@@ -18,8 +18,6 @@
package org.apache.atlas.web.integration;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.model.discovery.AtlasSearchResult;
@@ -27,18 +25,14 @@ import org.apache.atlas.model.discovery.AtlasSearchResult.AtlasFullTextResult;
import org.apache.atlas.model.discovery.AtlasSearchResult.AtlasQueryType;
import org.apache.atlas.model.instance.AtlasEntity.Status;
import org.apache.atlas.model.instance.AtlasEntityHeader;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.atlas.v1.model.typedef.*;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import javax.ws.rs.core.MultivaluedMap;
+import java.util.Collections;
import java.util.List;
import static org.testng.Assert.assertEquals;
@@ -196,16 +190,16 @@ public class EntityDiscoveryJerseyResourceIT extends BaseResourceIT {
}
private void createTypes() throws Exception {
- HierarchicalTypeDefinition<ClassType> dslTestTypeDefinition = TypesUtil
- .createClassTypeDef("dsl_test_type", ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<TraitType> classificationTraitDefinition = TypesUtil
- .createTraitTypeDef("Classification", ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.of(classificationTraitDefinition), ImmutableList.of(dslTestTypeDefinition));
+ ClassTypeDefinition dslTestTypeDefinition = TypesUtil
+ .createClassTypeDef("dsl_test_type", null, Collections.<String>emptySet(),
+ TypesUtil.createUniqueRequiredAttrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ TypesUtil.createRequiredAttrDef("description", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+
+ TraitTypeDefinition classificationTraitDefinition = TypesUtil
+ .createTraitTypeDef("Classification", null, Collections.<String>emptySet(),
+ TypesUtil.createRequiredAttrDef("tag", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+ TypesDef typesDef = new TypesDef(Collections.<EnumTypeDefinition>emptyList(), Collections.<StructTypeDefinition>emptyList(),
+ Collections.singletonList(classificationTraitDefinition), Collections.singletonList(dslTestTypeDefinition));
createType(typesDef);
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/integration/EntityJerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/EntityJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/EntityJerseyResourceIT.java
index cf44d4a..0f585f9 100755
--- a/webapp/src/test/java/org/apache/atlas/web/integration/EntityJerseyResourceIT.java
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/EntityJerseyResourceIT.java
@@ -18,8 +18,6 @@
package org.apache.atlas.web.integration;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import org.apache.atlas.AtlasClient;
@@ -27,22 +25,14 @@ import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.EntityAuditEvent;
import org.apache.atlas.kafka.NotificationProvider;
import org.apache.atlas.model.legacy.EntityResult;
+import org.apache.atlas.model.typedef.AtlasBaseTypeDef;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.v1.model.typedef.*;
import org.apache.atlas.notification.NotificationInterface;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.json.InstanceSerialization$;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.json.TypesSerialization$;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.DataTypes;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.apache.atlas.utils.AuthenticationUtil;
import org.apache.commons.lang.RandomStringUtils;
import org.codehaus.jettison.json.JSONArray;
@@ -57,12 +47,7 @@ import org.testng.annotations.Test;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
+import java.util.*;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
@@ -193,7 +178,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
databaseInstance.set("location", "/tmp");
JSONObject response = atlasClientV1
- .callAPIWithBody(AtlasClient.API_V1.CREATE_ENTITY, InstanceSerialization.toJson(databaseInstance, true));
+ .callAPIWithBody(AtlasClient.API_V1.CREATE_ENTITY, AtlasType.toV1Json(databaseInstance));
assertNotNull(response);
Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));
@@ -244,23 +229,23 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@Test
public void testEntityDefinitionAcrossTypeUpdate() throws Exception {
//create type
- HierarchicalTypeDefinition<ClassType> typeDefinition = TypesUtil
- .createClassTypeDef(randomString(), ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE));
- atlasClientV1.createType(TypesSerialization.toJson(typeDefinition, false));
+ ClassTypeDefinition typeDefinition = TypesUtil
+ .createClassTypeDef(randomString(), null, Collections.<String>emptySet(),
+ TypesUtil.createUniqueRequiredAttrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+ atlasClientV1.createType(AtlasType.toV1Json(typeDefinition));
//create entity for the type
- Referenceable instance = new Referenceable(typeDefinition.typeName);
+ Referenceable instance = new Referenceable(typeDefinition.getTypeName());
instance.set("name", randomString());
String guid = atlasClientV1.createEntity(instance).get(0);
//update type - add attribute
- typeDefinition = TypesUtil.createClassTypeDef(typeDefinition.typeName, ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
- TypesUtil.createOptionalAttrDef("description", DataTypes.STRING_TYPE));
- TypesDef typeDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(typeDefinition));
+ typeDefinition = TypesUtil.createClassTypeDef(typeDefinition.getTypeName(), null, Collections.<String>emptySet(),
+ TypesUtil.createUniqueRequiredAttrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ TypesUtil.createOptionalAttrDef("description", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+ TypesDef typeDef = new TypesDef(Collections.<EnumTypeDefinition>emptyList(),
+ Collections.<StructTypeDefinition>emptyList(), Collections.<TraitTypeDefinition>emptyList(),
+ Arrays.asList(typeDefinition));
atlasClientV1.updateType(typeDef);
//Get definition after type update - new attributes should be null
@@ -537,12 +522,12 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
private String addNewType() throws Exception {
String typeName = "test" + randomString();
- HierarchicalTypeDefinition<ClassType> testTypeDefinition = TypesUtil
- .createClassTypeDef(typeName, ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
- TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
+ ClassTypeDefinition testTypeDefinition = TypesUtil
+ .createClassTypeDef(typeName, null, Collections.<String>emptySet(),
+ TypesUtil.createRequiredAttrDef("name", AtlasBaseTypeDef.ATLAS_TYPE_STRING),
+ TypesUtil.createRequiredAttrDef("description", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
- String typesAsJSON = TypesSerialization.toJson(testTypeDefinition, false);
+ String typesAsJSON = AtlasType.toV1Json(testTypeDefinition);
createType(typesAsJSON);
return typeName;
}
@@ -585,9 +570,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
String traitName = "PII_Trait" + randomString();
- HierarchicalTypeDefinition<TraitType> piiTrait =
- TypesUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of());
- String traitDefinitionAsJSON = TypesSerialization$.MODULE$.toJson(piiTrait, true);
+ TraitTypeDefinition piiTrait =
+ TypesUtil.createTraitTypeDef(traitName, null, Collections.<String>emptySet());
+ String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait);
LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON);
createType(traitDefinitionAsJSON);
@@ -614,9 +599,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
String traitName = "PII_Trait" + randomString();
- HierarchicalTypeDefinition<TraitType> piiTrait =
- TypesUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of());
- String traitDefinitionAsJSON = TypesSerialization$.MODULE$.toJson(piiTrait, true);
+ TraitTypeDefinition piiTrait =
+ TypesUtil.createTraitTypeDef(traitName, null, Collections.<String>emptySet());
+ String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait);
LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON);
createType(traitDefinitionAsJSON);
@@ -648,9 +633,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
String traitName = "PII_Trait" + randomString();
- HierarchicalTypeDefinition<TraitType> piiTrait =
- TypesUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of());
- String traitDefinitionAsJSON = TypesSerialization$.MODULE$.toJson(piiTrait, true);
+ TraitTypeDefinition piiTrait =
+ TypesUtil.createTraitTypeDef(traitName, null, Collections.<String>emptySet());
+ String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait);
LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON);
createType(traitDefinitionAsJSON);
@@ -682,10 +667,10 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
final String traitName = "PII_Trait" + randomString();
- HierarchicalTypeDefinition<TraitType> piiTrait = TypesUtil
- .createTraitTypeDef(traitName, ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
- String traitDefinitionAsJSON = TypesSerialization$.MODULE$.toJson(piiTrait, true);
+ TraitTypeDefinition piiTrait = TypesUtil
+ .createTraitTypeDef(traitName, null, Collections.<String>emptySet(),
+ TypesUtil.createRequiredAttrDef("type", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+ String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait);
LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON);
createType(traitDefinitionAsJSON);
@@ -705,13 +690,13 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
@Test(expectedExceptions = AtlasServiceException.class)
public void testAddTraitWithNoRegistration() throws Exception {
final String traitName = "PII_Trait" + randomString();
- HierarchicalTypeDefinition<TraitType> piiTrait =
- TypesUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of());
- String traitDefinitionAsJSON = TypesSerialization$.MODULE$.toJson(piiTrait, true);
+ TraitTypeDefinition piiTrait =
+ TypesUtil.createTraitTypeDef(traitName, null, Collections.<String>emptySet());
+ String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait);
LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON);
Struct traitInstance = new Struct(traitName);
- String traitInstanceAsJSON = InstanceSerialization$.MODULE$.toJson(traitInstance, true);
+ String traitInstanceAsJSON = AtlasType.toV1Json(traitInstance);
LOG.debug("traitInstanceAsJSON = {}", traitInstanceAsJSON);
atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.CREATE_ENTITY, traitInstanceAsJSON, "random", TRAITS);
@@ -734,9 +719,9 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
String traitName = "PII_Trait" + randomString();
- HierarchicalTypeDefinition<TraitType> piiTrait =
- TypesUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of());
- String traitDefinitionAsJSON = TypesSerialization$.MODULE$.toJson(piiTrait, true);
+ TraitTypeDefinition piiTrait =
+ TypesUtil.createTraitTypeDef(traitName, null, Collections.<String>emptySet());
+ String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait);
LOG.debug("traitDefinitionAsJSON = {}", traitDefinitionAsJSON);
createType(traitDefinitionAsJSON);
@@ -794,10 +779,10 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
}
final String traitName = "PII_Trait" + randomString();
- HierarchicalTypeDefinition<TraitType> piiTrait = TypesUtil
- .createTraitTypeDef(traitName, ImmutableSet.<String>of(),
- TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
- String traitDefinitionAsJSON = TypesSerialization$.MODULE$.toJson(piiTrait, true);
+ TraitTypeDefinition piiTrait = TypesUtil
+ .createTraitTypeDef(traitName, null, Collections.<String>emptySet(),
+ TypesUtil.createRequiredAttrDef("type", AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+ String traitDefinitionAsJSON = AtlasType.toV1Json(piiTrait);
createType(traitDefinitionAsJSON);
try {
@@ -821,12 +806,12 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
String attrName = random();
String attrValue = random();
- HierarchicalTypeDefinition<ClassType> classTypeDefinition = TypesUtil
- .createClassTypeDef(classType, ImmutableSet.<String>of(),
- TypesUtil.createUniqueRequiredAttrDef(attrName, DataTypes.STRING_TYPE));
- TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(classTypeDefinition));
+ ClassTypeDefinition classTypeDefinition = TypesUtil
+ .createClassTypeDef(classType, null, Collections.<String>emptySet(),
+ TypesUtil.createUniqueRequiredAttrDef(attrName, AtlasBaseTypeDef.ATLAS_TYPE_STRING));
+ TypesDef typesDef = new TypesDef(Collections.<EnumTypeDefinition>emptyList(), Collections.<StructTypeDefinition>emptyList(),
+ Collections.<TraitTypeDefinition>emptyList(),
+ Collections.singletonList(classTypeDefinition));
createType(typesDef);
Referenceable instance = new Referenceable(classType);
@@ -834,7 +819,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
Id guid = createInstance(instance);
JSONObject response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API_V1.GET_ENTITY, null, guid._getId());
- Referenceable getReferenceable = InstanceSerialization.fromJsonReferenceable(response.getString(AtlasClient.DEFINITION), true);
+ Referenceable getReferenceable = AtlasType.fromV1Json(response.getString(AtlasClient.DEFINITION), Referenceable.class);
Assert.assertEquals(getReferenceable.get(attrName), attrValue);
}
@@ -882,7 +867,7 @@ public class EntityJerseyResourceIT extends BaseResourceIT {
Referenceable entity = atlasClientV1.getEntity(guid);
List<Referenceable> refs = (List<Referenceable>) entity.get("columns");
- Assert.assertTrue(refs.get(0).equalsContents(columns.get(0)));
+ Assert.assertTrue(refs.get(0).equals(columns.get(0)));
}
@Test
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/integration/EntityLineageJerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/EntityLineageJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/EntityLineageJerseyResourceIT.java
index 3811390..4be0002 100644
--- a/webapp/src/test/java/org/apache/atlas/web/integration/EntityLineageJerseyResourceIT.java
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/EntityLineageJerseyResourceIT.java
@@ -18,15 +18,14 @@
package org.apache.atlas.web.integration;
-import com.google.common.collect.ImmutableList;
-import com.google.gson.Gson;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import org.apache.atlas.AtlasBaseClient;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.lineage.AtlasLineageInfo;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.type.AtlasType;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
@@ -34,9 +33,7 @@ import org.testng.annotations.Test;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
/**
* Entity Lineage v2 Integration Tests.
@@ -53,7 +50,6 @@ public class EntityLineageJerseyResourceIT extends DataSetLineageJerseyResourceI
private String salesFactTable;
private String salesMonthlyTable;
private String salesDBName;
- Gson gson = new Gson();
@BeforeClass
public void setUp() throws Exception {
@@ -76,7 +72,7 @@ public class EntityLineageJerseyResourceIT extends DataSetLineageJerseyResourceI
System.out.println("input lineage info = " + response
);
- AtlasLineageInfo inputLineageInfo = gson.fromJson(response.toString(), AtlasLineageInfo.class);
+ AtlasLineageInfo inputLineageInfo = AtlasType.fromJson(response.toString(), AtlasLineageInfo.class);
Map<String, AtlasEntityHeader> entities = inputLineageInfo.getGuidEntityMap();
Assert.assertNotNull(entities);
@@ -104,7 +100,7 @@ public class EntityLineageJerseyResourceIT extends DataSetLineageJerseyResourceI
Assert.assertNotNull(response);
System.out.println("output lineage info = " + response);
- AtlasLineageInfo outputLineageInfo = gson.fromJson(response.toString(), AtlasLineageInfo.class);
+ AtlasLineageInfo outputLineageInfo = AtlasType.fromJson(response.toString(), AtlasLineageInfo.class);
Map<String, AtlasEntityHeader> entities = outputLineageInfo.getGuidEntityMap();
Assert.assertNotNull(entities);
@@ -132,7 +128,7 @@ public class EntityLineageJerseyResourceIT extends DataSetLineageJerseyResourceI
Assert.assertNotNull(response);
System.out.println("both lineage info = " + response);
- AtlasLineageInfo bothLineageInfo = gson.fromJson(response.toString(), AtlasLineageInfo.class);
+ AtlasLineageInfo bothLineageInfo = AtlasType.fromJson(response.toString(), AtlasLineageInfo.class);
Map<String, AtlasEntityHeader> entities = bothLineageInfo.getGuidEntityMap();
Assert.assertNotNull(entities);
@@ -151,16 +147,14 @@ public class EntityLineageJerseyResourceIT extends DataSetLineageJerseyResourceI
salesDBName = "Sales" + randomString();
Id salesDB = database(salesDBName, "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
- List<Referenceable> salesFactColumns = ImmutableList
- .of(column("time_id", "int", "time id"), column("product_id", "int", "product id"),
- column("customer_id", "int", "customer id"),
- column("sales", "double", "product id"));
+ List<Referenceable> salesFactColumns = Arrays.asList(column("time_id", "int", "time id"), column("product_id", "int", "product id"),
+ column("customer_id", "int", "customer id"),
+ column("sales", "double", "product id"));
salesFactTable = "sales_fact" + randomString();
Id salesFact = table(salesFactTable, "sales fact table", salesDB, "Joe", "MANAGED", salesFactColumns);
- List<Referenceable> timeDimColumns = ImmutableList
- .of(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"),
+ List<Referenceable> timeDimColumns = Arrays.asList(column("time_id", "int", "time id"), column("dayOfYear", "int", "day Of Year"),
column("weekDay", "int", "week Day"));
Id timeDim =
@@ -175,15 +169,15 @@ public class EntityLineageJerseyResourceIT extends DataSetLineageJerseyResourceI
table("sales_fact_daily_mv" + randomString(), "sales fact daily materialized view", reportingDB,
"Joe BI", "MANAGED", salesFactColumns);
- loadProcess("loadSalesDaily" + randomString(), "John ETL", ImmutableList.of(salesFact, timeDim),
- ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph");
+ loadProcess("loadSalesDaily" + randomString(), "John ETL", Arrays.asList(salesFact, timeDim),
+ Collections.singletonList(salesFactDaily), "create table as select ", "plan", "id", "graph");
salesMonthlyTable = "sales_fact_monthly_mv" + randomString();
Id salesFactMonthly =
table(salesMonthlyTable, "sales fact monthly materialized view", reportingDB, "Jane BI",
"MANAGED", salesFactColumns);
- loadProcess("loadSalesMonthly" + randomString(), "John ETL", ImmutableList.of(salesFactDaily),
- ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph");
+ loadProcess("loadSalesMonthly" + randomString(), "John ETL", Collections.singletonList(salesFactDaily),
+ Collections.singletonList(salesFactMonthly), "create table as select ", "plan", "id", "graph");
}
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java
index d61a9af..77a85f6 100755
--- a/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java
@@ -18,14 +18,11 @@
package org.apache.atlas.web.integration;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.sun.jersey.api.client.ClientResponse;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasServiceException;
import org.apache.atlas.EntityAuditEvent;
-import org.apache.atlas.kafka.NotificationProvider;
import org.apache.atlas.model.instance.AtlasClassification;
import org.apache.atlas.model.instance.AtlasClassification.AtlasClassifications;
import org.apache.atlas.model.instance.AtlasEntity;
@@ -37,11 +34,8 @@ import org.apache.atlas.model.instance.EntityMutations;
import org.apache.atlas.model.typedef.AtlasClassificationDef;
import org.apache.atlas.model.typedef.AtlasEntityDef;
import org.apache.atlas.model.typedef.AtlasTypesDef;
-import org.apache.atlas.notification.NotificationConsumer;
-import org.apache.atlas.notification.NotificationInterface;
-import org.apache.atlas.notification.entity.EntityNotification;
import org.apache.atlas.type.AtlasTypeUtil;
-import org.apache.atlas.typesystem.types.TypeUtils;
+import org.apache.atlas.v1.typesystem.types.utils.TypesUtil;
import org.apache.commons.lang.RandomStringUtils;
import org.codehaus.jettison.json.JSONArray;
import org.joda.time.DateTime;
@@ -51,11 +45,8 @@ import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.apache.atlas.kafka.AtlasKafkaConsumer;
+import java.util.*;
+
import static org.testng.Assert.*;
@@ -83,7 +74,7 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
@Test
public void testSubmitEntity() throws Exception {
- TypeUtils.Pair dbAndTable = createDBAndTable();
+ TypesUtil.Pair dbAndTable = createDBAndTable();
assertNotNull(dbAndTable);
assertNotNull(dbAndTable.left);
assertNotNull(dbAndTable.right);
@@ -192,7 +183,7 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
//create type
AtlasEntityDef entityDef = AtlasTypeUtil
.createClassTypeDef(randomString(),
- ImmutableSet.<String>of(),
+ Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string")
);
AtlasTypesDef typesDef = new AtlasTypesDef();
@@ -213,7 +204,7 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
String guid = mutationResponse.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).get(0).getGuid();
//update type - add attribute
- entityDef = AtlasTypeUtil.createClassTypeDef(entityDef.getName(), ImmutableSet.<String>of(),
+ entityDef = AtlasTypeUtil.createClassTypeDef(entityDef.getName(), Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"),
AtlasTypeUtil.createOptionalAttrDef("description", "string"));
@@ -366,7 +357,7 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
private String addNewType() throws Exception {
String typeName = "test" + randomString();
AtlasEntityDef classTypeDef = AtlasTypeUtil
- .createClassTypeDef(typeName, ImmutableSet.<String>of(),
+ .createClassTypeDef(typeName, Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("name", "string"),
AtlasTypeUtil.createRequiredAttrDef("description", "string"));
AtlasTypesDef typesDef = new AtlasTypesDef();
@@ -419,10 +410,10 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
return hiveDBInstanceV2;
}
- private TypeUtils.Pair<AtlasEntity, AtlasEntity> createDBAndTable() throws Exception {
+ private TypesUtil.Pair<AtlasEntity, AtlasEntity> createDBAndTable() throws Exception {
AtlasEntity dbInstanceV2 = createHiveDB();
AtlasEntity hiveTableInstanceV2 = createHiveTable();
- return TypeUtils.Pair.of(dbInstanceV2, hiveTableInstanceV2);
+ return TypesUtil.Pair.of(dbInstanceV2, hiveTableInstanceV2);
}
private AtlasEntity createHiveTable() throws Exception {
@@ -447,12 +438,12 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
public void testAddTrait() throws Exception {
traitName = "PII_Trait" + randomString();
AtlasClassificationDef piiTrait =
- AtlasTypeUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of());
+ AtlasTypeUtil.createTraitTypeDef(traitName, Collections.<String>emptySet());
AtlasTypesDef typesDef = new AtlasTypesDef();
typesDef.getClassificationDefs().add(piiTrait);
createType(typesDef);
- atlasClientV2.addClassifications(createHiveTable().getGuid(), ImmutableList.of(new AtlasClassification(piiTrait.getName())));
+ atlasClientV2.addClassifications(createHiveTable().getGuid(), Collections.singletonList(new AtlasClassification(piiTrait.getName())));
assertEntityAudit(createHiveTable().getGuid(), EntityAuditEvent.EntityAuditAction.TAG_ADD);
}
@@ -461,7 +452,7 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
public void testGetTraitDefinitionForEntity() throws Exception{
traitName = "PII_Trait" + randomString();
AtlasClassificationDef piiTrait =
- AtlasTypeUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of());
+ AtlasTypeUtil.createTraitTypeDef(traitName, Collections.<String>emptySet());
AtlasTypesDef typesDef = new AtlasTypesDef();
typesDef.getClassificationDefs().add(piiTrait);
createType(typesDef);
@@ -487,7 +478,7 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
public void testAddTraitWithAttribute() throws Exception {
final String traitName = "PII_Trait" + randomString();
AtlasClassificationDef piiTrait = AtlasTypeUtil
- .createTraitTypeDef(traitName, ImmutableSet.<String>of(),
+ .createTraitTypeDef(traitName, Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("type", "string"));
AtlasTypesDef typesDef = new AtlasTypesDef();
typesDef.getClassificationDefs().add(piiTrait);
@@ -497,7 +488,7 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
traitInstance.setAttribute("type", "SSN");
final String guid = createHiveTable().getGuid();
- atlasClientV2.addClassifications(guid, ImmutableList.of(traitInstance));
+ atlasClientV2.addClassifications(guid, Collections.singletonList(traitInstance));
// verify the response
AtlasEntity withAssociationByGuid = atlasClientV2.getEntityByGuid(guid).getEntity();
@@ -518,11 +509,11 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
@Test(expectedExceptions = AtlasServiceException.class)
public void testAddTraitWithNoRegistration() throws Exception {
final String traitName = "PII_Trait" + randomString();
- AtlasTypeUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of());
+ AtlasTypeUtil.createTraitTypeDef(traitName, Collections.<String>emptySet());
AtlasClassification traitInstance = new AtlasClassification(traitName);
- atlasClientV2.addClassifications("random", ImmutableList.of(traitInstance));
+ atlasClientV2.addClassifications("random", Collections.singletonList(traitInstance));
}
@Test(dependsOnMethods = "testAddTrait")
@@ -558,7 +549,7 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
final String guid = createHiveTable().getGuid();
final String traitName = "PII_Trait" + randomString();
AtlasClassificationDef piiTrait = AtlasTypeUtil
- .createTraitTypeDef(traitName, ImmutableSet.<String>of(),
+ .createTraitTypeDef(traitName, Collections.<String>emptySet(),
AtlasTypeUtil.createRequiredAttrDef("type", "string"));
AtlasTypesDef typesDef = new AtlasTypesDef();
typesDef.getClassificationDefs().add(piiTrait);
@@ -584,7 +575,7 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
String attrValue = random();
AtlasEntityDef classTypeDef = AtlasTypeUtil
- .createClassTypeDef(classType, ImmutableSet.<String>of(),
+ .createClassTypeDef(classType, Collections.<String>emptySet(),
AtlasTypeUtil.createUniqueRequiredAttrDef(attrName, "string"));
AtlasTypesDef atlasTypesDef = new AtlasTypesDef();
atlasTypesDef.getEntityDefs().add(classTypeDef);
@@ -721,7 +712,7 @@ public class EntityV2JerseyResourceIT extends BaseResourceIT {
AtlasEntityHeader entity2Header = createEntity(db2);
// Delete the database entities
- EntityMutationResponse deleteResponse = atlasClientV2.deleteEntitiesByGuids(ImmutableList.of(entity1Header.getGuid(), entity2Header.getGuid()));
+ EntityMutationResponse deleteResponse = atlasClientV2.deleteEntitiesByGuids(Arrays.asList(entity1Header.getGuid(), entity2Header.getGuid()));
// Verify that deleteEntities() response has database entity guids
assertNotNull(deleteResponse);
[24/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplication of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/BaseRepositoryTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/BaseRepositoryTest.java b/repository/src/test/java/org/apache/atlas/BaseRepositoryTest.java
deleted file mode 100644
index 7c6b60b..0000000
--- a/repository/src/test/java/org/apache/atlas/BaseRepositoryTest.java
+++ /dev/null
@@ -1,428 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.atlas;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
-import org.apache.atlas.services.MetadataService;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.*;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.testng.annotations.Guice;
-
-import javax.inject.Inject;
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.List;
-
-import static org.apache.atlas.AtlasClient.PROCESS_ATTRIBUTE_INPUTS;
-import static org.apache.atlas.AtlasClient.PROCESS_ATTRIBUTE_OUTPUTS;
-
-/**
- * Base Class to set up hive types and instances for tests
- */
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class BaseRepositoryTest {
-
- @Inject
- protected MetadataService metadataService;
-
- @Inject
- protected MetadataRepository repository;
-
-
- protected void setUp() throws Exception {
- //force graph initialization / built in type registration
- TestUtils.getGraph();
- setUpDefaultTypes();
- setUpTypes();
- TestUtils.getGraph().commit();
- new GraphBackedSearchIndexer(new AtlasTypeRegistry());
- TestUtils.resetRequestContext();
- setupInstances();
- TestUtils.getGraph().commit();
- TestUtils.dumpGraph(TestUtils.getGraph());
- }
-
- protected void tearDown() throws Exception {
- TypeSystem.getInstance().reset();
- }
-
- private void setUpTypes() throws Exception {
- TypesDef typesDef = createTypeDefinitions();
- String typesAsJSON = TypesSerialization.toJson(typesDef);
- metadataService.createType(typesAsJSON);
- }
-
- protected static final String DATABASE_TYPE = "hive_db";
- protected static final String HIVE_TABLE_TYPE = "hive_table";
- private static final String COLUMN_TYPE = "hive_column";
- private static final String HIVE_PROCESS_TYPE = "hive_process";
- private static final String STORAGE_DESC_TYPE = "StorageDesc";
- private static final String VIEW_TYPE = "View";
- private static final String PARTITION_TYPE = "hive_partition";
- protected static final String DATASET_SUBTYPE = "dataset_subtype";
-
- TypesDef createTypeDefinitions() {
- HierarchicalTypeDefinition<ClassType> dbClsDef = TypesUtil
- .createClassTypeDef(DATABASE_TYPE, null,
- TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
- attrDef("description", DataTypes.STRING_TYPE), attrDef("locationUri", DataTypes.STRING_TYPE),
- attrDef("owner", DataTypes.STRING_TYPE), attrDef("createTime", DataTypes.LONG_TYPE));
-
- HierarchicalTypeDefinition<ClassType> columnClsDef = TypesUtil
- .createClassTypeDef(COLUMN_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
- attrDef("dataType", DataTypes.STRING_TYPE), attrDef("comment", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<ClassType> storageDescClsDef = TypesUtil
- .createClassTypeDef(STORAGE_DESC_TYPE, null,
- attrDef("location", DataTypes.STRING_TYPE),
- attrDef("inputFormat", DataTypes.STRING_TYPE), attrDef("outputFormat", DataTypes.STRING_TYPE),
- attrDef("compressed", DataTypes.STRING_TYPE, Multiplicity.REQUIRED, false, null));
-
-
- HierarchicalTypeDefinition<ClassType> tblClsDef = TypesUtil
- .createClassTypeDef(HIVE_TABLE_TYPE, ImmutableSet.of("DataSet"),
- attrDef("owner", DataTypes.STRING_TYPE),
- attrDef("createTime", DataTypes.DATE_TYPE),
- attrDef("lastAccessTime", DataTypes.LONG_TYPE), attrDef("tableType", DataTypes.STRING_TYPE),
- attrDef("temporary", DataTypes.BOOLEAN_TYPE),
- new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
- // todo - uncomment this, something is broken
- new AttributeDefinition("sd", STORAGE_DESC_TYPE, Multiplicity.REQUIRED, true, null),
- new AttributeDefinition("columns", DataTypes.arrayTypeName(COLUMN_TYPE),
- Multiplicity.COLLECTION, true, null));
-
- HierarchicalTypeDefinition<ClassType> loadProcessClsDef = TypesUtil
- .createClassTypeDef(HIVE_PROCESS_TYPE, ImmutableSet.of("Process"),
- attrDef("userName", DataTypes.STRING_TYPE), attrDef("startTime", DataTypes.LONG_TYPE),
- attrDef("endTime", DataTypes.LONG_TYPE),
- attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
- attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
- attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
- attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED));
-
- HierarchicalTypeDefinition<ClassType> viewClsDef = TypesUtil
- .createClassTypeDef(VIEW_TYPE, null, attrDef("name", DataTypes.STRING_TYPE),
- new AttributeDefinition("db", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
- new AttributeDefinition("inputTables", DataTypes.arrayTypeName(HIVE_TABLE_TYPE),
- Multiplicity.COLLECTION, false, null));
-
- AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
- new AttributeDefinition("values", DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
- Multiplicity.OPTIONAL, false, null),
- new AttributeDefinition("table", HIVE_TABLE_TYPE, Multiplicity.REQUIRED, false, null),
- };
- HierarchicalTypeDefinition<ClassType> partClsDef =
- new HierarchicalTypeDefinition<>(ClassType.class, PARTITION_TYPE, null, null,
- attributeDefinitions);
-
- HierarchicalTypeDefinition<ClassType> datasetSubTypeClsDef = TypesUtil
- .createClassTypeDef(DATASET_SUBTYPE, ImmutableSet.of("DataSet"));
-
- HierarchicalTypeDefinition < TraitType > dimTraitDef = TypesUtil.createTraitTypeDef("Dimension", null);
-
- HierarchicalTypeDefinition<TraitType> factTraitDef = TypesUtil.createTraitTypeDef("Fact", null);
-
- HierarchicalTypeDefinition<TraitType> metricTraitDef = TypesUtil.createTraitTypeDef("Metric", null);
-
- HierarchicalTypeDefinition<TraitType> etlTraitDef = TypesUtil.createTraitTypeDef("ETL", null);
-
- HierarchicalTypeDefinition<TraitType> piiTraitDef = TypesUtil.createTraitTypeDef("PII", null);
-
- HierarchicalTypeDefinition<TraitType> jdbcTraitDef = TypesUtil.createTraitTypeDef("JdbcAccess", null);
-
- HierarchicalTypeDefinition<TraitType> logTraitDef = TypesUtil.createTraitTypeDef("Log Data", null);
-
- HierarchicalTypeDefinition<TraitType> isaKeywordTraitDef = TypesUtil.createTraitTypeDef("isa", null);
-
- return TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.of(dimTraitDef, factTraitDef, piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef, logTraitDef,
- isaKeywordTraitDef),
- ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef, tblClsDef, loadProcessClsDef, viewClsDef, partClsDef, datasetSubTypeClsDef));
- }
-
- AttributeDefinition attrDef(String name, IDataType dT) {
- return attrDef(name, dT, Multiplicity.OPTIONAL, false, null);
- }
-
- AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m) {
- return attrDef(name, dT, m, false, null);
- }
-
- AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m, boolean isComposite,
- String reverseAttributeName) {
- Preconditions.checkNotNull(name);
- Preconditions.checkNotNull(dT);
- return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName);
- }
-
- private void setupInstances() throws Exception {
- Id salesDB = database("Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
-
- Referenceable sd =
- storageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat", true, ImmutableList.of(
- column("time_id", "int", "time id")));
-
- List<Referenceable> salesFactColumns = ImmutableList
- .of(column("time_id", "int", "time id"),
- column("product_id", "int", "product id"),
- column("customer_id", "int", "customer id", "PII"),
- column("sales", "double", "product id", "Metric"));
-
- Id salesFact = table("sales_fact", "sales fact table", salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
-
- List<Referenceable> logFactColumns = ImmutableList
- .of(column("time_id", "int", "time id"), column("app_id", "int", "app id"),
- column("machine_id", "int", "machine id"), column("log", "string", "log data", "Log Data"));
-
- List<Referenceable> timeDimColumns = ImmutableList
- .of(column("time_id", "int", "time id"),
- column("dayOfYear", "int", "day Of Year"),
- column("weekDay", "int", "week Day"));
-
- Id timeDim = table("time_dim", "time dimension table", salesDB, sd, "John Doe", "External", timeDimColumns,
- "Dimension");
-
- Id reportingDB =
- database("Reporting", "reporting database", "Jane BI", "hdfs://host:8000/apps/warehouse/reporting");
-
- Id salesFactDaily =
- table("sales_fact_daily_mv", "sales fact daily materialized view", reportingDB, sd, "Joe BI", "Managed",
- salesFactColumns, "Metric");
-
- Id circularLineageTable1 = table("table1", "", reportingDB, sd, "Vimal", "Managed", salesFactColumns, "Metric");
-
- Id circularLineageTable2 = table("table2", "", reportingDB, sd, "Vimal", "Managed", salesFactColumns, "Metric");
-
- loadProcess("circularLineage1", "hive query for daily summary", "John ETL", ImmutableList.of(circularLineageTable1),
- ImmutableList.of(circularLineageTable2), "create table as select ", "plan", "id", "graph", "ETL");
-
- loadProcess("circularLineage2", "hive query for daily summary", "John ETL", ImmutableList.of(circularLineageTable2),
- ImmutableList.of(circularLineageTable1), "create table as select ", "plan", "id", "graph", "ETL");
-
- loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL", ImmutableList.of(salesFact, timeDim),
- ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL");
-
- Id logDB = database("Logging", "logging database", "Tim ETL", "hdfs://host:8000/apps/warehouse/logging");
-
- Id loggingFactDaily =
- table("log_fact_daily_mv", "log fact daily materialized view", logDB, sd, "Tim ETL", "Managed",
- logFactColumns, "Log Data");
-
- List<Referenceable> productDimColumns = ImmutableList
- .of(column("product_id", "int", "product id"),
- column("product_name", "string", "product name"),
- column("brand_name", "int", "brand name"));
-
- Id productDim =
- table("product_dim", "product dimension table", salesDB, sd, "John Doe", "Managed", productDimColumns,
- "Dimension");
-
- view("product_dim_view", reportingDB, ImmutableList.of(productDim), "Dimension", "JdbcAccess");
-
- List<Referenceable> customerDimColumns = ImmutableList.of(
- column("customer_id", "int", "customer id", "PII"),
- column("name", "string", "customer name", "PII"),
- column("address", "string", "customer address", "PII"));
-
- Id customerDim =
- table("customer_dim", "customer dimension table", salesDB, sd, "fetl", "External", customerDimColumns,
- "Dimension");
-
- view("customer_dim_view", reportingDB, ImmutableList.of(customerDim), "Dimension", "JdbcAccess");
-
- Id salesFactMonthly =
- table("sales_fact_monthly_mv", "sales fact monthly materialized view", reportingDB, sd, "Jane BI",
- "Managed", salesFactColumns, "Metric");
-
- loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL", ImmutableList.of(salesFactDaily),
- ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL");
-
- Id loggingFactMonthly =
- table("logging_fact_monthly_mv", "logging fact monthly materialized view", logDB, sd, "Tim ETL",
- "Managed", logFactColumns, "Log Data");
-
- loadProcess("loadLogsMonthly", "hive query for monthly summary", "Tim ETL", ImmutableList.of(loggingFactDaily),
- ImmutableList.of(loggingFactMonthly), "create table as select ", "plan", "id", "graph", "ETL");
-
- partition(new ArrayList() {{ add("2015-01-01"); }}, salesFactDaily);
-
- datasetSubType("dataSetSubTypeInst1", "testOwner");
- }
-
- Id database(String name, String description, String owner, String locationUri, String... traitNames)
- throws Exception {
- Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
- referenceable.set("name", name);
- referenceable.set("description", description);
- referenceable.set("owner", owner);
- referenceable.set("locationUri", locationUri);
- referenceable.set("createTime", System.currentTimeMillis());
-
- ClassType clsType = TypeSystem.getInstance().getDataType(ClassType.class, DATABASE_TYPE);
- return createInstance(referenceable, clsType);
- }
-
- protected Referenceable storageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed, List<Referenceable> columns)
- throws Exception {
- Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
- referenceable.set("location", location);
- referenceable.set("inputFormat", inputFormat);
- referenceable.set("outputFormat", outputFormat);
- referenceable.set("compressed", compressed);
- referenceable.set("cols", columns);
-
- return referenceable;
- }
-
- protected Referenceable column(String name, String dataType, String comment, String... traitNames) throws Exception {
- Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
- referenceable.set("name", name);
- referenceable.set("dataType", dataType);
- referenceable.set("comment", comment);
-
- return referenceable;
- }
-
- protected Id table(String name, String description, Id dbId, Referenceable sd, String owner, String tableType,
- List<Referenceable> columns, String... traitNames) throws Exception {
- Referenceable referenceable = new Referenceable(HIVE_TABLE_TYPE, traitNames);
- referenceable.set("name", name);
- referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "qualified:" + name);
- referenceable.set("description", description);
- referenceable.set("owner", owner);
- referenceable.set("tableType", tableType);
- referenceable.set("temporary", false);
- referenceable.set("createTime", new Date(System.currentTimeMillis()));
- referenceable.set("lastAccessTime", System.currentTimeMillis());
- referenceable.set("retention", System.currentTimeMillis());
-
- referenceable.set("db", dbId);
- // todo - uncomment this, something is broken
- referenceable.set("sd", sd);
- referenceable.set("columns", columns);
-
- ClassType clsType = TypeSystem.getInstance().getDataType(ClassType.class, HIVE_TABLE_TYPE);
- return createInstance(referenceable, clsType);
- }
-
- protected Id loadProcess(String name, String description, String user, List<Id> inputTables, List<Id> outputTables,
- String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames)
- throws Exception {
- Referenceable referenceable = new Referenceable(HIVE_PROCESS_TYPE, traitNames);
- referenceable.set("name", name);
- referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
- referenceable.set("description", description);
- referenceable.set("user", user);
- referenceable.set("startTime", System.currentTimeMillis());
- referenceable.set("endTime", System.currentTimeMillis() + 10000);
-
- referenceable.set("inputs", inputTables);
- referenceable.set("outputs", outputTables);
-
- referenceable.set("queryText", queryText);
- referenceable.set("queryPlan", queryPlan);
- referenceable.set("queryId", queryId);
- referenceable.set("queryGraph", queryGraph);
-
- ClassType clsType = TypeSystem.getInstance().getDataType(ClassType.class, HIVE_PROCESS_TYPE);
- return createInstance(referenceable, clsType);
- }
-
- Id view(String name, Id dbId, List<Id> inputTables, String... traitNames) throws Exception {
- Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
- referenceable.set("name", name);
- referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
- referenceable.set("db", dbId);
-
- referenceable.set("inputTables", inputTables);
- ClassType clsType = TypeSystem.getInstance().getDataType(ClassType.class, VIEW_TYPE);
- return createInstance(referenceable, clsType);
- }
-
- Id partition(List<String> values, Id table, String... traitNames) throws Exception {
- Referenceable referenceable = new Referenceable(PARTITION_TYPE, traitNames);
- referenceable.set("values", values);
- referenceable.set("table", table);
- ClassType clsType = TypeSystem.getInstance().getDataType(ClassType.class, PARTITION_TYPE);
- return createInstance(referenceable, clsType);
- }
-
- Id datasetSubType(final String name, String owner) throws Exception {
- Referenceable referenceable = new Referenceable(DATASET_SUBTYPE);
- referenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
- referenceable.set(AtlasClient.NAME, name);
- referenceable.set("owner", owner);
- ClassType clsType = TypeSystem.getInstance().getDataType(ClassType.class, DATASET_SUBTYPE);
- return createInstance(referenceable, clsType);
- }
- private Id createInstance(Referenceable referenceable, ClassType clsType) throws Exception {
- ITypedReferenceableInstance typedInstance = clsType.convert(referenceable, Multiplicity.REQUIRED);
- List<String> guids = repository.createEntities(typedInstance).getCreatedEntities();
-
- // return the reference to created instance with guid
- return new Id(guids.get(guids.size() - 1), 0, referenceable.getTypeName());
- }
-
- private void setUpDefaultTypes() throws Exception {
- TypesDef typesDef = createDefaultTypeDefinitions();
- String typesAsJSON = TypesSerialization.toJson(typesDef);
- metadataService.createType(typesAsJSON);
- }
-
- TypesDef createDefaultTypeDefinitions() {
- HierarchicalTypeDefinition<ClassType> referenceableType = TypesUtil
- .createClassTypeDef(AtlasClient.REFERENCEABLE_SUPER_TYPE, ImmutableSet.<String>of(),
- new AttributeDefinition(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, true, true, null));
-
- HierarchicalTypeDefinition<ClassType> assetType = TypesUtil
- .createClassTypeDef(AtlasClient.ASSET_TYPE, ImmutableSet.<String>of(),
- new AttributeDefinition(AtlasClient.NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, false, true, null),
- TypesUtil.createOptionalAttrDef(AtlasClient.DESCRIPTION, DataTypes.STRING_TYPE),
- new AttributeDefinition(AtlasClient.OWNER, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, false, true, null));
-
- HierarchicalTypeDefinition<ClassType> infraType = TypesUtil
- .createClassTypeDef(AtlasClient.INFRASTRUCTURE_SUPER_TYPE,
- ImmutableSet.of(AtlasClient.REFERENCEABLE_SUPER_TYPE, AtlasClient.ASSET_TYPE));
-
- HierarchicalTypeDefinition<ClassType> datasetType = TypesUtil
- .createClassTypeDef(AtlasClient.DATA_SET_SUPER_TYPE,
- ImmutableSet.of(AtlasClient.REFERENCEABLE_SUPER_TYPE, AtlasClient.ASSET_TYPE));
-
- HierarchicalTypeDefinition<ClassType> processType = TypesUtil
- .createClassTypeDef(AtlasClient.PROCESS_SUPER_TYPE,
- ImmutableSet.of(AtlasClient.REFERENCEABLE_SUPER_TYPE, AtlasClient.ASSET_TYPE),
- new AttributeDefinition(PROCESS_ATTRIBUTE_INPUTS, DataTypes.arrayTypeName(AtlasClient.DATA_SET_SUPER_TYPE),
- Multiplicity.OPTIONAL, false, null),
- new AttributeDefinition(PROCESS_ATTRIBUTE_OUTPUTS, DataTypes.arrayTypeName(AtlasClient.DATA_SET_SUPER_TYPE),
- Multiplicity.OPTIONAL, false, null));
-
- return TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(referenceableType, assetType, infraType, datasetType, processType));
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/RepositoryServiceLoadingTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/RepositoryServiceLoadingTest.java b/repository/src/test/java/org/apache/atlas/RepositoryServiceLoadingTest.java
deleted file mode 100755
index 506d11c..0000000
--- a/repository/src/test/java/org/apache/atlas/RepositoryServiceLoadingTest.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas;
-
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
-import org.testng.Assert;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-/**
- * Unit test for Guice injector service loading
- *
- * Uses TestNG's Guice annotation to load the necessary modules and inject the
- * objects from Guice
- */
-@Guice(modules = TestModules.TestOnlyModule.class)
-public class RepositoryServiceLoadingTest {
-
- @Test
- public void testGetGraphService() throws Exception {
- Assert.assertNotNull(AtlasGraphProvider.getGraphInstance());
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/TestModules.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/TestModules.java b/repository/src/test/java/org/apache/atlas/TestModules.java
index 144f618..5c9e60f 100644
--- a/repository/src/test/java/org/apache/atlas/TestModules.java
+++ b/repository/src/test/java/org/apache/atlas/TestModules.java
@@ -26,24 +26,14 @@ import com.google.inject.multibindings.Multibinder;
import org.apache.atlas.annotation.GraphTransaction;
import org.apache.atlas.discovery.AtlasDiscoveryService;
import org.apache.atlas.discovery.AtlasLineageService;
-import org.apache.atlas.discovery.DataSetLineageService;
-import org.apache.atlas.discovery.DiscoveryService;
import org.apache.atlas.discovery.EntityDiscoveryService;
import org.apache.atlas.discovery.EntityLineageService;
-import org.apache.atlas.discovery.LineageService;
-import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
import org.apache.atlas.graph.GraphSandboxUtil;
import org.apache.atlas.listener.EntityChangeListener;
import org.apache.atlas.listener.TypeDefChangeListener;
-import org.apache.atlas.listener.TypesChangeListener;
-import org.apache.atlas.repository.MetadataRepository;
import org.apache.atlas.repository.audit.EntityAuditListener;
import org.apache.atlas.repository.audit.EntityAuditRepository;
-import org.apache.atlas.repository.graph.DeleteHandler;
-import org.apache.atlas.repository.graph.GraphBackedMetadataRepository;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
-import org.apache.atlas.repository.graph.HardDeleteHandler;
-import org.apache.atlas.repository.graph.SoftDeleteHandler;
import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.apache.atlas.repository.impexp.ExportService;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
@@ -58,16 +48,9 @@ import org.apache.atlas.repository.store.graph.v1.DeleteHandlerV1;
import org.apache.atlas.repository.store.graph.v1.EntityGraphMapper;
import org.apache.atlas.repository.store.graph.v1.HardDeleteHandlerV1;
import org.apache.atlas.repository.store.graph.v1.SoftDeleteHandlerV1;
-import org.apache.atlas.repository.typestore.GraphBackedTypeStore;
-import org.apache.atlas.repository.typestore.ITypeStore;
-import org.apache.atlas.repository.typestore.StoreBackedTypeCache;
import org.apache.atlas.service.Service;
-import org.apache.atlas.services.DefaultMetadataService;
-import org.apache.atlas.services.MetadataService;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.types.TypeSystem;
-import org.apache.atlas.typesystem.types.cache.TypeCache;
import org.apache.atlas.util.AtlasRepositoryConfiguration;
import org.apache.atlas.util.SearchTracker;
import org.apache.commons.configuration.Configuration;
@@ -89,13 +72,6 @@ public class TestModules {
private static final Logger LOG = LoggerFactory.getLogger(TestOnlyModule.class);
- static class TypeSystemProvider implements Provider<TypeSystem> {
- @Override
- public TypeSystem get() {
- return TypeSystem.getInstance();
- }
- }
-
static class AtlasConfigurationProvider implements Provider<Configuration> {
@Override
@@ -126,32 +102,19 @@ public class TestModules {
bind(AtlasGraph.class).toProvider(AtlasGraphProvider.class);
- // allow for dynamic binding of the metadata repo & graph service
- // bind the MetadataRepositoryService interface to an implementation
- bind(MetadataRepository.class).to(GraphBackedMetadataRepository.class).asEagerSingleton();
-
- bind(TypeSystem.class).toProvider(TypeSystemProvider.class).in(Singleton.class);
+ // allow for dynamic binding of graph service
bind(Configuration.class).toProvider(AtlasConfigurationProvider.class).in(Singleton.class);
- // bind the ITypeStore interface to an implementation
- bind(ITypeStore.class).to(GraphBackedTypeStore.class).asEagerSingleton();
+ // bind the AtlasTypeDefStore interface to an implementation
bind(AtlasTypeDefStore.class).to(AtlasTypeDefGraphStoreV1.class).asEagerSingleton();
bind(AtlasTypeRegistry.class).asEagerSingleton();
bind(EntityGraphMapper.class).asEagerSingleton();
bind(ExportService.class).asEagerSingleton();
- //GraphBackedSearchIndexer must be an eager singleton to force the search index creation to happen before
- //we try to restore the type system (otherwise we'll end up running queries
- //before we have any indices during the initial graph setup)
- Multibinder<TypesChangeListener> typesChangeListenerBinder =
- Multibinder.newSetBinder(binder(), TypesChangeListener.class);
- typesChangeListenerBinder.addBinding().to(GraphBackedSearchIndexer.class).asEagerSingleton();
-
// New typesdef/instance change listener should also be bound to the corresponding implementation
Multibinder<TypeDefChangeListener> typeDefChangeListenerMultibinder =
Multibinder.newSetBinder(binder(), TypeDefChangeListener.class);
- typeDefChangeListenerMultibinder.addBinding().to(DefaultMetadataService.class);
typeDefChangeListenerMultibinder.addBinding().to(GraphBackedSearchIndexer.class).asEagerSingleton();
bind(SearchTracker.class).asEagerSingleton();
@@ -159,19 +122,12 @@ public class TestModules {
bind(AtlasEntityStore.class).to(AtlasEntityStoreV1.class);
bind(AtlasRelationshipStore.class).to(AtlasRelationshipStoreV1.class);
- // bind the MetadataService interface to an implementation
- bind(MetadataService.class).to(DefaultMetadataService.class).asEagerSingleton();
-
// bind the DiscoveryService interface to an implementation
- bind(DiscoveryService.class).to(GraphBackedDiscoveryService.class).asEagerSingleton();
bind(AtlasDiscoveryService.class).to(EntityDiscoveryService.class).asEagerSingleton();
- bind(LineageService.class).to(DataSetLineageService.class).asEagerSingleton();
bind(AtlasLineageService.class).to(EntityLineageService.class).asEagerSingleton();
bind(BulkImporter.class).to(BulkImporterImpl.class).asEagerSingleton();
- bindTypeCache();
-
//Add EntityAuditListener as EntityChangeListener
Multibinder<EntityChangeListener> entityChangeListenerBinder =
Multibinder.newSetBinder(binder(), EntityChangeListener.class);
@@ -182,12 +138,7 @@ public class TestModules {
bindInterceptor(Matchers.any(), Matchers.annotatedWith(GraphTransaction.class), graphTransactionInterceptor);
}
- protected void bindTypeCache() {
- bind(TypeCache.class).to(AtlasRepositoryConfiguration.getTypeCache()).asEagerSingleton();
- }
-
protected void bindDeleteHandler(Binder binder) {
- binder.bind(DeleteHandler.class).to(AtlasRepositoryConfiguration.getDeleteHandlerImpl()).asEagerSingleton();
binder.bind(DeleteHandlerV1.class).to(AtlasRepositoryConfiguration.getDeleteHandlerV1Impl()).asEagerSingleton();
}
@@ -210,7 +161,6 @@ public class TestModules {
public static class SoftDeleteModule extends TestOnlyModule {
@Override
protected void bindDeleteHandler(Binder binder) {
- bind(DeleteHandler.class).to(SoftDeleteHandler.class).asEagerSingleton();
bind(DeleteHandlerV1.class).to(SoftDeleteHandlerV1.class).asEagerSingleton();
bind(AtlasEntityChangeNotifier.class).toProvider(MockNotifier.class);
}
@@ -219,20 +169,8 @@ public class TestModules {
public static class HardDeleteModule extends TestOnlyModule {
@Override
protected void bindDeleteHandler(Binder binder) {
- bind(DeleteHandler.class).to(HardDeleteHandler.class).asEagerSingleton();
bind(DeleteHandlerV1.class).to(HardDeleteHandlerV1.class).asEagerSingleton();
bind(AtlasEntityChangeNotifier.class).toProvider(MockNotifier.class);
}
}
-
- /**
- * Guice module which sets TypeCache implementation class configuration property to {@link StoreBackedTypeCache}.
- *
- */
- public static class StoreBackedTypeCacheTestModule extends TestOnlyModule {
- @Override
- protected void bindTypeCache() {
- bind(TypeCache.class).to(StoreBackedTypeCache.class).asEagerSingleton();
- }
- }
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/java/org/apache/atlas/TestUtils.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/TestUtils.java b/repository/src/test/java/org/apache/atlas/TestUtils.java
deleted file mode 100755
index 56bfb82..0000000
--- a/repository/src/test/java/org/apache/atlas/TestUtils.java
+++ /dev/null
@@ -1,778 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-import org.apache.atlas.annotation.GraphTransaction;
-import org.apache.atlas.listener.EntityChangeListener;
-import org.apache.atlas.listener.TypesChangeListener;
-import org.apache.atlas.repository.MetadataRepository;
-import org.apache.atlas.repository.audit.InMemoryEntityAuditRepository;
-import org.apache.atlas.repository.graph.AtlasGraphProvider;
-import org.apache.atlas.repository.graph.GraphBackedMetadataRepository;
-import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
-import org.apache.atlas.repository.graph.GraphHelper;
-import org.apache.atlas.repository.graphdb.AtlasGraph;
-import org.apache.atlas.repository.graphdb.GremlinVersion;
-import org.apache.atlas.repository.typestore.GraphBackedTypeStore;
-import org.apache.atlas.repository.typestore.ITypeStore;
-import org.apache.atlas.services.DefaultMetadataService;
-import org.apache.atlas.services.MetadataService;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.IInstance;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.InstanceSerialization;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.types.*;
-import org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
-import org.apache.atlas.typesystem.types.cache.DefaultTypeCache;
-import org.apache.atlas.typesystem.types.cache.TypeCache;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
-import org.apache.atlas.util.AtlasRepositoryConfiguration;
-import org.apache.commons.configuration.Configuration;
-import org.apache.commons.lang.RandomStringUtils;
-import org.codehaus.jettison.json.JSONArray;
-import org.testng.Assert;
-import org.testng.SkipException;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.lang.reflect.InvocationHandler;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.lang.reflect.Proxy;
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import static org.apache.atlas.typesystem.types.utils.TypesUtil.*;
-import static org.testng.Assert.assertEquals;
-
-/**
- * Test utility class.
- */
-public final class TestUtils {
-
- public static final long TEST_DATE_IN_LONG = 1418265358440L;
-
-
- public static final String EMPLOYEES_ATTR = "employees";
- public static final String DEPARTMENT_ATTR = "department";
- public static final String ASSETS_ATTR = "assets";
-
- public static final String POSITIONS_ATTR = "positions";
- public static final String ASSET_TYPE = "TestAsset";
-
- public static final String DATABASE_TYPE = "hive_database";
- public static final String DATABASE_NAME = "foo";
- public static final String TABLE_TYPE = "hive_table";
- public static final String PROCESS_TYPE = "hive_process";
- public static final String COLUMN_TYPE = "column_type";
- public static final String TABLE_NAME = "bar";
- public static final String CLASSIFICATION = "classification";
- public static final String PII = "PII";
- public static final String SUPER_TYPE_NAME = "Base";
- public static final String STORAGE_DESC_TYPE = "hive_storagedesc";
- public static final String PARTITION_STRUCT_TYPE = "partition_struct_type";
- public static final String PARTITION_CLASS_TYPE = "partition_class_type";
- public static final String SERDE_TYPE = "serdeType";
- public static final String COLUMNS_MAP = "columnsMap";
- public static final String COLUMNS_ATTR_NAME = "columns";
-
- public static final String NAME = "name";
-
- private TestUtils() {
- }
-
- /**
- * Dumps the graph in GSON format in the path returned.
- *
- * @param graph handle to graph
- * @return path to the dump file
- * @throws Exception
- */
- public static String dumpGraph(AtlasGraph<?,?> graph) throws Exception {
- File tempFile = File.createTempFile("graph", ".gson");
- System.out.println("tempFile.getPath() = " + tempFile.getPath());
- GraphHelper.dumpToLog(graph);
- FileOutputStream os = null;
- try {
- os = new FileOutputStream(tempFile);
- graph.exportToGson(os);
- }
- finally {
- if(os != null) {
- try {
- os.close();
- }
- catch(IOException e) {
- e.printStackTrace();
- }
- }
- }
-
- return tempFile.getPath();
- }
-
- /**
- * Class Hierarchy is:
- * Department(name : String, employees : Array[Person])
- * Person(name : String, department : Department, manager : Manager)
- * Manager(subordinates : Array[Person]) extends Person
- * <p/>
- * Persons can have SecurityClearance(level : Int) clearance.
- */
- public static void defineDeptEmployeeTypes(TypeSystem ts) throws AtlasException {
-
- String _description = "_description";
- EnumTypeDefinition orgLevelEnum =
- new EnumTypeDefinition("OrgLevel", "OrgLevel"+_description, new EnumValue("L1", 1), new EnumValue("L2", 2));
-
- StructTypeDefinition addressDetails =
- createStructTypeDef("Address", "Address"+_description, createRequiredAttrDef("street", DataTypes.STRING_TYPE),
- createRequiredAttrDef("city", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<ClassType> deptTypeDef = createClassTypeDef(DEPARTMENT_TYPE, "Department"+_description, ImmutableSet.<String>of(),
- createRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- new AttributeDefinition(EMPLOYEES_ATTR, String.format("array<%s>", "Person"), Multiplicity.OPTIONAL,
- true, DEPARTMENT_ATTR),
- new AttributeDefinition(POSITIONS_ATTR, String.format("map<%s,%s>", DataTypes.STRING_TYPE.getName(), "Person"), Multiplicity.OPTIONAL,
- false, null)
- );
-
- HierarchicalTypeDefinition<ClassType> personTypeDef = createClassTypeDef("Person", "Person"+_description, ImmutableSet.<String>of(),
- createRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- createOptionalAttrDef("orgLevel", "OrgLevel"),
- createOptionalAttrDef("address", "Address"),
- new AttributeDefinition(DEPARTMENT_ATTR, "Department", Multiplicity.REQUIRED, false, EMPLOYEES_ATTR),
- new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates"),
- new AttributeDefinition("mentor", "Person", Multiplicity.OPTIONAL, false, null),
- new AttributeDefinition(ASSETS_ATTR, String.format("array<%s>", ASSET_TYPE) , Multiplicity.OPTIONAL, false, null),
- createOptionalAttrDef("birthday", DataTypes.DATE_TYPE),
- createOptionalAttrDef("hasPets", DataTypes.BOOLEAN_TYPE),
- createOptionalAttrDef("numberOfCars", DataTypes.BYTE_TYPE),
- createOptionalAttrDef("houseNumber", DataTypes.SHORT_TYPE),
- createOptionalAttrDef("carMileage", DataTypes.INT_TYPE),
- createOptionalAttrDef("shares", DataTypes.LONG_TYPE),
- createOptionalAttrDef("salary", DataTypes.DOUBLE_TYPE),
- createOptionalAttrDef("age", DataTypes.FLOAT_TYPE),
- createOptionalAttrDef("numberOfStarsEstimate", DataTypes.BIGINTEGER_TYPE),
- createOptionalAttrDef("approximationOfPi", DataTypes.BIGDECIMAL_TYPE),
- createOptionalAttrDef("isOrganDonor", DataTypes.BOOLEAN_TYPE)
- );
-
-
- HierarchicalTypeDefinition<ClassType> assetTypeDef = createClassTypeDef(ASSET_TYPE, "Asset"+_description, ImmutableSet.<String>of(),
- createRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- new AttributeDefinition("childAssets", String.format("array<%s>", ASSET_TYPE) , Multiplicity.OPTIONAL, false, null)
- );
-
- HierarchicalTypeDefinition<ClassType> managerTypeDef = createClassTypeDef("Manager", "Manager"+_description, ImmutableSet.of("Person"),
- new AttributeDefinition("subordinates", String.format("array<%s>", "Person"), Multiplicity.COLLECTION,
- false, "manager"));
-
- HierarchicalTypeDefinition<TraitType> securityClearanceTypeDef =
- createTraitTypeDef("SecurityClearance", "SecurityClearance"+_description, ImmutableSet.<String>of(),
- createRequiredAttrDef("level", DataTypes.INT_TYPE));
-
- ts.defineTypes(ImmutableList.of(orgLevelEnum), ImmutableList.of(addressDetails),
- ImmutableList.of(securityClearanceTypeDef),
- ImmutableList.of(deptTypeDef, personTypeDef, managerTypeDef, assetTypeDef));
- }
-
- public static final String DEPARTMENT_TYPE = "Department";
- public static final String PERSON_TYPE = "Person";
-
- public static ITypedReferenceableInstance createDeptEg1(TypeSystem ts) throws AtlasException {
- Referenceable hrDept = new Referenceable(DEPARTMENT_TYPE);
- Referenceable john = new Referenceable(PERSON_TYPE);
-
- Referenceable jane = new Referenceable("Manager", "SecurityClearance");
- Referenceable johnAddr = new Referenceable("Address");
- Referenceable janeAddr = new Referenceable("Address");
- Referenceable julius = new Referenceable("Manager");
- Referenceable juliusAddr = new Referenceable("Address");
- Referenceable max = new Referenceable("Person");
- Referenceable maxAddr = new Referenceable("Address");
-
- hrDept.set(NAME, "hr");
- john.set(NAME, "John");
- john.set(DEPARTMENT_ATTR, hrDept);
- johnAddr.set("street", "Stewart Drive");
- johnAddr.set("city", "Sunnyvale");
- john.set("address", johnAddr);
-
- john.set("birthday",new Date(1950, 5, 15));
- john.set("isOrganDonor", true);
- john.set("hasPets", true);
- john.set("numberOfCars", 1);
- john.set("houseNumber", 153);
- john.set("carMileage", 13364);
- john.set("shares", 15000);
- john.set("salary", 123345.678);
- john.set("age", 50);
- john.set("numberOfStarsEstimate", new BigInteger("1000000000000000000000"));
- john.set("approximationOfPi", new BigDecimal("3.141592653589793238462643383279502884197169399375105820974944592307816406286"));
-
- jane.set(NAME, "Jane");
- jane.set(DEPARTMENT_ATTR, hrDept);
- janeAddr.set("street", "Great America Parkway");
- janeAddr.set("city", "Santa Clara");
- jane.set("address", janeAddr);
- janeAddr.set("street", "Great America Parkway");
-
- julius.set(NAME, "Julius");
- julius.set(DEPARTMENT_ATTR, hrDept);
- juliusAddr.set("street", "Madison Ave");
- juliusAddr.set("city", "Newtonville");
- julius.set("address", juliusAddr);
- julius.set("subordinates", ImmutableList.<Referenceable>of());
-
- max.set(NAME, "Max");
- max.set(DEPARTMENT_ATTR, hrDept);
- maxAddr.set("street", "Ripley St");
- maxAddr.set("city", "Newton");
- max.set("address", maxAddr);
- max.set("manager", jane);
- max.set("mentor", julius);
- max.set("birthday",new Date(1979, 3, 15));
- max.set("isOrganDonor", true);
- max.set("hasPets", true);
- max.set("age", 36);
- max.set("numberOfCars", 2);
- max.set("houseNumber", 17);
- max.set("carMileage", 13);
- max.set("shares", Long.MAX_VALUE);
- max.set("salary", Double.MAX_VALUE);
- max.set("numberOfStarsEstimate", new BigInteger("1000000000000000000000000000000"));
- max.set("approximationOfPi", new BigDecimal("3.1415926535897932"));
-
- john.set("manager", jane);
- john.set("mentor", max);
- hrDept.set(EMPLOYEES_ATTR, ImmutableList.of(john, jane, julius, max));
-
- jane.set("subordinates", ImmutableList.of(john, max));
-
- jane.getTrait("SecurityClearance").set("level", 1);
-
- ClassType deptType = ts.getDataType(ClassType.class, "Department");
- ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
- Assert.assertNotNull(hrDept2);
-
- return hrDept2;
- }
-
-
-
- public static TypesDef simpleType(){
- HierarchicalTypeDefinition<ClassType> superTypeDefinition =
- createClassTypeDef("h_type", ImmutableSet.<String>of(),
- createOptionalAttrDef("attr", DataTypes.STRING_TYPE));
-
- StructTypeDefinition structTypeDefinition = new StructTypeDefinition("s_type", "structType",
- new AttributeDefinition[]{createRequiredAttrDef(NAME, DataTypes.STRING_TYPE)});
-
- HierarchicalTypeDefinition<TraitType> traitTypeDefinition =
- createTraitTypeDef("t_type", "traitType", ImmutableSet.<String>of());
-
- EnumValue values[] = {new EnumValue("ONE", 1),};
-
- EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("e_type", "enumType", values);
- return TypesUtil.getTypesDef(ImmutableList.of(enumTypeDefinition), ImmutableList.of(structTypeDefinition),
- ImmutableList.of(traitTypeDefinition), ImmutableList.of(superTypeDefinition));
- }
-
- public static TypesDef simpleTypeUpdated(){
- HierarchicalTypeDefinition<ClassType> superTypeDefinition =
- createClassTypeDef("h_type", ImmutableSet.<String>of(),
- createOptionalAttrDef("attr", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<ClassType> newSuperTypeDefinition =
- createClassTypeDef("new_h_type", ImmutableSet.<String>of(),
- createOptionalAttrDef("attr", DataTypes.STRING_TYPE));
-
- StructTypeDefinition structTypeDefinition = new StructTypeDefinition("s_type", "structType",
- new AttributeDefinition[]{createRequiredAttrDef(NAME, DataTypes.STRING_TYPE)});
-
- HierarchicalTypeDefinition<TraitType> traitTypeDefinition =
- createTraitTypeDef("t_type", "traitType", ImmutableSet.<String>of());
-
- EnumValue values[] = {new EnumValue("ONE", 1),};
-
- EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("e_type", "enumType", values);
- return TypesUtil.getTypesDef(ImmutableList.of(enumTypeDefinition), ImmutableList.of(structTypeDefinition),
- ImmutableList.of(traitTypeDefinition), ImmutableList.of(superTypeDefinition, newSuperTypeDefinition));
- }
-
- public static TypesDef simpleTypeUpdatedDiff() {
- HierarchicalTypeDefinition<ClassType> newSuperTypeDefinition =
- createClassTypeDef("new_h_type", ImmutableSet.<String>of(),
- createOptionalAttrDef("attr", DataTypes.STRING_TYPE));
-
- return TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(), ImmutableList.of(newSuperTypeDefinition));
- }
-
- public static TypesDef defineHiveTypes() {
- String _description = "_description";
- HierarchicalTypeDefinition<ClassType> superTypeDefinition =
- createClassTypeDef(SUPER_TYPE_NAME, ImmutableSet.<String>of(),
- createOptionalAttrDef("namespace", DataTypes.STRING_TYPE),
- createOptionalAttrDef("cluster", DataTypes.STRING_TYPE),
- createOptionalAttrDef("colo", DataTypes.STRING_TYPE));
- HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
- createClassTypeDef(DATABASE_TYPE, DATABASE_TYPE + _description,ImmutableSet.of(SUPER_TYPE_NAME),
- TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- createOptionalAttrDef("created", DataTypes.DATE_TYPE),
- createOptionalAttrDef("isReplicated", DataTypes.BOOLEAN_TYPE),
- new AttributeDefinition("parameters", new DataTypes.MapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.OPTIONAL, false, null),
- createRequiredAttrDef("description", DataTypes.STRING_TYPE));
-
-
- StructTypeDefinition structTypeDefinition = new StructTypeDefinition("serdeType", "serdeType" + _description,
- new AttributeDefinition[]{createRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- createRequiredAttrDef("serde", DataTypes.STRING_TYPE),
- createOptionalAttrDef("description", DataTypes.STRING_TYPE)});
-
- EnumValue values[] = {new EnumValue("MANAGED", 1), new EnumValue("EXTERNAL", 2),};
-
- EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("tableType", "tableType" + _description, values);
-
- HierarchicalTypeDefinition<ClassType> columnsDefinition =
- createClassTypeDef(COLUMN_TYPE, ImmutableSet.<String>of(),
- createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- createRequiredAttrDef("type", DataTypes.STRING_TYPE));
-
- StructTypeDefinition partitionDefinition = new StructTypeDefinition("partition_struct_type", "partition_struct_type" + _description,
- new AttributeDefinition[]{createRequiredAttrDef(NAME, DataTypes.STRING_TYPE),});
-
- AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
- new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
- null),
- new AttributeDefinition("inputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
- null),
- new AttributeDefinition("outputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
- null),
- new AttributeDefinition("compressed", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.REQUIRED, false,
- null),
- new AttributeDefinition("numBuckets", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false,
- null),
- };
-
- HierarchicalTypeDefinition<ClassType> storageDescClsDef =
- new HierarchicalTypeDefinition<>(ClassType.class, STORAGE_DESC_TYPE, STORAGE_DESC_TYPE + _description,
- ImmutableSet.of(SUPER_TYPE_NAME), attributeDefinitions);
-
- AttributeDefinition[] partClsAttributes = new AttributeDefinition[]{
- new AttributeDefinition("values", DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
- Multiplicity.OPTIONAL, false, null),
- new AttributeDefinition("table", TABLE_TYPE, Multiplicity.REQUIRED, false, null),
- new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
- null),
- new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
- null),
- new AttributeDefinition("sd", STORAGE_DESC_TYPE, Multiplicity.REQUIRED, true,
- null),
- new AttributeDefinition("columns", DataTypes.arrayTypeName(COLUMN_TYPE),
- Multiplicity.OPTIONAL, true, null),
- new AttributeDefinition("parameters", new DataTypes.MapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.OPTIONAL, false, null),};
-
- HierarchicalTypeDefinition<ClassType> partClsDef =
- new HierarchicalTypeDefinition<>(ClassType.class, "partition_class_type", "partition_class_type" + _description,
- ImmutableSet.of(SUPER_TYPE_NAME), partClsAttributes);
-
- HierarchicalTypeDefinition<ClassType> processClsType =
- new HierarchicalTypeDefinition<>(ClassType.class, PROCESS_TYPE, PROCESS_TYPE + _description,
- ImmutableSet.<String>of(), new AttributeDefinition[]{
- new AttributeDefinition("outputs", "array<" + TABLE_TYPE + ">", Multiplicity.OPTIONAL, false, null)
- });
-
- HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
- createClassTypeDef(TABLE_TYPE, TABLE_TYPE + _description, ImmutableSet.of(SUPER_TYPE_NAME),
- TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE),
- createRequiredAttrDef("description", DataTypes.STRING_TYPE),
- createRequiredAttrDef("type", DataTypes.STRING_TYPE),
- createOptionalAttrDef("created", DataTypes.DATE_TYPE),
- // enum
- new AttributeDefinition("tableType", "tableType", Multiplicity.REQUIRED, false, null),
- // array of strings
- new AttributeDefinition("columnNames",
- String.format("array<%s>", DataTypes.STRING_TYPE.getName()), Multiplicity.OPTIONAL,
- false, null),
- // array of classes
- new AttributeDefinition("columns", String.format("array<%s>", COLUMN_TYPE),
- Multiplicity.OPTIONAL, true, null),
- // array of structs
- new AttributeDefinition("partitions", String.format("array<%s>", "partition_struct_type"),
- Multiplicity.OPTIONAL, true, null),
- // map of primitives
- new AttributeDefinition("parametersMap",
- DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(), DataTypes.STRING_TYPE.getName()),
- Multiplicity.OPTIONAL, true, null),
- //map of classes -
- new AttributeDefinition(COLUMNS_MAP,
- DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
- COLUMN_TYPE),
- Multiplicity.OPTIONAL, true, null),
- //map of structs
- new AttributeDefinition("partitionsMap",
- DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
- "partition_struct_type"),
- Multiplicity.OPTIONAL, true, null),
- // struct reference
- new AttributeDefinition("serde1", "serdeType", Multiplicity.OPTIONAL, false, null),
- new AttributeDefinition("serde2", "serdeType", Multiplicity.OPTIONAL, false, null),
- // class reference
- new AttributeDefinition("database", DATABASE_TYPE, Multiplicity.REQUIRED, false, null),
- //class reference as composite
- new AttributeDefinition("databaseComposite", DATABASE_TYPE, Multiplicity.OPTIONAL, true, null));
-
- HierarchicalTypeDefinition<TraitType> piiTypeDefinition =
- createTraitTypeDef(PII, PII + _description, ImmutableSet.<String>of());
-
- HierarchicalTypeDefinition<TraitType> classificationTypeDefinition =
- createTraitTypeDef(CLASSIFICATION, CLASSIFICATION + _description, ImmutableSet.<String>of(),
- createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
-
- HierarchicalTypeDefinition<TraitType> fetlClassificationTypeDefinition =
- createTraitTypeDef("fetl" + CLASSIFICATION, "fetl" + CLASSIFICATION + _description, ImmutableSet.of(CLASSIFICATION),
- createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
-
- return TypesUtil.getTypesDef(ImmutableList.of(enumTypeDefinition),
- ImmutableList.of(structTypeDefinition, partitionDefinition),
- ImmutableList.of(classificationTypeDefinition, fetlClassificationTypeDefinition, piiTypeDefinition),
- ImmutableList.of(superTypeDefinition, databaseTypeDefinition, columnsDefinition, tableTypeDefinition,
- storageDescClsDef, partClsDef, processClsType));
- }
-
- public static Collection<IDataType> createHiveTypes(TypeSystem typeSystem) throws Exception {
- if (!typeSystem.isRegistered(TABLE_TYPE)) {
- TypesDef typesDef = defineHiveTypes();
- return typeSystem.defineTypes(typesDef).values();
- }
- return new ArrayList<>();
- }
-
- public static final String randomString() {
- return randomString(10);
- }
-
- public static final String randomString(int count) {
- final String prefix = "r";
-
- return prefix + RandomStringUtils.randomAlphanumeric(count - prefix.length()); // ensure that the string starts with a letter
- }
-
- public static Referenceable createDBEntity() {
- Referenceable entity = new Referenceable(DATABASE_TYPE);
- String dbName = RandomStringUtils.randomAlphanumeric(10);
- entity.set(NAME, dbName);
- entity.set("description", "us db");
- return entity;
- }
-
- public static Referenceable createTableEntity(String dbId) {
- Referenceable entity = new Referenceable(TABLE_TYPE);
- String tableName = RandomStringUtils.randomAlphanumeric(10);
- entity.set(NAME, tableName);
- entity.set("description", "random table");
- entity.set("type", "type");
- entity.set("tableType", "MANAGED");
- entity.set("database", new Id(dbId, 0, DATABASE_TYPE));
- entity.set("created", new Date());
- return entity;
- }
-
- public static Referenceable createColumnEntity() {
- Referenceable entity = new Referenceable(COLUMN_TYPE);
- entity.set(NAME, RandomStringUtils.randomAlphanumeric(10));
- entity.set("type", "VARCHAR(32)");
- return entity;
- }
-
- /**
- * Creates an entity in the graph and does basic validation
- * of the GuidMapping that was created in the process.
- *
- */
- public static String createInstance(MetadataService metadataService, Referenceable entity) throws Exception {
- RequestContext.createContext();
-
- String entityjson = InstanceSerialization.toJson(entity, true);
- JSONArray entitiesJson = new JSONArray();
- entitiesJson.put(entityjson);
- CreateUpdateEntitiesResult creationResult = metadataService.createEntities(entitiesJson.toString());
- Map<String,String> guidMap = creationResult.getGuidMapping().getGuidAssignments();
- Map<Id, Referenceable> referencedObjects = findReferencedObjects(entity);
-
- for(Map.Entry<Id,Referenceable> entry : referencedObjects.entrySet()) {
- Id foundId = entry.getKey();
- if(foundId.isUnassigned()) {
- String guid = guidMap.get(entry.getKey()._getId());
- Referenceable obj = entry.getValue();
- loadAndDoSimpleValidation(guid,obj, metadataService);
- }
- }
- List<String> guids = creationResult.getCreatedEntities();
- if (guids != null && guids.size() > 0) {
- return guids.get(guids.size() - 1);
- }
- return null;
- }
-
- private static Map<Id,Referenceable> findReferencedObjects(Referenceable ref) {
- Map<Id, Referenceable> result = new HashMap<>();
- findReferencedObjects(ref, result);
- return result;
- }
-
- private static void findReferencedObjects(Referenceable ref, Map<Id, Referenceable> seen) {
-
- Id guid = ref.getId();
- if(seen.containsKey(guid)) {
- return;
- }
- seen.put(guid, ref);
- for(Map.Entry<String, Object> attr : ref.getValuesMap().entrySet()) {
- Object value = attr.getValue();
- if(value instanceof Referenceable) {
- findReferencedObjects((Referenceable)value, seen);
- }
- else if(value instanceof List) {
- for(Object o : (List)value) {
- if(o instanceof Referenceable) {
- findReferencedObjects((Referenceable)o, seen);
- }
- }
- }
- else if(value instanceof Map) {
- for(Object o : ((Map)value).values()) {
- if(o instanceof Referenceable) {
- findReferencedObjects((Referenceable)o, seen);
- }
- }
- }
- }
- }
-
- /**
- * Clears the state in the request context.
- *
- */
- public static void resetRequestContext() {
- //reset the context while preserving the user
- String user = RequestContext.get().getUser();
- RequestContext.createContext();
- RequestContext.get().setUser(user);
- }
-
- /**
- * Triggers the Atlas initialization process using the specified MetadataRepository.
- * This causes the built-in types and their indices to be created.
- */
- public static void setupGraphProvider(MetadataRepository repo) throws AtlasException {
- TypeCache typeCache = null;
- try {
- typeCache = AtlasRepositoryConfiguration.getTypeCache().newInstance();
- }
- catch(Throwable t) {
- typeCache = new DefaultTypeCache();
- }
- final GraphBackedSearchIndexer indexer = new GraphBackedSearchIndexer(new AtlasTypeRegistry());
-
- Configuration config = ApplicationProperties.get();
- ITypeStore typeStore = new GraphBackedTypeStore(AtlasGraphProvider.getGraphInstance());
- DefaultMetadataService defaultMetadataService = new DefaultMetadataService(repo,
- typeStore,
- new HashSet<TypesChangeListener>() {{ add(indexer); }},
- new HashSet<EntityChangeListener>(),
- TypeSystem.getInstance(),
- config,
- typeCache,
- // Fixme: Can we work with Noop
- new InMemoryEntityAuditRepository());
-
- //commit the created types
- getGraph().commit();
-
- }
-
- public static AtlasGraph getGraph() {
-
- return AtlasGraphProvider.getGraphInstance();
-
- }
-
- /**
- * Adds a proxy wrapper around the specified MetadataService that automatically
- * resets the request context before every call.
- *
- * @param delegate
- * @return
- */
- public static MetadataService addSessionCleanupWrapper(final MetadataService delegate) {
-
- return (MetadataService)Proxy.newProxyInstance(Thread.currentThread().getContextClassLoader(),
- new Class[]{MetadataService.class}, new InvocationHandler() {
-
- @Override
- public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
-
- try {
- resetRequestContext();
- Object result = method.invoke(delegate, args);
-
- return result;
- }
- catch(InvocationTargetException e) {
- e.getCause().printStackTrace();
- throw e.getCause();
- }
- catch(Throwable t) {
- t.printStackTrace();
- throw t;
- }
- }
-
- });
- }
-
- /**
- * Adds a proxy wrapper around the specified MetadataRepository that automatically
- * resets the request context before every call and either commits or rolls
- * back the graph transaction after every call.
- *
- * @param delegate
- * @return
- */
- public static MetadataRepository addTransactionWrapper(final MetadataRepository delegate) {
- return (MetadataRepository)Proxy.newProxyInstance(Thread.currentThread().getContextClassLoader(),
- new Class[]{MetadataRepository.class}, new InvocationHandler() {
-
- @Override
- public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
- boolean useTransaction = GraphBackedMetadataRepository.class.getMethod(
- method.getName(), method.getParameterTypes())
- .isAnnotationPresent(GraphTransaction.class);
- try {
- resetRequestContext();
- Object result = method.invoke(delegate, args);
- if(useTransaction) {
- System.out.println("Committing changes");
- getGraph().commit();
- System.out.println("Commit succeeded.");
- }
- return result;
- }
- catch(InvocationTargetException e) {
- e.getCause().printStackTrace();
- if(useTransaction) {
- System.out.println("Rolling back changes due to exception.");
- getGraph().rollback();
- }
- throw e.getCause();
- }
- catch(Throwable t) {
- t.printStackTrace();
- if(useTransaction) {
- System.out.println("Rolling back changes due to exception.");
- getGraph().rollback();
- }
- throw t;
- }
- }
-
- });
- }
-
- /**
- * Loads the entity and does sanity testing of the GuidMapping that was
- * created during the operation.
- *
- */
- public static ITypedReferenceableInstance loadAndDoSimpleValidation(String guid, Referenceable original, MetadataRepository repositoryService) throws AtlasException {
- ITypedReferenceableInstance loaded = repositoryService.getEntityDefinition(guid);
- doSimpleValidation(original, loaded);
- return loaded;
- }
-
- /**
- * Loads the entity and does sanity testing of the GuidMapping that was
- * created during the operation.
- *
- */
- public static ITypedReferenceableInstance loadAndDoSimpleValidation(String guid, Referenceable original, MetadataService repositoryService) throws AtlasException {
- ITypedReferenceableInstance loaded = repositoryService.getEntityDefinition(guid);
- doSimpleValidation(original, loaded);
- return loaded;
-
- }
-
- private static void doSimpleValidation(Referenceable original, IInstance loaded) throws AtlasException {
-
- assertEquals(loaded.getTypeName(), original.getTypeName());
- ClassType ct = TypeSystem.getInstance().getDataType(ClassType.class, loaded.getTypeName());
-
- //compare primitive fields
- for(AttributeInfo field : ct.fieldMapping.fields.values()) {
- if(field.dataType().getTypeCategory() == TypeCategory.PRIMITIVE) {
- if(original.get(field.name) != null) {
- Object rawLoadedValue = loaded.get(field.name);
- Object rawProvidedValue = original.get(field.name);
- Object convertedLoadedValue = field.dataType().convert(rawLoadedValue, Multiplicity.REQUIRED);
- Object convertedProvidedValue = field.dataType().convert(rawProvidedValue, Multiplicity.REQUIRED);
-
- assertEquals(convertedLoadedValue, convertedProvidedValue);
- }
- }
- }
- }
-
- /**
- * Validates that the two String Collections contain the same items, without
- * regard to order.
- *
- */
- public static void assertContentsSame(Collection<String> actual, Collection<String> expected) {
- assertEquals(actual.size(), expected.size());
- Set<String> checker = new HashSet<>();
- checker.addAll(expected);
- checker.removeAll(actual);
- assertEquals(checker.size(), 0);
- }
-
- public static void skipForGremlin3EnabledGraphDb() throws SkipException {
- //ATLAS-1579 Currently, some tests are skipped for titan1 backened. As these tests are hard coded to use Gremlin2. See ATLAS-1579, ATLAS-1591 once it is fixed, please remove it.
- if (TestUtils.getGraph().getSupportedGremlinVersion() == GremlinVersion.THREE) {
- throw new SkipException ("This test requires Gremlin2. Skipping test ");
- }
- }
-
-}
[32/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unnecessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/converters/AtlasEntityFormatConverter.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasEntityFormatConverter.java b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasEntityFormatConverter.java
index 1ce6168..a529dc1 100644
--- a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasEntityFormatConverter.java
+++ b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasEntityFormatConverter.java
@@ -18,21 +18,20 @@
package org.apache.atlas.repository.converters;
import org.apache.atlas.AtlasErrorCode;
-import org.apache.atlas.AtlasException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.instance.AtlasClassification;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntity.Status;
import org.apache.atlas.model.instance.AtlasObjectId;
+import org.apache.atlas.v1.model.instance.AtlasSystemAttributes;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.type.AtlasClassificationType;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.Id.EntityState;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
@@ -57,37 +56,29 @@ public class AtlasEntityFormatConverter extends AtlasStructFormatConverter {
if (v1Obj != null) {
AtlasEntityType entityType = (AtlasEntityType) type;
- if (v1Obj instanceof IReferenceableInstance) {
- IReferenceableInstance entRef = (IReferenceableInstance) v1Obj;
+ if (v1Obj instanceof Referenceable) {
+ Referenceable entRef = (Referenceable)v1Obj;
- String guid = entRef.getId()._getId();
+ String guid = entRef.getId().getId();
if (!context.entityExists(guid)) {
- Map<String, Object> v1Attribs = null;
+ entity = new AtlasEntity(entRef.getTypeName(), super.fromV1ToV2(entityType, entRef.getValues(), context));
- try {
- v1Attribs = entRef.getValuesMap();
- } catch (AtlasException excp) {
- LOG.error("IReferenceableInstance.getValuesMap() failed", excp);
- }
-
- entity = new AtlasEntity(entRef.getTypeName(),
- super.fromV1ToV2(entityType, v1Attribs, context));
- entity.setGuid(entRef.getId()._getId());
+ entity.setGuid(entRef.getId().getId());
entity.setStatus(convertState(entRef.getId().getState()));
- entity.setCreatedBy(entRef.getSystemAttributes().createdBy);
- entity.setCreateTime(entRef.getSystemAttributes().createdTime);
- entity.setUpdatedBy(entRef.getSystemAttributes().modifiedBy);
- entity.setUpdateTime(entRef.getSystemAttributes().modifiedTime);
- entity.setVersion((long) entRef.getId().version);
+ entity.setCreatedBy(entRef.getSystemAttributes().getCreatedBy());
+ entity.setCreateTime(entRef.getSystemAttributes().getCreatedTime());
+ entity.setUpdatedBy(entRef.getSystemAttributes().getModifiedBy());
+ entity.setUpdateTime(entRef.getSystemAttributes().getModifiedTime());
+ entity.setVersion((long) entRef.getId().getVersion());
- if (CollectionUtils.isNotEmpty(entRef.getTraits())) {
+ if (CollectionUtils.isNotEmpty(entRef.getTraitNames())) {
List<AtlasClassification> classifications = new ArrayList<>();
- AtlasFormatConverter traitConverter = converterRegistry.getConverter(TypeCategory.CLASSIFICATION);
+ AtlasFormatConverter traitConverter = converterRegistry.getConverter(TypeCategory.CLASSIFICATION);
- for (String traitName : entRef.getTraits()) {
- IStruct trait = entRef.getTrait(traitName);
- AtlasType classifiType = typeRegistry.getType(traitName);
+ for (String traitName : entRef.getTraitNames()) {
+ Struct trait = entRef.getTraits().get(traitName);
+ AtlasType classifiType = typeRegistry.getType(traitName);
AtlasClassification classification = (AtlasClassification) traitConverter.fromV1ToV2(trait, classifiType, context);
classifications.add(classification);
@@ -95,26 +86,16 @@ public class AtlasEntityFormatConverter extends AtlasStructFormatConverter {
entity.setClassifications(classifications);
}
- } else {
- entity = context.getById(guid);
}
+
} else {
- throw new AtlasBaseException(AtlasErrorCode.UNEXPECTED_TYPE, "IReferenceableInstance",
+ throw new AtlasBaseException(AtlasErrorCode.UNEXPECTED_TYPE, "Referenceable",
v1Obj.getClass().getCanonicalName());
}
}
return entity;
}
- private Status convertState(EntityState state){
- Status status = Status.ACTIVE;
- if(state != null && state.equals(EntityState.DELETED)){
- status = Status.DELETED;
- }
- LOG.debug("Setting state to {}", state);
- return status;
- }
-
@Override
public Object fromV2ToV1(Object v2Obj, AtlasType type, ConverterContext context) throws AtlasBaseException {
Object ret = null;
@@ -141,9 +122,25 @@ public class AtlasEntityFormatConverter extends AtlasStructFormatConverter {
} else if (v2Obj instanceof AtlasEntity) {
AtlasEntity entity = (AtlasEntity) v2Obj;
- ret = new Referenceable(entity.getGuid(), entity.getTypeName(),
- fromV2ToV1(entityType, entity.getAttributes(), context));
+ Referenceable referenceable = new Referenceable(entity.getGuid(), entity.getTypeName(),
+ fromV2ToV1(entityType, entity.getAttributes(), context),
+ new AtlasSystemAttributes(entity.getCreatedBy(), entity.getUpdatedBy(), entity.getCreateTime(), entity.getUpdateTime()));
+ if (CollectionUtils.isNotEmpty(entity.getClassifications())) {
+ for (AtlasClassification classification : entity.getClassifications()) {
+ String traitName = classification.getTypeName();
+ AtlasClassificationType classificationType = typeRegistry.getClassificationTypeByName(traitName);
+ AtlasFormatConverter formatConverter = classificationType != null ? converterRegistry.getConverter(classificationType.getTypeCategory()) : null;
+ Struct trait = formatConverter != null ? (Struct)formatConverter.fromV2ToV1(classification, classificationType, context) : null;
+
+ if (trait != null) {
+ referenceable.getTraitNames().add(trait.getTypeName());
+ referenceable.getTraits().put(trait.getTypeName(), trait);
+ }
+ }
+ }
+
+ ret = referenceable;
} else if (v2Obj instanceof AtlasObjectId) { // transient-id
AtlasEntity entity = context.getById(((AtlasObjectId) v2Obj).getGuid());
if ( entity == null) {
@@ -158,4 +155,8 @@ public class AtlasEntityFormatConverter extends AtlasStructFormatConverter {
}
return ret;
}
+
+ private Status convertState(Id.EntityState state){
+ return (state != null && state.equals(Id.EntityState.DELETED)) ? Status.DELETED : Status.ACTIVE;
+ }
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/converters/AtlasEnumFormatConverter.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasEnumFormatConverter.java b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasEnumFormatConverter.java
index 734bd0c..da76c5a 100644
--- a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasEnumFormatConverter.java
+++ b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasEnumFormatConverter.java
@@ -21,10 +21,10 @@ package org.apache.atlas.repository.converters;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.typedef.AtlasEnumDef.AtlasEnumElementDef;
+import org.apache.atlas.v1.model.typedef.EnumTypeDefinition.EnumValue;
import org.apache.atlas.type.AtlasEnumType;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.types.EnumValue;
import java.util.Map;
@@ -47,10 +47,10 @@ public class AtlasEnumFormatConverter extends AtlasAbstractFormatConverter {
if (v1Obj instanceof EnumValue) {
EnumValue enumValue = (EnumValue)v1Obj;
- v1Value = enumValue.value;
+ v1Value = enumValue.getValue();
if (v1Value == null) {
- v1Value = enumValue.ordinal;
+ v1Value = enumValue.getOrdinal();
}
} else if (v1Obj instanceof Map) {
Map mapValue = (Map)v1Obj;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java
index 9bde5db..2884f8f 100644
--- a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java
+++ b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasInstanceConverter.java
@@ -20,6 +20,7 @@ package org.apache.atlas.repository.converters;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
import org.apache.atlas.CreateUpdateEntitiesResult;
+import org.apache.atlas.RequestContextV1;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.instance.AtlasClassification;
@@ -27,27 +28,17 @@ import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntitiesWithExtInfo;
import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.instance.EntityMutationResponse;
-import org.apache.atlas.model.instance.EntityMutations;
import org.apache.atlas.model.instance.EntityMutations.EntityOperation;
import org.apache.atlas.model.instance.GuidMapping;
import org.apache.atlas.model.legacy.EntityResult;
+import org.apache.atlas.repository.store.graph.v1.EntityGraphRetriever;
+import org.apache.atlas.v1.model.instance.Referenceable;
+import org.apache.atlas.v1.model.instance.Struct;
import org.apache.atlas.repository.converters.AtlasFormatConverter.ConverterContext;
-import org.apache.atlas.services.MetadataService;
import org.apache.atlas.type.AtlasClassificationType;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.ITypedReferenceableInstance;
-import org.apache.atlas.typesystem.ITypedStruct;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.Struct;
-import org.apache.atlas.typesystem.exception.EntityExistsException;
-import org.apache.atlas.typesystem.exception.EntityNotFoundException;
-import org.apache.atlas.typesystem.exception.TraitNotFoundException;
-import org.apache.atlas.typesystem.exception.TypeNotFoundException;
-import org.apache.atlas.typesystem.types.ValueConversionException;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.slf4j.Logger;
@@ -66,54 +57,55 @@ import java.util.Map;
@Singleton
@Component
public class AtlasInstanceConverter {
-
private static final Logger LOG = LoggerFactory.getLogger(AtlasInstanceConverter.class);
- private AtlasTypeRegistry typeRegistry;
-
- private AtlasFormatConverters instanceFormatters;
-
- private MetadataService metadataService;
+ private final AtlasTypeRegistry typeRegistry;
+ private final AtlasFormatConverters instanceFormatters;
+ private final EntityGraphRetriever entityGraphRetriever;
@Inject
- public AtlasInstanceConverter(AtlasTypeRegistry typeRegistry, AtlasFormatConverters instanceFormatters, MetadataService metadataService) {
- this.typeRegistry = typeRegistry;
- this.instanceFormatters = instanceFormatters;
- this.metadataService = metadataService;
+ public AtlasInstanceConverter(AtlasTypeRegistry typeRegistry, AtlasFormatConverters instanceFormatters) {
+ this.typeRegistry = typeRegistry;
+ this.instanceFormatters = instanceFormatters;
+ this.entityGraphRetriever = new EntityGraphRetriever(typeRegistry);
}
- public ITypedReferenceableInstance[] getITypedReferenceables(Collection<AtlasEntity> entities) throws AtlasBaseException {
- ITypedReferenceableInstance[] entitiesInOldFormat = new ITypedReferenceableInstance[entities.size()];
+ public Referenceable[] getReferenceables(Collection<AtlasEntity> entities) throws AtlasBaseException {
+ Referenceable[] ret = new Referenceable[entities.size()];
AtlasFormatConverter.ConverterContext ctx = new AtlasFormatConverter.ConverterContext();
+
for(Iterator<AtlasEntity> i = entities.iterator(); i.hasNext(); ) {
ctx.addEntity(i.next());
}
Iterator<AtlasEntity> entityIterator = entities.iterator();
for (int i = 0; i < entities.size(); i++) {
- ITypedReferenceableInstance typedInstance = getITypedReferenceable(entityIterator.next());
- entitiesInOldFormat[i] = typedInstance;
+ ret[i] = getReferenceable(entityIterator.next(), ctx);
}
- return entitiesInOldFormat;
+
+ return ret;
}
- public ITypedReferenceableInstance getITypedReferenceable(AtlasEntity entity) throws AtlasBaseException {
- try {
- return metadataService.getEntityDefinition(entity.getGuid());
- } catch (AtlasException e) {
- LOG.error("Exception while getting a typed reference for the entity ", e);
- throw toAtlasBaseException(e);
- }
+ public Referenceable getReferenceable(AtlasEntity entity) throws AtlasBaseException {
+ return getReferenceable(entity, new ConverterContext());
+ }
+
+ public Referenceable getReferenceable(String guid) throws AtlasBaseException {
+ AtlasEntity.AtlasEntityWithExtInfo entity = getAndCacheEntity(guid);
+
+ return getReferenceable(entity);
}
- public ITypedReferenceableInstance getITypedReferenceable(String guid) throws AtlasBaseException {
- try {
- return metadataService.getEntityDefinition(guid);
- } catch (AtlasException e) {
- LOG.error("Exception while getting a typed reference for the entity ", e);
- throw toAtlasBaseException(e);
+ public Referenceable getReferenceable(AtlasEntity.AtlasEntityWithExtInfo entity) throws AtlasBaseException {
+ AtlasFormatConverter.ConverterContext ctx = new AtlasFormatConverter.ConverterContext();
+
+ ctx.addEntity(entity.getEntity());
+ for(Map.Entry<String, AtlasEntity> entry : entity.getReferredEntities().entrySet()) {
+ ctx.addEntity(entry.getValue());
}
+
+ return getReferenceable(entity.getEntity(), ctx);
}
public Referenceable getReferenceable(AtlasEntity entity, final ConverterContext ctx) throws AtlasBaseException {
@@ -124,31 +116,28 @@ public class AtlasInstanceConverter {
return ref;
}
- public ITypedStruct getTrait(AtlasClassification classification) throws AtlasBaseException {
+ public Struct getTrait(AtlasClassification classification) throws AtlasBaseException {
AtlasFormatConverter converter = instanceFormatters.getConverter(TypeCategory.CLASSIFICATION);
AtlasType classificationType = typeRegistry.getType(classification.getTypeName());
Struct trait = (Struct)converter.fromV2ToV1(classification, classificationType, new ConverterContext());
- try {
- return metadataService.createTraitInstance(trait);
- } catch (AtlasException e) {
- LOG.error("Exception while getting a typed reference for the entity ", e);
- throw toAtlasBaseException(e);
- }
+ return trait;
}
- public AtlasClassification getClassification(IStruct classification) throws AtlasBaseException {
- AtlasFormatConverter converter = instanceFormatters.getConverter(TypeCategory.CLASSIFICATION);
+ public AtlasClassification toAtlasClassification(Struct classification) throws AtlasBaseException {
+ AtlasFormatConverter converter = instanceFormatters.getConverter(TypeCategory.CLASSIFICATION);
AtlasClassificationType classificationType = typeRegistry.getClassificationTypeByName(classification.getTypeName());
+
if (classificationType == null) {
throw new AtlasBaseException(AtlasErrorCode.TYPE_NAME_INVALID, TypeCategory.CLASSIFICATION.name(), classification.getTypeName());
}
- AtlasClassification ret = (AtlasClassification)converter.fromV1ToV2(classification, classificationType, new AtlasFormatConverter.ConverterContext());
+
+ AtlasClassification ret = (AtlasClassification)converter.fromV1ToV2(classification, classificationType, new AtlasFormatConverter.ConverterContext());
return ret;
}
- public AtlasEntitiesWithExtInfo toAtlasEntity(IReferenceableInstance referenceable) throws AtlasBaseException {
+ public AtlasEntitiesWithExtInfo toAtlasEntity(Referenceable referenceable) throws AtlasBaseException {
AtlasEntityFormatConverter converter = (AtlasEntityFormatConverter) instanceFormatters.getConverter(TypeCategory.ENTITY);
AtlasEntityType entityType = typeRegistry.getEntityTypeByName(referenceable.getTypeName());
@@ -156,13 +145,6 @@ public class AtlasInstanceConverter {
throw new AtlasBaseException(AtlasErrorCode.TYPE_NAME_INVALID, TypeCategory.ENTITY.name(), referenceable.getTypeName());
}
- // validate
- try {
- metadataService.validateAndConvertToTypedInstance(referenceable, entityType.getTypeName());
- } catch (AtlasException excp) {
- throw toAtlasBaseException(excp);
- }
-
ConverterContext ctx = new ConverterContext();
AtlasEntity entity = converter.fromV1ToV2(referenceable, entityType, ctx);
@@ -171,106 +153,59 @@ public class AtlasInstanceConverter {
return ctx.getEntities();
}
- public static EntityMutationResponse toEntityMutationResponse(EntityResult entityResult) {
-
- CreateUpdateEntitiesResult result = new CreateUpdateEntitiesResult();
- result.setEntityResult(entityResult);
- return toEntityMutationResponse(result);
- }
-
- public static EntityMutationResponse toEntityMutationResponse(CreateUpdateEntitiesResult result) {
- EntityMutationResponse response = new EntityMutationResponse();
- for (String guid : result.getCreatedEntities()) {
- AtlasEntityHeader header = new AtlasEntityHeader();
- header.setGuid(guid);
- response.addEntity(EntityMutations.EntityOperation.CREATE, header);
- }
-
- for (String guid : result.getUpdatedEntities()) {
- AtlasEntityHeader header = new AtlasEntityHeader();
- header.setGuid(guid);
- response.addEntity(EntityMutations.EntityOperation.UPDATE, header);
- }
-
- for (String guid : result.getDeletedEntities()) {
- AtlasEntityHeader header = new AtlasEntityHeader();
- header.setGuid(guid);
- response.addEntity(EntityMutations.EntityOperation.DELETE, header);
- }
- GuidMapping guidMapping = result.getGuidMapping();
- if(guidMapping != null) {
- response.setGuidAssignments(guidMapping.getGuidAssignments());
- }
- return response;
- }
-
- public static AtlasBaseException toAtlasBaseException(AtlasException e) {
- if (e instanceof EntityExistsException) {
- return new AtlasBaseException(AtlasErrorCode.INSTANCE_ALREADY_EXISTS, e.getMessage());
- }
-
- if ( e instanceof EntityNotFoundException || e instanceof TraitNotFoundException) {
- return new AtlasBaseException(AtlasErrorCode.INSTANCE_NOT_FOUND, e.getMessage());
- }
-
- if ( e instanceof TypeNotFoundException) {
- return new AtlasBaseException(AtlasErrorCode.TYPE_NAME_NOT_FOUND, e.getMessage());
- }
-
- if (e instanceof ValueConversionException) {
- return new AtlasBaseException(AtlasErrorCode.INVALID_VALUE, e, e.getMessage());
- }
-
- return new AtlasBaseException(AtlasErrorCode.BAD_REQUEST, e.getMessage());
- }
-
public AtlasEntity.AtlasEntitiesWithExtInfo toAtlasEntities(List<Referenceable> referenceables) throws AtlasBaseException {
if (LOG.isDebugEnabled()) {
- LOG.debug("==> toAtlasEntities");
+ LOG.debug("==> toAtlasEntities({})", referenceables);
}
AtlasFormatConverter.ConverterContext context = new AtlasFormatConverter.ConverterContext();
+
for (Referenceable referenceable : referenceables) {
AtlasEntity entity = fromV1toV2Entity(referenceable, context);
context.addEntity(entity);
}
+
+ AtlasEntity.AtlasEntitiesWithExtInfo ret = context.getEntities();
+
if (LOG.isDebugEnabled()) {
- LOG.debug("<== toAtlasEntities");
+ LOG.debug("<== toAtlasEntities({}): ret=", referenceables, ret);
}
- return context.getEntities();
+ return ret;
}
- public AtlasEntitiesWithExtInfo toAtlasEntities(String entitiesJson) throws AtlasBaseException, AtlasException {
- ITypedReferenceableInstance[] referenceables = metadataService.deserializeClassInstances(entitiesJson);
- AtlasEntityFormatConverter converter = (AtlasEntityFormatConverter) instanceFormatters.getConverter(TypeCategory.ENTITY);
- ConverterContext context = new ConverterContext();
- AtlasEntitiesWithExtInfo ret = null;
+ public AtlasEntitiesWithExtInfo toAtlasEntities(String[] jsonEntities) throws AtlasBaseException, AtlasException {
+ Referenceable[] referenceables = new Referenceable[jsonEntities.length];
- if (referenceables != null) {
- for (IReferenceableInstance referenceable : referenceables) {
- AtlasEntityType entityType = typeRegistry.getEntityTypeByName(referenceable.getTypeName());
+ for (int i = 0; i < jsonEntities.length; i++) {
+ referenceables[i] = AtlasType.fromV1Json(jsonEntities[i], Referenceable.class);
+ }
- if (entityType == null) {
- throw new AtlasBaseException(AtlasErrorCode.TYPE_NAME_INVALID, TypeCategory.ENTITY.name(), referenceable.getTypeName());
- }
+ AtlasEntityFormatConverter converter = (AtlasEntityFormatConverter) instanceFormatters.getConverter(TypeCategory.ENTITY);
+ ConverterContext context = new ConverterContext();
- AtlasEntity entity = converter.fromV1ToV2(referenceable, entityType, context);
+ for (Referenceable referenceable : referenceables) {
+ AtlasEntityType entityType = typeRegistry.getEntityTypeByName(referenceable.getTypeName());
- context.addEntity(entity);
+ if (entityType == null) {
+ throw new AtlasBaseException(AtlasErrorCode.TYPE_NAME_INVALID, TypeCategory.ENTITY.name(), referenceable.getTypeName());
}
- ret = context.getEntities();
+ AtlasEntity entity = converter.fromV1ToV2(referenceable, entityType, context);
+
+ context.addEntity(entity);
}
+ AtlasEntitiesWithExtInfo ret = context.getEntities();
+
return ret;
}
private AtlasEntity fromV1toV2Entity(Referenceable referenceable, AtlasFormatConverter.ConverterContext context) throws AtlasBaseException {
if (LOG.isDebugEnabled()) {
- LOG.debug("==> fromV1toV2Entity");
+ LOG.debug("==> fromV1toV2Entity({})", referenceable);
}
AtlasEntityFormatConverter converter = (AtlasEntityFormatConverter) instanceFormatters.getConverter(TypeCategory.ENTITY);
@@ -278,8 +213,9 @@ public class AtlasInstanceConverter {
AtlasEntity entity = converter.fromV1ToV2(referenceable, typeRegistry.getType(referenceable.getTypeName()), context);
if (LOG.isDebugEnabled()) {
- LOG.debug("<== fromV1toV2Entity");
+ LOG.debug("<== fromV1toV2Entity({}): {}", referenceable, entity);
}
+
return entity;
}
@@ -352,4 +288,24 @@ public class AtlasInstanceConverter {
return ret;
}
+
+
+ private AtlasEntity.AtlasEntityWithExtInfo getAndCacheEntity(String guid) throws AtlasBaseException {
+ RequestContextV1 context = RequestContextV1.get();
+ AtlasEntity.AtlasEntityWithExtInfo entityWithExtInfo = context.getInstanceV2(guid);
+
+ if (entityWithExtInfo == null) {
+ entityWithExtInfo = entityGraphRetriever.toAtlasEntityWithExtInfo(guid);
+
+ if (entityWithExtInfo != null) {
+ context.cache(entityWithExtInfo);
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Cache miss -> GUID = {}", guid);
+ }
+ }
+ }
+
+ return entityWithExtInfo;
+ }
}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/converters/AtlasObjectIdConverter.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasObjectIdConverter.java b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasObjectIdConverter.java
index f946b9c..a5b6d84 100644
--- a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasObjectIdConverter.java
+++ b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasObjectIdConverter.java
@@ -19,18 +19,15 @@ package org.apache.atlas.repository.converters;
import org.apache.atlas.AtlasErrorCode;
-import org.apache.atlas.AtlasException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasObjectId;
+import org.apache.atlas.v1.model.instance.Id;
+import org.apache.atlas.v1.model.instance.Referenceable;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.IReferenceableInstance;
-import org.apache.atlas.typesystem.Referenceable;
-import org.apache.atlas.typesystem.persistence.Id;
-import org.apache.atlas.typesystem.persistence.StructInstance;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
@@ -54,10 +51,10 @@ public class AtlasObjectIdConverter extends AtlasAbstractFormatConverter {
if (v1Obj instanceof Id) {
Id id = (Id) v1Obj;
- ret = new AtlasObjectId(id._getId(), id.getTypeName());
- } else if (v1Obj instanceof IReferenceableInstance) {
- IReferenceableInstance refInst = (IReferenceableInstance) v1Obj;
- String guid = refInst.getId()._getId();
+ ret = new AtlasObjectId(id.getId(), id.getTypeName());
+ } else if (v1Obj instanceof Referenceable) {
+ Referenceable refInst = (Referenceable) v1Obj;
+ String guid = refInst.getId().getId();
ret = new AtlasObjectId(guid, refInst.getTypeName());
@@ -79,11 +76,10 @@ public class AtlasObjectIdConverter extends AtlasAbstractFormatConverter {
Id ret = null;
if (v2Obj != null) {
-
if (v2Obj instanceof Map) {
Map v2Map = (Map) v2Obj;
String idStr = (String)v2Map.get(AtlasObjectId.KEY_GUID);
- String typeName = type.getTypeName();
+ String typeName = (String)v2Map.get(AtlasObjectId.KEY_TYPENAME);
if (StringUtils.isEmpty(idStr)) {
throw new AtlasBaseException(AtlasErrorCode.INSTANCE_GUID_NOT_FOUND);
@@ -91,47 +87,33 @@ public class AtlasObjectIdConverter extends AtlasAbstractFormatConverter {
ret = new Id(idStr, 0, typeName);
} else if (v2Obj instanceof AtlasObjectId) { // transient-id
- ret = new Id(((AtlasObjectId) v2Obj).getGuid(), 0, type.getTypeName());
+ AtlasObjectId objId = (AtlasObjectId) v2Obj;
+
+ ret = new Id(objId.getGuid(), 0, objId.getTypeName());
} else if (v2Obj instanceof AtlasEntity) {
AtlasEntity entity = (AtlasEntity) v2Obj;
- ret = new Id(((AtlasObjectId) v2Obj).getGuid(), 0, type.getTypeName());
+
+ ret = new Id(entity.getGuid(), entity.getVersion() == null ? 0 : entity.getVersion().intValue(), entity.getTypeName());
} else {
throw new AtlasBaseException(AtlasErrorCode.TYPE_CATEGORY_INVALID, type.getTypeCategory().name());
}
}
+
return ret;
}
- private boolean hasAnyAssignedAttribute(IReferenceableInstance rInstance) {
+ private boolean hasAnyAssignedAttribute(org.apache.atlas.v1.model.instance.Referenceable rInstance) {
boolean ret = false;
- if (rInstance instanceof StructInstance) {
- StructInstance sInstance = (StructInstance) rInstance;
+ Map<String, Object> attributes = rInstance.getValues();
- Map<String, Object> attributes = null;
-
- try {
- attributes = sInstance.getValuesMap();
- } catch (AtlasException e) {
- // ignore
- }
-
- if (MapUtils.isNotEmpty(attributes)) {
- for (String attrName : attributes.keySet()) {
- try {
- if (sInstance.isValueSet(attrName)) {
- ret = true;
- break;
- }
- } catch (AtlasException e) {
- // ignore
- }
+ if (MapUtils.isNotEmpty(attributes)) {
+ for (Map.Entry<String, Object> attribute : attributes.entrySet()) {
+ if (attribute.getValue() != null) {
+ ret = true;
+ break;
}
}
- } else if (rInstance instanceof Referenceable) {
- Referenceable referenceable = (Referenceable) rInstance;
-
- ret = MapUtils.isNotEmpty(referenceable.getValuesMap());
}
return ret;
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/converters/AtlasStructFormatConverter.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasStructFormatConverter.java b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasStructFormatConverter.java
index 6b6ee01..70b23c5 100644
--- a/repository/src/main/java/org/apache/atlas/repository/converters/AtlasStructFormatConverter.java
+++ b/repository/src/main/java/org/apache/atlas/repository/converters/AtlasStructFormatConverter.java
@@ -18,21 +18,24 @@
package org.apache.atlas.repository.converters;
import org.apache.atlas.AtlasErrorCode;
-import org.apache.atlas.AtlasException;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
+import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.instance.AtlasObjectId;
import org.apache.atlas.model.instance.AtlasStruct;
-import org.apache.atlas.type.AtlasStructType;
+import org.apache.atlas.v1.model.instance.Struct;
+import org.apache.atlas.type.*;
+import org.apache.atlas.type.AtlasBuiltInTypes.AtlasObjectIdType;
import org.apache.atlas.type.AtlasStructType.AtlasAttribute;
-import org.apache.atlas.type.AtlasType;
-import org.apache.atlas.type.AtlasTypeRegistry;
-import org.apache.atlas.typesystem.IStruct;
-import org.apache.atlas.typesystem.Struct;
import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.util.ArrayList;
+import java.util.Collection;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
public class AtlasStructFormatConverter extends AtlasAbstractFormatConverter {
@@ -64,19 +67,12 @@ public class AtlasStructFormatConverter extends AtlasAbstractFormatConverter {
} else {
ret = new AtlasStruct(type.getTypeName());
}
- } else if (v1Obj instanceof IStruct) {
- IStruct struct = (IStruct) v1Obj;
- Map<String, Object> v1Attribs = null;
-
- try {
- v1Attribs = struct.getValuesMap();
- } catch (AtlasException excp) {
- LOG.error("IStruct.getValuesMap() failed", excp);
- }
+ } else if (v1Obj instanceof Struct) {
+ Struct struct = (Struct) v1Obj;
- ret = new AtlasStruct(type.getTypeName(), fromV1ToV2(structType, v1Attribs, converterContext));
+ ret = new AtlasStruct(type.getTypeName(), fromV1ToV2(structType, struct.getValues(), converterContext));
} else {
- throw new AtlasBaseException(AtlasErrorCode.UNEXPECTED_TYPE, "Map or IStruct", v1Obj.getClass().getCanonicalName());
+ throw new AtlasBaseException(AtlasErrorCode.UNEXPECTED_TYPE, "Map or Struct", v1Obj.getClass().getCanonicalName());
}
}
@@ -118,7 +114,8 @@ public class AtlasStructFormatConverter extends AtlasAbstractFormatConverter {
}
protected Map<String, Object> fromV2ToV1(AtlasStructType structType, Map<String, Object> attributes, ConverterContext context) throws AtlasBaseException {
- Map<String, Object> ret = null;
+ Map<String, Object> ret = null;
+ boolean isEntityType = structType instanceof AtlasEntityType;
if (MapUtils.isNotEmpty(attributes)) {
ret = new HashMap<>();
@@ -132,13 +129,80 @@ public class AtlasStructFormatConverter extends AtlasAbstractFormatConverter {
continue;
}
- AtlasType attrType = attr.getAttributeType();
+ AtlasType attrType = attr.getAttributeType();
+ AtlasFormatConverter attrConverter = converterRegistry.getConverter(attrType.getTypeCategory());
+ Object v2Value = attributes.get(attr.getName());
+
+ if (v2Value != null && isEntityType && attr.isOwnedRef()) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("{}: is ownedRef, attrType={}", attr.getQualifiedName(), attrType.getTypeName());
+ }
+
+ if (attrType instanceof AtlasArrayType) {
+ AtlasArrayType arrayType = (AtlasArrayType) attrType;
+ AtlasType elemType = arrayType.getElementType();
+ String elemTypeName;
+
+ if (elemType instanceof AtlasObjectIdType) {
+ elemTypeName = ((AtlasObjectIdType) elemType).getObjectType();
+ } else {
+ elemTypeName = elemType.getTypeName();
+ }
+
+ AtlasEntityType entityType = typeRegistry.getEntityTypeByName(elemTypeName);;
+
+ if (entityType != null) {
+ Collection<?> arrayValue = (Collection<?>) v2Value;
+ List<AtlasEntity> entities = new ArrayList<>(arrayValue.size());
+
+ for (Object arrayElem : arrayValue) {
+ String entityGuid = getGuid(arrayElem);
+ AtlasEntity entity = StringUtils.isNotEmpty(entityGuid) ? context.getById(entityGuid) : null;
+
+ if (entity != null) {
+ entities.add(entity);
+ } else {
+ LOG.warn("{}: not replacing objIdList with entityList - entity not found guid={}", attr.getQualifiedName(), entityGuid);
+
+ entities = null;
+ break;
+ }
+ }
+
+ if (entities != null) {
+ v2Value = entities;
+ attrType = new AtlasArrayType(entityType);
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("{}: replaced objIdList with entityList", attr.getQualifiedName());
+ }
+ }
+ } else {
+ LOG.warn("{}: not replacing objIdList with entityList - elementType {} is not an entityType", attr.getQualifiedName(), elemTypeName);
+ }
+ } else if (attrType instanceof AtlasObjectIdType) {
+ String entityGuid = getGuid(v2Value);
+ AtlasEntity entity = StringUtils.isNotEmpty(entityGuid) ? context.getById(entityGuid) : null;
+ AtlasEntityType entityType = entity != null ? typeRegistry.getEntityTypeByName(entity.getTypeName()) : null;
+
+ if (entity != null && entityType != null) {
+ v2Value = entity;
+ attrType = entityType;
+ attrConverter = converterRegistry.getConverter(attrType.getTypeCategory());
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("{}: replaced objId with entity guid={}", attr.getQualifiedName(), entityGuid);
+ }
+ } else {
+ LOG.warn("{}: not replacing objId with entity - entity not found guid={}", attr.getQualifiedName(), entityGuid);
+ }
+ } else {
+ LOG.warn("{}: not replacing objId with entity - unexpected attribute-type {}", attr.getQualifiedName(), attrType.getTypeName());
+ }
+ }
- Object v2Value = attributes.get(attr.getName());
- Object v1Value;
+ Object v1Value = attrConverter.fromV2ToV1(v2Value, attrType, context);
- AtlasFormatConverter attrConverter = converterRegistry.getConverter(attrType.getTypeCategory());
- v1Value = attrConverter.fromV2ToV1(v2Value, attrType, context);
ret.put(attr.getName(), v1Value);
}
}
@@ -146,6 +210,24 @@ public class AtlasStructFormatConverter extends AtlasAbstractFormatConverter {
return ret;
}
+ private String getGuid(Object obj) {
+ final String ret;
+
+ if (obj instanceof AtlasObjectId) {
+ AtlasObjectId objId = (AtlasObjectId) obj;
+
+ ret = objId.getGuid();
+ } else if (obj instanceof Map) {
+ Map v2Map = (Map) obj;
+
+ ret = (String)v2Map.get(AtlasObjectId.KEY_GUID);
+ } else {
+ ret = null;
+ }
+
+ return ret;
+ }
+
protected Map<String, Object> fromV1ToV2(AtlasStructType structType, Map attributes, ConverterContext context) throws AtlasBaseException {
Map<String, Object> ret = null;
@@ -162,8 +244,7 @@ public class AtlasStructFormatConverter extends AtlasAbstractFormatConverter {
continue;
}
- AtlasType attrType = attr.getAttributeType();
-
+ AtlasType attrType = attr.getAttributeType();
AtlasFormatConverter attrConverter = converterRegistry.getConverter(attrType.getTypeCategory());
Object v1Value = attributes.get(attr.getName());
Object v2Value = attrConverter.fromV1ToV2(v1Value, attrType, context);
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/main/java/org/apache/atlas/repository/converters/TypeConverterUtil.java
----------------------------------------------------------------------
diff --git a/repository/src/main/java/org/apache/atlas/repository/converters/TypeConverterUtil.java b/repository/src/main/java/org/apache/atlas/repository/converters/TypeConverterUtil.java
index 7902100..33f092e 100644
--- a/repository/src/main/java/org/apache/atlas/repository/converters/TypeConverterUtil.java
+++ b/repository/src/main/java/org/apache/atlas/repository/converters/TypeConverterUtil.java
@@ -24,10 +24,7 @@ import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef.C
import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef.CONSTRAINT_PARAM_ATTRIBUTE;
import static org.apache.atlas.type.AtlasTypeUtil.isArrayType;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.typedef.AtlasClassificationDef;
@@ -40,6 +37,13 @@ import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinali
import org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef;
import org.apache.atlas.model.typedef.AtlasTypeDefHeader;
import org.apache.atlas.model.typedef.AtlasTypesDef;
+import org.apache.atlas.v1.model.typedef.AttributeDefinition;
+import org.apache.atlas.v1.model.typedef.ClassTypeDefinition;
+import org.apache.atlas.v1.model.typedef.EnumTypeDefinition;
+import org.apache.atlas.v1.model.typedef.Multiplicity;
+import org.apache.atlas.v1.model.typedef.StructTypeDefinition;
+import org.apache.atlas.v1.model.typedef.TraitTypeDefinition;
+import org.apache.atlas.v1.model.typedef.TypesDef;
import org.apache.atlas.repository.store.graph.v1.AtlasStructDefStoreV1;
import org.apache.atlas.type.AtlasClassificationType;
import org.apache.atlas.type.AtlasEntityType;
@@ -49,25 +53,12 @@ import org.apache.atlas.type.AtlasStructType.AtlasAttribute;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.atlas.type.AtlasTypeUtil;
-import org.apache.atlas.typesystem.TypesDef;
-import org.apache.atlas.typesystem.json.TypesSerialization;
-import org.apache.atlas.typesystem.types.AttributeDefinition;
-import org.apache.atlas.typesystem.types.ClassType;
-import org.apache.atlas.typesystem.types.EnumTypeDefinition;
-import org.apache.atlas.typesystem.types.EnumValue;
-import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.atlas.typesystem.types.Multiplicity;
-import org.apache.atlas.typesystem.types.StructTypeDefinition;
-import org.apache.atlas.typesystem.types.TraitType;
-import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.atlas.v1.model.typedef.EnumTypeDefinition.EnumValue;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableSet;
-
public final class TypeConverterUtil {
private TypeConverterUtil() {}
@@ -92,75 +83,58 @@ public final class TypeConverterUtil {
}
private static TypesDef enumToTypesDef(AtlasEnumType enumType) {
- TypesDef ret = null;
-
AtlasEnumDef enumDef = enumType.getEnumDef();
- String enumName = enumDef.getName();
- String enumDesc = enumDef.getDescription();
- String enumVersion = enumDef.getTypeVersion();
- EnumValue[] enumValues = getEnumValues(enumDef.getElementDefs());
+ String enumName = enumDef.getName();
+ String enumDesc = enumDef.getDescription();
+ String enumVersion = enumDef.getTypeVersion();
+ List<EnumValue> enumValues = getEnumValues(enumDef.getElementDefs());
- if (enumName != null && enumValues != null && enumValues.length > 0) {
- EnumTypeDefinition enumTypeDef = new EnumTypeDefinition(enumName, enumDesc, enumVersion, enumValues);
+ EnumTypeDefinition enumTypeDef = new EnumTypeDefinition(enumName, enumDesc, enumVersion, enumValues);
- ret = TypesUtil.getTypesDef(ImmutableList.of(enumTypeDef),
- ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
- }
+ TypesDef ret = new TypesDef(Arrays.asList(enumTypeDef), null, null, null);
return ret;
}
- private static TypesDef structToTypesDef(AtlasStructType structType, AtlasTypeRegistry registry)
- throws AtlasBaseException {
- String typeName = structType.getStructDef().getName();
- String typeDesc = structType.getStructDef().getDescription();
- String typeVersion = structType.getStructDef().getTypeVersion();
- AttributeDefinition[] attributes = getAttributes(structType, registry);
- StructTypeDefinition structTypeDef = TypesUtil.createStructTypeDef(typeName, typeDesc, typeVersion, attributes);
+ private static TypesDef structToTypesDef(AtlasStructType structType, AtlasTypeRegistry registry) {
+ String typeName = structType.getStructDef().getName();
+ String typeDesc = structType.getStructDef().getDescription();
+ String typeVersion = structType.getStructDef().getTypeVersion();
+ List<AttributeDefinition> attributes = getAttributes(structType, registry);
- TypesDef ret = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.of(structTypeDef),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
+ StructTypeDefinition structTypeDef = new StructTypeDefinition(typeName, typeDesc, typeVersion, attributes);
+
+ TypesDef ret = new TypesDef(null, Arrays.asList(structTypeDef), null, null);
return ret;
}
- private static TypesDef entityToTypesDef(AtlasEntityType entityType, AtlasTypeRegistry registry)
- throws AtlasBaseException {
- String typeName = entityType.getEntityDef().getName();
- String typeDesc = entityType.getEntityDef().getDescription();
- String typeVersion = entityType.getEntityDef().getTypeVersion();
- ImmutableSet superTypes = ImmutableSet.copyOf(entityType.getEntityDef().getSuperTypes());
- AttributeDefinition[] attributes = getAttributes(entityType, registry);
-
- HierarchicalTypeDefinition<ClassType> classType = TypesUtil.createClassTypeDef(typeName, typeDesc, typeVersion,
- superTypes, attributes);
- TypesDef ret = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
- ImmutableList.of(classType));
+ private static TypesDef entityToTypesDef(AtlasEntityType entityType, AtlasTypeRegistry registry) {
+ String typeName = entityType.getEntityDef().getName();
+ String typeDesc = entityType.getEntityDef().getDescription();
+ String typeVersion = entityType.getEntityDef().getTypeVersion();
+ Set<String> superTypes = entityType.getEntityDef().getSuperTypes();
+ List<AttributeDefinition> attributes = getAttributes(entityType, registry);
+
+ ClassTypeDefinition classTypeDef = new ClassTypeDefinition(typeName, typeDesc, typeVersion, attributes, superTypes);
+
+ TypesDef ret = new TypesDef(null, null, null, Arrays.asList(classTypeDef));
return ret;
}
- private static TypesDef classificationToTypesDef(AtlasClassificationType classificationType,
- AtlasTypeRegistry registry) throws AtlasBaseException {
- String typeName = classificationType.getClassificationDef().getName();
- String typeDesc = classificationType.getClassificationDef().getDescription();
- String typeVersion = classificationType.getClassificationDef().getTypeVersion();
- ImmutableSet superTypes = ImmutableSet.copyOf(classificationType.getClassificationDef().getSuperTypes());
- AttributeDefinition[] attributes = getAttributes(classificationType, registry);
-
- HierarchicalTypeDefinition traitType = TypesUtil.createTraitTypeDef(typeName, typeDesc, typeVersion, superTypes,
- attributes);
- TypesDef ret = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(),
- ImmutableList.<StructTypeDefinition>of(),
- ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(traitType),
- ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
+ private static TypesDef classificationToTypesDef(AtlasClassificationType classificationType, AtlasTypeRegistry registry) {
+ String typeName = classificationType.getClassificationDef().getName();
+ String typeDesc = classificationType.getClassificationDef().getDescription();
+ String typeVersion = classificationType.getClassificationDef().getTypeVersion();
+ Set<String> superTypes = new HashSet<>(classificationType.getClassificationDef().getSuperTypes());
+ List<AttributeDefinition> attributes = getAttributes(classificationType, registry);
+
+ TraitTypeDefinition traitTypeDef = new TraitTypeDefinition(typeName, typeDesc, typeVersion, attributes, superTypes);
+
+ TypesDef ret = new TypesDef(null, null, Arrays.asList(traitTypeDef), null);
+
return ret;
}
@@ -174,24 +148,24 @@ public final class TypeConverterUtil {
throw new AtlasBaseException(INVALID_TYPE_DEFINITION, typeDefinition);
}
- TypesDef typesDef = TypesSerialization.fromJson(typeDefinition);
- if (CollectionUtils.isNotEmpty(typesDef.enumTypesAsJavaList())) {
- List<AtlasEnumDef> enumDefs = toAtlasEnumDefs(typesDef.enumTypesAsJavaList());
+ TypesDef typesDef = AtlasType.fromV1Json(typeDefinition, TypesDef.class);
+ if (CollectionUtils.isNotEmpty(typesDef.getEnumTypes())) {
+ List<AtlasEnumDef> enumDefs = toAtlasEnumDefs(typesDef.getEnumTypes());
ret.setEnumDefs(enumDefs);
}
- if (CollectionUtils.isNotEmpty(typesDef.structTypesAsJavaList())) {
- List<AtlasStructDef> structDefs = toAtlasStructDefs(typesDef.structTypesAsJavaList());
+ if (CollectionUtils.isNotEmpty(typesDef.getStructTypes())) {
+ List<AtlasStructDef> structDefs = toAtlasStructDefs(typesDef.getStructTypes());
ret.setStructDefs(structDefs);
}
- if (CollectionUtils.isNotEmpty(typesDef.classTypesAsJavaList())) {
- List<AtlasEntityDef> entityDefs = toAtlasEntityDefs(typesDef.classTypesAsJavaList(), registry);
+ if (CollectionUtils.isNotEmpty(typesDef.getClassTypes())) {
+ List<AtlasEntityDef> entityDefs = toAtlasEntityDefs(typesDef.getClassTypes(), registry);
ret.setEntityDefs(entityDefs);
}
- if (CollectionUtils.isNotEmpty(typesDef.traitTypesAsJavaList())) {
- List<AtlasClassificationDef> classificationDefs = toAtlasClassificationDefs(typesDef.traitTypesAsJavaList());
+ if (CollectionUtils.isNotEmpty(typesDef.getTraitTypes())) {
+ List<AtlasClassificationDef> classificationDefs = toAtlasClassificationDefs(typesDef.getTraitTypes());
ret.setClassificationDefs(classificationDefs);
}
@@ -203,7 +177,7 @@ public final class TypeConverterUtil {
return ret;
}
- public static ImmutableList<String> getTypeNames(List<AtlasTypeDefHeader> atlasTypesDefs) {
+ public static List<String> getTypeNames(List<AtlasTypeDefHeader> atlasTypesDefs) {
List<String> ret = new ArrayList<String>();
if (CollectionUtils.isNotEmpty(atlasTypesDefs)) {
for (AtlasTypeDefHeader atlasTypesDef : atlasTypesDefs) {
@@ -211,7 +185,7 @@ public final class TypeConverterUtil {
}
}
- return ImmutableList.copyOf(ret);
+ return ret;
}
public static List<String> getTypeNames(AtlasTypesDef typesDef) {
@@ -224,10 +198,10 @@ public final class TypeConverterUtil {
for (EnumTypeDefinition enumType : enumTypeDefinitions) {
AtlasEnumDef enumDef = new AtlasEnumDef();
- enumDef.setName(enumType.name);
- enumDef.setDescription(enumType.description);
- enumDef.setTypeVersion(enumType.version);
- enumDef.setElementDefs(getAtlasEnumElementDefs(enumType.enumValues));
+ enumDef.setName(enumType.getName());
+ enumDef.setDescription(enumType.getDescription());
+ enumDef.setTypeVersion(enumType.getVersion());
+ enumDef.setElementDefs(getAtlasEnumElementDefs(enumType.getEnumValues()));
ret.add(enumDef);
}
@@ -235,80 +209,65 @@ public final class TypeConverterUtil {
return ret;
}
- private static List<AtlasStructDef> toAtlasStructDefs(List<StructTypeDefinition> structTypeDefinitions)
- throws AtlasBaseException {
- List<AtlasStructDef> ret = new ArrayList<AtlasStructDef>();
+ private static List<AtlasStructDef> toAtlasStructDefs(List<StructTypeDefinition> structTypeDefinitions) {
+ List<AtlasStructDef> ret = new ArrayList<>();
for (StructTypeDefinition structType : structTypeDefinitions) {
- AtlasStructDef structDef = new AtlasStructDef();
List<AtlasAttributeDef> attrDefs = new ArrayList<AtlasAttributeDef>();
- structDef.setName(structType.typeName);
- structDef.setDescription(structType.typeDescription);
- structDef.setTypeVersion(structType.typeVersion);
-
- AttributeDefinition[] attrDefinitions = structType.attributeDefinitions;
- for (AttributeDefinition attrDefinition : attrDefinitions) {
- attrDefs.add(toAtlasAttributeDef(attrDefinition));
+ if (CollectionUtils.isNotEmpty(structType.getAttributeDefinitions())) {
+ for (AttributeDefinition attrDefinition : structType.getAttributeDefinitions()) {
+ attrDefs.add(toAtlasAttributeDef(attrDefinition));
+ }
}
- structDef.setAttributeDefs(attrDefs);
+ AtlasStructDef structDef = new AtlasStructDef(structType.getTypeName(), structType.getTypeDescription(), structType.getTypeVersion(), attrDefs);
+
ret.add(structDef);
}
return ret;
}
- private static List<AtlasClassificationDef> toAtlasClassificationDefs(List<HierarchicalTypeDefinition<TraitType>> traitTypeDefinitions)
- throws AtlasBaseException {
- List<AtlasClassificationDef> ret = new ArrayList<AtlasClassificationDef>();
+ private static List<AtlasClassificationDef> toAtlasClassificationDefs(List<TraitTypeDefinition> traitTypeDefinitions) {
+ List<AtlasClassificationDef> ret = new ArrayList<>();
- for (HierarchicalTypeDefinition<TraitType> traitType : traitTypeDefinitions) {
- AtlasClassificationDef classifDef = new AtlasClassificationDef();
+ for (TraitTypeDefinition traitType : traitTypeDefinitions) {
List<AtlasAttributeDef> attrDefs = new ArrayList<AtlasAttributeDef>();
- classifDef.setName(traitType.typeName);
- classifDef.setDescription(traitType.typeDescription);
- classifDef.setTypeVersion(traitType.typeVersion);
- classifDef.setSuperTypes(traitType.superTypes);
-
- AttributeDefinition[] attrDefinitions = traitType.attributeDefinitions;
- for (AttributeDefinition attrDefinition : attrDefinitions) {
- attrDefs.add(toAtlasAttributeDef(attrDefinition));
+ if (CollectionUtils.isNotEmpty(traitType.getAttributeDefinitions())) {
+ for (AttributeDefinition attrDefinition : traitType.getAttributeDefinitions()) {
+ attrDefs.add(toAtlasAttributeDef(attrDefinition));
+ }
}
- classifDef.setAttributeDefs(attrDefs);
+ AtlasClassificationDef classifDef = new AtlasClassificationDef(traitType.getTypeName(), traitType.getTypeDescription(), traitType.getTypeVersion(), attrDefs, traitType.getSuperTypes());
+
ret.add(classifDef);
}
return ret;
}
- private static List<AtlasEntityDef> toAtlasEntityDefs(List<HierarchicalTypeDefinition<ClassType>> classTypeDefinitions,
- AtlasTypeRegistry registry) throws AtlasBaseException {
- List<AtlasEntityDef> atlasEntityDefs = new ArrayList<AtlasEntityDef>();
+ private static List<AtlasEntityDef> toAtlasEntityDefs(List<ClassTypeDefinition> classTypeDefinitions, AtlasTypeRegistry registry) {
+ List<AtlasEntityDef> ret = new ArrayList<>();
- for (HierarchicalTypeDefinition<ClassType> classType : classTypeDefinitions) {
+ for (ClassTypeDefinition classType : classTypeDefinitions) {
List<AtlasAttributeDef> attrDefs = new ArrayList<AtlasAttributeDef>();
- AtlasEntityDef atlasEntityDef = new AtlasEntityDef();
- String classTypeDefName = classType.typeName;
-
- atlasEntityDef.setName(classTypeDefName);
- atlasEntityDef.setDescription(classType.typeDescription);
- atlasEntityDef.setTypeVersion(classType.typeVersion);
- atlasEntityDef.setSuperTypes(classType.superTypes);
-
- AttributeDefinition[] attrDefinitions = classType.attributeDefinitions;
- for (AttributeDefinition oldAttr : attrDefinitions) {
- AtlasAttributeDef newAttr = toAtlasAttributeDef(oldAttr);
- attrDefs.add(newAttr);
+
+ if (CollectionUtils.isNotEmpty(classType.getAttributeDefinitions())) {
+ for (AttributeDefinition oldAttr : classType.getAttributeDefinitions()) {
+ AtlasAttributeDef newAttr = toAtlasAttributeDef(oldAttr);
+ attrDefs.add(newAttr);
+ }
}
- atlasEntityDef.setAttributeDefs(attrDefs);
- atlasEntityDefs.add(atlasEntityDef);
+ AtlasEntityDef entityDef = new AtlasEntityDef(classType.getTypeName(), classType.getTypeDescription(), classType.getTypeVersion(), attrDefs, classType.getSuperTypes());
+
+ ret.add(entityDef);
}
- return atlasEntityDefs;
+ return ret;
}
private static String getArrayTypeName(String attrType) {
@@ -323,17 +282,17 @@ public final class TypeConverterUtil {
return ret;
}
- private static List<AtlasEnumElementDef> getAtlasEnumElementDefs(EnumValue[] enums) {
- List<AtlasEnumElementDef> ret = new ArrayList<AtlasEnumElementDef>();
+ private static List<AtlasEnumElementDef> getAtlasEnumElementDefs(List<EnumValue> enums) {
+ List<AtlasEnumElementDef> ret = new ArrayList<>();
for (EnumValue enumElem : enums) {
- ret.add(new AtlasEnumElementDef(enumElem.value, null, enumElem.ordinal));
+ ret.add(new AtlasEnumElementDef(enumElem.getValue(), null, enumElem.getOrdinal()));
}
return ret;
}
- private static EnumValue[] getEnumValues(List<AtlasEnumElementDef> enumDefs) {
+ private static List<EnumValue> getEnumValues(List<AtlasEnumElementDef> enumDefs) {
List<EnumValue> ret = new ArrayList<EnumValue>();
if (CollectionUtils.isNotEmpty(enumDefs)) {
@@ -344,32 +303,30 @@ public final class TypeConverterUtil {
}
}
- return ret.toArray(new EnumValue[ret.size()]);
+ return ret;
}
public static AtlasAttributeDef toAtlasAttributeDef(final AttributeDefinition attrDefinition) {
- AtlasAttributeDef ret = new AtlasAttributeDef();
+ AtlasAttributeDef ret = new AtlasAttributeDef(attrDefinition.getName(), attrDefinition.getDataTypeName());
- ret.setName(attrDefinition.name);
- ret.setTypeName(attrDefinition.dataTypeName);
- ret.setIsIndexable(attrDefinition.isIndexable);
- ret.setIsUnique(attrDefinition.isUnique);
- if (attrDefinition.isComposite) {
+ ret.setIsIndexable(attrDefinition.getIsIndexable());
+ ret.setIsUnique(attrDefinition.getIsUnique());
+ if (attrDefinition.getIsComposite()) {
ret.addConstraint(new AtlasConstraintDef(CONSTRAINT_TYPE_OWNED_REF));
}
- if (StringUtils.isNotBlank(attrDefinition.reverseAttributeName)) {
+ if (StringUtils.isNotBlank(attrDefinition.getReverseAttributeName())) {
ret.addConstraint(new AtlasConstraintDef(CONSTRAINT_TYPE_INVERSE_REF,
new HashMap<String, Object>() {{
- put(CONSTRAINT_PARAM_ATTRIBUTE, attrDefinition.reverseAttributeName);
+ put(CONSTRAINT_PARAM_ATTRIBUTE, attrDefinition.getReverseAttributeName());
}}));
}
// Multiplicity attribute mapping
- Multiplicity multiplicity = attrDefinition.multiplicity;
- int minCount = multiplicity.lower;
- int maxCount = multiplicity.upper;
- boolean isUnique = multiplicity.isUnique;
+ Multiplicity multiplicity = attrDefinition.getMultiplicity();
+ int minCount = multiplicity.getLower();
+ int maxCount = multiplicity.getUpper();
+ boolean isUnique = multiplicity.getIsUnique();
if (minCount == 0) {
ret.setIsOptional(true);
@@ -395,7 +352,7 @@ public final class TypeConverterUtil {
return ret;
}
- private static AttributeDefinition[] getAttributes(AtlasStructType structType, AtlasTypeRegistry registry) throws AtlasBaseException {
+ private static List<AttributeDefinition> getAttributes(AtlasStructType structType, AtlasTypeRegistry registry) {
List<AttributeDefinition> ret = new ArrayList<>();
List<AtlasAttributeDef> attrDefs = structType.getStructDef().getAttributeDefs();
@@ -403,10 +360,12 @@ public final class TypeConverterUtil {
for (AtlasAttributeDef attrDef : attrDefs) {
AtlasAttribute attribute = structType.getAttribute(attrDef.getName());
- ret.add(AtlasStructDefStoreV1.toAttributeDefintion(attribute));
+ AttributeDefinition oldAttrDef = AtlasStructDefStoreV1.toAttributeDefintion(attribute);
+
+ ret.add(new AttributeDefinition(oldAttrDef.getName(), oldAttrDef.getDataTypeName(), new Multiplicity(oldAttrDef.getMultiplicity()), oldAttrDef.getIsComposite(), oldAttrDef.getIsUnique(), oldAttrDef.getIsIndexable(), oldAttrDef.getReverseAttributeName()));
}
}
- return ret.toArray(new AttributeDefinition[ret.size()]);
+ return ret;
}
}
[14/42] atlas git commit: ATLAS-2251: Remove TypeSystem and related
implementation, to avoid unncessary duplicate of type details in cache
Posted by ma...@apache.org.
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/scala/org/apache/atlas/query/ExpressionTest.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/ExpressionTest.scala b/repository/src/test/scala/org/apache/atlas/query/ExpressionTest.scala
deleted file mode 100755
index 918f327..0000000
--- a/repository/src/test/scala/org/apache/atlas/query/ExpressionTest.scala
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import org.apache.atlas.DBSandboxer
-import org.apache.atlas.query.Expressions._
-import org.apache.atlas.repository.BaseTest
-import org.testng.annotations.{BeforeMethod, Listeners, Test}
-
-class ExpressionTest extends BaseTest {
-
- @BeforeMethod
- override def setup {
- super.setup
-
- QueryTestsUtils.setupTypes
-
- }
-
- @Test def testClass: Unit = {
- val e = QueryProcessor.validate(_class("DB"))
- println(e)
- }
-
- @Test def testFilter: Unit = {
- val e = QueryProcessor.validate(_class("DB").where(id("name").`=`(string("Reporting"))))
- println(e)
- }
-
- @Test def testSelect: Unit = {
- val e = QueryProcessor.validate(_class("DB").where(id("name").`=`(string("Reporting"))).
- select(id("name"), id("owner")))
- println(e)
- }
-
- @Test def testNegTypeTest: Unit = {
- try {
- val e = QueryProcessor.validate(_class("DB").where(id("name")))
- println(e)
- } catch {
- case e: ExpressionException if e.getMessage.endsWith("expression: DB where name") => ()
- }
- }
-
- @Test def testIsTrait: Unit = {
- val e = QueryProcessor.validate(_class("DB").where(isTrait("JdbcAccess")))
- println(e)
- }
-
- @Test def testIsTraitNegative: Unit = {
- try {
- val e = QueryProcessor.validate(_class("DB").where(isTrait("Jdb")))
- println(e)
- } catch {
- case e: ExpressionException if e.getMessage.endsWith("not a TraitType, expression: is Jdb") => ()
- }
- }
-
- @Test def testhasField: Unit = {
- val e = QueryProcessor.validate(_class("DB").where(hasField("name")))
- println(e)
- }
-
- @Test def testHasFieldNegative: Unit = {
- try {
- val e = QueryProcessor.validate(_class("DB").where(hasField("nam")))
- println(e)
- } catch {
- case e: ExpressionException if e.getMessage.endsWith("not a TraitType, expression: is Jdb") => ()
- }
- }
-
- @Test def testFieldReference: Unit = {
- val e = QueryProcessor.validate(_class("DB").field("Table"))
- println(e)
- }
-
- @Test def testNegFieldReference: Unit = {
- try {
- val e = QueryProcessor.validate(_class("DB").where(_class("LoadProcess").hasField("name")))
- println(e)
- } catch {
- case e: ExpressionException
- if e.getMessage.endsWith("srcType of field doesn't match input type, expression: LoadProcess has name") => ()
- }
- }
-
- @Test def testFieldReferenceRedundant: Unit = {
- val e = QueryProcessor.validate(_class("DB").where(_class("DB").hasField("name")))
- println(e)
- }
-
- @Test def testBackReference: Unit = {
- val e = QueryProcessor.validate(
- _class("DB").as("db1").field("Table").where(id("db1").field("name").`=`(string("Reporting"))))
- println(e)
- }
-
- @Test def testArith: Unit = {
- val e = QueryProcessor.validate(_class("DB").where(id("name").`=`(string("Reporting"))).
- select(id("name"), id("createTime") + int(1)))
- println(e)
- }
-
- @Test def testComparisonLogical: Unit = {
- val e = QueryProcessor.validate(_class("DB").where(id("name").`=`(string("Reporting")).
- and(id("createTime") + int(1) > int(0))))
- println(e)
- }
-
- @Test def testJoinAndSelect1: Unit = {
- val e = QueryProcessor.validate(
- _class("DB").as("db1").field("Table").as("tab").where((id("db1").field("createTime") + int(1) > int(0))
- .and(id("db1").field("name").`=`(string("Reporting")))).select(id("db1").field("name").as("dbName"),
- id("tab").field("name").as("tabName"))
- )
- println(e)
- }
-
- @Test def testJoinAndSelect2: Unit = {
- val e = QueryProcessor.validate(
- _class("DB").as("db1").field("Table").as("tab").where((id("db1").field("createTime") + int(1) > int(0))
- .or(id("db1").field("name").`=`(string("Reporting"))))
- .select(id("db1").field("name").as("dbName"), id("tab").field("name").as("tabName"))
- )
- println(e)
- }
-
- @Test def testJoinAndSelect3: Unit = {
- val e = QueryProcessor.validate(
- _class("DB").as("db1").field("Table").as("tab").where((id("db1").field("createTime") + int(1) > int(0))
- .and(id("db1").field("name").`=`(string("Reporting")))
- .or(id("db1").hasField("owner")))
- .select(id("db1").field("name").as("dbName"), id("tab").field("name").as("tabName"))
- )
- println(e)
- }
-
- @Test def testJoinAndSelect4: Unit = {
- val e = QueryProcessor.validate(
- _class("DB") as "db1" join "Table" as "tab" where (
- id("db1").field("createTime") + int(1) > int(0) and
- (id("db1") `.` "name" `=` string("Reporting")) or
- (id("db1") hasField "owner")
- ) select(
- id("db1") `.` "name" as "dbName", id("tab") `.` "name" as "tabName"
- )
- )
- println(e)
- }
-
- @Test def testLineageAll: Unit = {
- val e = QueryProcessor.validate(_class("Table").loop(id("LoadProcess").field("outputTable")))
- println(e)
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/scala/org/apache/atlas/query/GremlinTest.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/GremlinTest.scala b/repository/src/test/scala/org/apache/atlas/query/GremlinTest.scala
deleted file mode 100755
index a948d16..0000000
--- a/repository/src/test/scala/org/apache/atlas/query/GremlinTest.scala
+++ /dev/null
@@ -1,1068 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import org.apache.atlas.repository.graphdb.AtlasGraph
-import org.apache.atlas.discovery.graph.DefaultGraphPersistenceStrategy
-import org.apache.atlas.query.Expressions._
-import org.apache.atlas.repository.graph.{AtlasGraphProvider, GraphBackedMetadataRepository}
-import org.apache.atlas.typesystem.types.TypeSystem
-import org.testng.annotations._
-import org.apache.atlas.repository.graph.AtlasGraphProvider
-import org.apache.atlas.{DBSandboxer, TestUtils}
-import org.apache.atlas.graph.GraphSandboxUtil
-
-class GremlinTest extends BaseGremlinTest {
-
- var g: AtlasGraph[_,_] = null
- var gp: GraphPersistenceStrategies = null;
-
- @BeforeMethod
- def resetRequestContext() {
- TestUtils.resetRequestContext()
- }
-
- @BeforeClass
- def beforeAll() {
- TypeSystem.getInstance().reset()
- var repo = new GraphBackedMetadataRepository(null, new AtlasGraphProvider().get())
- TestUtils.setupGraphProvider(repo)
- //force graph to be initialized first
- AtlasGraphProvider.getGraphInstance()
-
- //create types and indices up front. Without this, some of the property keys (particularly __traitNames and __superTypes)
- //get ended up created implicitly with some graph backends with the wrong multiplicity. This also makes the queries
- //we execute perform better :-)
- QueryTestsUtils.setupTypesAndIndices()
-
- gp = new DefaultGraphPersistenceStrategy(repo)
- g = QueryTestsUtils.setupTestGraph(repo)
- }
-
- @AfterClass
- def afterAll() {
- AtlasGraphProvider.cleanup()
- }
-
-
- @Test def testClass {
- val r = QueryProcessor.evaluate(_class("DB"), g, gp)
- validateJson(r, """{
- | "query": "DB",
- | "dataType": {
- | "superTypes": [
- |
- | ],
- | "hierarchicalMetaTypeName": "org.apache.atlas.typesystem.types.ClassType",
- | "typeName": "DB",
- | "attributeDefinitions": [
- | {
- | "name": "name",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "owner",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "createTime",
- | "dataTypeName": "int",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- |
- | },
- | {
- | "name": "clusterName",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | }
- | ]
- | },
- | "rows": [
- | {
- | "$typeName$": "DB",
- | "$id$": {
- | "$typeName$": "DB",
- | "version": 0
- | },
- | "owner": "John ETL",
- | "name": "Sales",
- | "createTime": 1000,
- | "clusterName": "test"
- | },
- | {
- | "$typeName$": "DB",
- | "$id$": {
- | "$typeName$": "DB",
- | "version": 0
- | },
- | "owner": "Jane BI",
- | "name": "Reporting",
- | "createTime": 1500,
- | "clusterName": "test"
- | }
- | ]
- | }""".stripMargin)
- }
-
- @Test def testName {
- val r = QueryProcessor.evaluate(_class("DB").field("name"), g, gp)
- validateJson(r, "{\n \"query\":\"DB.name\",\n \"dataType\":\"string\",\n \"rows\":[\n \"Sales\",\n \"Reporting\"\n ]\n}")
- }
-
- @Test def testFilter {
- var r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting"))), g, gp)
- validateJson(r, """{
- | "query": "DB where (name = \"Reporting\")",
- | "dataType": {
- | "superTypes": [],
- | "hierarchicalMetaTypeName": "org.apache.atlas.typesystem.types.ClassType",
- | "typeName": "DB",
- | "attributeDefinitions": [
- | {
- | "name": "name",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "owner",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "createTime",
- | "dataTypeName": "int",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "clusterName",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | }
- | ]
- | },
- | "rows": [
- | {
- | "$typeName$": "DB",
- | "$id$": {
- | "$typeName$": "DB",
- | "version": 0
- | },
- | "owner": "Jane BI",
- | "name": "Reporting",
- | "createTime": 1500,
- | "clusterName": "test"
- | }
- | ]
- |}""".stripMargin)
- }
-
- @Test def testFilter2 {
- var r = QueryProcessor.evaluate(_class("DB").where(id("DB").field("name").`=`(string("Reporting"))), g, gp)
- validateJson(r, """{
- | "query": "DB where (name = \"Reporting\")",
- | "dataType": {
- | "superTypes": [],
- | "hierarchicalMetaTypeName": "org.apache.atlas.typesystem.types.ClassType",
- | "typeName": "DB",
- | "attributeDefinitions": [
- | {
- | "name": "name",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "owner",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "createTime",
- | "dataTypeName": "int",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "clusterName",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | }
- | ]
- | },
- | "rows": [
- | {
- | "$typeName$": "DB",
- | "$id$": {
- | "$typeName$": "DB",
- | "version": 0
- | },
- | "owner": "Jane BI",
- | "name": "Reporting",
- | "createTime": 1500,
- | "clusterName": "test"
- | }
- | ]
- |}""".stripMargin)
- }
-
- @Test def testSelect {
- val r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting"))).
- select(id("name"), id("owner")), g, gp)
- validateJson(r, """{
- | "query": "DB where (name = \"Reporting\") as _src1 select _src1.name as _src1.name, _src1.owner as _src1.owner",
- | "dataType": {
- | "typeName": "__tempQueryResultStruct1",
- | "attributeDefinitions": [
- | {
- | "name": "_src1.name",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "_src1.owner",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | }
- | ]
- | },
- | "rows": [
- | {
- | "$typeName$": "__tempQueryResultStruct1",
- | "_src1.owner": "Jane BI",
- | "_src1.name": "Reporting"
- | }
- | ]
- |}""".stripMargin)
- }
-
- @Test def testIsTrait {
- val r = QueryProcessor.evaluate(_class("Table").where(isTrait("Dimension")), g, gp)
- validateJson(r, """{
- | "query":"Table where Table is Dimension",
- | "dataType":{
- | "superTypes":[
- |
- | ],
- | "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
- | "typeName":"Table",
- | "attributeDefinitions":[
- | {
- | "name":"name",
- | "dataTypeName":"string",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"db",
- | "dataTypeName":"DB",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"sd",
- | "dataTypeName":"StorageDescriptor",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"created",
- | "dataTypeName":"date",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | }
- | ]
- | },
- | "rows":[
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"product_dim",
- | "$traits$":{
- | "Dimension":{
- | "$typeName$":"Dimension"
- | }
- | }
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"time_dim",
- | "$traits$":{
- | "Dimension":{
- | "$typeName$":"Dimension"
- | }
- | }
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"customer_dim",
- | "$traits$":{
- | "Dimension":{
- | "$typeName$":"Dimension"
- | }
- | }
- | }
- | ]
- |}""".stripMargin)
- }
-
- @Test def testhasField {
- val r = QueryProcessor.evaluate(_class("DB").where(hasField("name")), g, gp)
- validateJson(r, """{
- | "query":"DB where DB has name",
- | "dataType":{
- | "superTypes":[
- |
- | ],
- | "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
- | "typeName":"DB",
- | "attributeDefinitions":[
- | {
- | "name":"name",
- | "dataTypeName":"string",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"owner",
- | "dataTypeName":"string",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"createTime",
- | "dataTypeName":"int",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"clusterName",
- | "dataTypeName":"string",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | }
- | ]
- | },
- | "rows":[
- | {
- | "$typeName$":"DB",
- | "$id$":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "owner":"John ETL",
- | "name":"Sales",
- | "createTime":1000,
- | "clusterName":"test"
- | },
- | {
- | "$typeName$":"DB",
- | "$id$":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "owner":"Jane BI",
- | "name":"Reporting",
- | "createTime":1500,
- | "clusterName":"test"
- | }
- | ]
- |}""".stripMargin)
- }
-
- @Test def testFieldReference {
- val r = QueryProcessor.evaluate(_class("DB").field("Table"), g, gp)
- validateJson(r, """{
- | "query":"DB Table",
- | "dataType":{
- | "superTypes":[ ],
- | "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
- | "typeName":"Table",
- | "attributeDefinitions":[
- | {
- | "name":"name",
- | "dataTypeName":"string",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"db",
- | "dataTypeName":"DB",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"sd",
- | "dataTypeName":"StorageDescriptor",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"created",
- | "dataTypeName":"date",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | }
- | ]
- | },
- | "rows":[
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact"
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"product_dim",
- | "$traits$":{
- | "Dimension":{
- | "$typeName$":"Dimension"
- | }
- | }
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"time_dim",
- | "$traits$":{
- | "Dimension":{
- | "$typeName$":"Dimension"
- | }
- | }
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"customer_dim",
- | "$traits$":{
- | "Dimension":{
- | "$typeName$":"Dimension"
- | }
- | }
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact_daily_mv"
- | },
- | {
- | "$typeName$":"Table",
- | "$id$":{
- | "$typeName$":"Table",
- | "version":0
- | },
- | "created":"2014-12-11T02:35:58.440Z",
- | "sd":{
- | "$typeName$":"StorageDescriptor",
- | "version":0
- | },
- | "db":{
- | "$typeName$":"DB",
- | "version":0
- | },
- | "name":"sales_fact_monthly_mv"
- | }
- | ]
- |}""".stripMargin)
- }
-
- @Test def testBackReference {
- val r = QueryProcessor.evaluate(
- _class("DB").as("db").field("Table").where(id("db").field("name").`=`(string("Reporting"))), g, gp)
- validateJson(r, null)
- }
-
- @Test def testArith {
- val r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting"))).
- select(id("name"), id("createTime") + int(1)), g, gp)
- validateJson(r, "{\n \"query\":\"DB where (name = \\\"Reporting\\\") as _src1 select _src1.name as _src1.name, (_src1.createTime + 1) as (_src1.createTime + 1)\",\n \"dataType\":{\n \"typeName\":\"__tempQueryResultStruct3\",\n \"attributeDefinitions\":[\n {\n \"name\":\"_src1.name\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":false,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"(_src1.createTime + 1)\",\n \"dataTypeName\":\"int\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":false,\n \"reverseAttributeName\":null\n }\n ]\n },\n \"rows\":[\n {\n
\"$typeName$\":\"__tempQueryResultStruct3\",\n \"(_src1.createTime + 1)\":1501,\n \"_src1.name\":\"Reporting\"\n }\n ]\n}")
- }
-
- @Test def testComparisonLogical {
- val r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting")).
- and(id("createTime") > int(0))), g, gp)
- validateJson(r, """{
- | "query": "DB where (name = \"Reporting\") and (createTime > 0)",
- | "dataType": {
- | "superTypes": [
- |
- | ],
- | "hierarchicalMetaTypeName": "org.apache.atlas.typesystem.types.ClassType",
- | "typeName": "DB",
- | "attributeDefinitions": [
- | {
- | "name": "name",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "owner",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "createTime",
- | "dataTypeName": "int",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | },
- | {
- | "name": "clusterName",
- | "dataTypeName": "string",
- | "multiplicity": {
- | "lower": 0,
- | "upper": 1,
- | "isUnique": false
- | },
- | "isComposite": false,
- | "isUnique": false,
- | "isIndexable": false,
- | "reverseAttributeName": null
- | }
- | ]
- | },
- | "rows": [
- | {
- | "$typeName$": "DB",
- | "$id$": {
- | "$typeName$": "DB",
- | "version": 0
- | },
- | "owner": "Jane BI",
- | "name": "Reporting",
- | "createTime": 1500,
- | "clusterName": "test"
- | }
- | ]
- |}""".stripMargin)
- }
-
- @Test def testJoinAndSelect1 {
- val r = QueryProcessor.evaluate(
- _class("DB").as("db1").where(id("name").`=`(string("Sales"))).field("Table").as("tab").
- where(isTrait("Dimension")).
- select(id("db1").field("name").as("dbName"), id("tab").field("name").as("tabName")), g, gp
- )
- validateJson(r, "{\n \"query\":\"DB as db1 where (name = \\\"Sales\\\") Table as tab where DB as db1 where (name = \\\"Sales\\\") Table as tab is Dimension as _src1 select db1.name as dbName, tab.name as tabName\",\n \"dataType\":{\n \"typeName\":\"__tempQueryResultStruct5\",\n \"attributeDefinitions\":[\n {\n \"name\":\"dbName\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":false,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"tabName\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":false,\n \"reverseAttributeName\":null\n }\n
]\n },\n \"rows\":[\n {\n \"$typeName$\":\"__tempQueryResultStruct5\",\n \"dbName\":\"Sales\",\n \"tabName\":\"product_dim\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct5\",\n \"dbName\":\"Sales\",\n \"tabName\":\"time_dim\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct5\",\n \"dbName\":\"Sales\",\n \"tabName\":\"customer_dim\"\n }\n ]\n}")
- }
-
- @Test def testJoinAndSelect2 {
- val r = QueryProcessor.evaluate(
- _class("DB").as("db1").where((id("db1").field("createTime") > int(0))
- .or(id("name").`=`(string("Reporting")))).field("Table").as("tab")
- .select(id("db1").field("name").as("dbName"), id("tab").field("name").as("tabName")), g, gp
- )
- validateJson(r, "{\n \"query\":\"DB as db1 where (createTime > 0) or (name = \\\"Reporting\\\") Table as tab select db1.name as dbName, tab.name as tabName\",\n \"dataType\":{\n \"typeName\":\"__tempQueryResultStruct6\",\n \"attributeDefinitions\":[\n {\n \"name\":\"dbName\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":false,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"tabName\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":false,\n \"reverseAttributeName\":null\n }\n ]\n },\n \"rows\":[\n {\n \"$typeName$\":\"__t
empQueryResultStruct6\",\n \"dbName\":\"Sales\",\n \"tabName\":\"sales_fact\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct6\",\n \"dbName\":\"Sales\",\n \"tabName\":\"product_dim\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct6\",\n \"dbName\":\"Sales\",\n \"tabName\":\"time_dim\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct6\",\n \"dbName\":\"Sales\",\n \"tabName\":\"customer_dim\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct6\",\n \"dbName\":\"Reporting\",\n \"tabName\":\"sales_fact_daily_mv\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct6\",\n \"dbName\":\"Reporting\",\n \"tabName\":\"sales_fact_monthly_mv\"\n }\n ]\n}")
- }
-
- @Test def testJoinAndSelect3 {
- val r = QueryProcessor.evaluate(
- _class("DB").as("db1").where((id("db1").field("createTime") > int(0))
- .and(id("db1").field("name").`=`(string("Reporting")))
- .or(id("db1").hasField("owner"))).field("Table").as("tab")
- .select(id("db1").field("name").as("dbName"), id("tab").field("name").as("tabName")), g, gp
- )
- validateJson(r, "{\n \"query\":\"DB as db1 where (createTime > 0) and (name = \\\"Reporting\\\") or db1 has owner Table as tab select db1.name as dbName, tab.name as tabName\",\n \"dataType\":{\n \"typeName\":\"__tempQueryResultStruct7\",\n \"attributeDefinitions\":[\n {\n \"name\":\"dbName\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":false,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"tabName\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":false,\n \"reverseAttributeName\":null\n }\n ]\n },\n \"rows\":[\n {\n \"
$typeName$\":\"__tempQueryResultStruct7\",\n \"dbName\":\"Sales\",\n \"tabName\":\"sales_fact\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct7\",\n \"dbName\":\"Sales\",\n \"tabName\":\"product_dim\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct7\",\n \"dbName\":\"Sales\",\n \"tabName\":\"time_dim\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct7\",\n \"dbName\":\"Sales\",\n \"tabName\":\"customer_dim\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct7\",\n \"dbName\":\"Reporting\",\n \"tabName\":\"sales_fact_daily_mv\"\n },\n {\n \"$typeName$\":\"__tempQueryResultStruct7\",\n \"dbName\":\"Reporting\",\n \"tabName\":\"sales_fact_monthly_mv\"\n }\n ]\n}")
- }
-
- @Test def testJoinAndSelect4 {
- val r = QueryProcessor.evaluate(
- _class("DB").as("db1").where(id("name").`=`(string("Sales"))).field("Table").as("tab").
- where(isTrait("Dimension")).
- select(id("db1").as("dbO"), id("tab").field("name").as("tabName")), g, gp
- )
- validateJson(r, "{\n \"query\":\"DB as db1 where (name = \\\"Sales\\\") Table as tab where DB as db1 where (name = \\\"Sales\\\") Table as tab is Dimension as _src1 select db1 as dbO, tab.name as tabName\",\n \"dataType\":{\n \"typeName\":\"\",\n \"attributeDefinitions\":[\n {\n \"name\":\"dbO\",\n \"dataTypeName\":\"DB\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":false,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"tabName\",\n \"dataTypeName\":\"string\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":false,\n \"reverseAttributeName\":null\n }\n ]\n },\n \"rows\":[\n {\n \"$
typeName$\":\"\",\n \"dbO\":{\n \"$typeName$\":\"DB\",\n \"version\":0\n },\n \"tabName\":\"product_dim\"\n },\n {\n \"$typeName$\":\"\",\n \"dbO\":{\n \"$typeName$\":\"DB\",\n \"version\":0\n },\n \"tabName\":\"time_dim\"\n },\n {\n \"$typeName$\":\"\",\n \"dbO\":{\n \"$typeName$\":\"DB\",\n \"version\":0\n },\n \"tabName\":\"customer_dim\"\n }\n ]\n}")
- }
-
- @Test def testArrayComparision {
- val p = QueryParser
- val e = p("Partition as p where values = ['2015-01-01']," +
- " table where name = 'sales_fact_daily_mv'," +
- " db where name = 'Reporting' and clusterName = 'test' select p").right.get
- val r = QueryProcessor.evaluate(e, g, gp)
- validateJson(r, """{
- | "query":"Partition as p where (values = [\"2015-01-01\"]) table where (name = \"sales_fact_daily_mv\") db where (name = \"Reporting\") and (clusterName = \"test\") as _src1 select p as p",
- | "dataType":{
- | "typeName":"__tempQueryResultStruct2",
- | "attributeDefinitions":[
- | {
- | "name":"p",
- | "dataTypeName":"Partition",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | }
- | ]
- | },
- | "rows":[
- | {
- | "$typeName$":"__tempQueryResultStruct2",
- | "p":{
- | "$typeName$":"Partition",
- | "version":0
- | }
- | }
- | ]
- |}""".stripMargin)
- }
-
- @Test def testArrayComparisionWithSelectOnArray {
- val p = QueryParser
- val e = p("Partition as p where values = ['2015-01-01']," +
- " table where name = 'sales_fact_daily_mv'," +
- " db where name = 'Reporting' and clusterName = 'test' select p.values").right.get
- val r = QueryProcessor.evaluate(e, g, gp)
- validateJson(r,
- """{
- | "query":"Partition as p where (values = [\"2015-01-01\"]) table where (name = \"sales_fact_daily_mv\") db where (name = \"Reporting\") and (clusterName = \"test\") as _src1 select p.values as p.values",
- | "dataType":{
- | "typeName":"__tempQueryResultStruct2",
- | "attributeDefinitions":[
- | {
- | "name":"p.values",
- | "dataTypeName":"array<string>",
- | "multiplicity":{
- | "lower":0,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | }
- | ]
- | },
- | "rows":[
- | {
- | "$typeName$":"__tempQueryResultStruct2",
- | "p.values":[
- | "2015-01-01"
- | ]
- | }
- | ]
- |}
- """.stripMargin)
- }
-
- @Test def testArrayInWhereClause {
- val p = QueryParser
- val e = p("Partition as p where values = ['2015-01-01']").right.get
- val r = QueryProcessor.evaluate(e, g, gp)
- validateJson(r, """{
- | "query":"Partition as p where (values = [\"2015-01-01\"])",
- | "dataType":{
- | "superTypes":[
- |
- | ],
- | "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
- | "typeName":"Partition",
- | "attributeDefinitions":[
- | {
- | "name":"values",
- | "dataTypeName":"array<string>",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | },
- | {
- | "name":"table",
- | "dataTypeName":"Table",
- | "multiplicity":{
- | "lower":1,
- | "upper":1,
- | "isUnique":false
- | },
- | "isComposite":false,
- | "isUnique":false,
- | "isIndexable":false,
- | "reverseAttributeName":null
- | }
- | ]
- | },
- | "rows":[
- | {
- | "$typeName$":"Partition",
- | "$id$":{
- | "$typeName$":"Partition",
- | "version":0
- | },
- | "values":[
- | "2015-01-01"
- | ],
- | "table":{
- | "$typeName$":"Table",
- | "version":0
- | }
- | }
- | ]
- |}""".stripMargin)
- }
-
- @Test def testArrayWithStruct {
-// val p = new QueryParser
-// val e = p("from LoadProcess select inputTables").right.get
-// val r = QueryProcessor.evaluate(e, g)
- val r = QueryProcessor.evaluate(_class("LoadProcess").field("inputTables"), g, gp)
- validateJson(r)
- }
-
- @Test(expectedExceptions = Array(classOf[ExpressionException]))
- def testNegativeInvalidType {
- val p = QueryParser
- val e = p("from blah").right.get
- QueryProcessor.evaluate(e, g, gp)
- }
-
- @Test def testJoinAndSelect5 {
- val p = QueryParser
- val e = p("Table as t where name = 'sales_fact' db where name = 'Sales' and owner = 'John ETL' select t").right.get
- val r = QueryProcessor.evaluate(e, g, gp)
- validateJson(r)
- }
-}
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/scala/org/apache/atlas/query/GremlinTest2.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/GremlinTest2.scala b/repository/src/test/scala/org/apache/atlas/query/GremlinTest2.scala
deleted file mode 100755
index 880a0c6..0000000
--- a/repository/src/test/scala/org/apache/atlas/query/GremlinTest2.scala
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import org.apache.atlas.{DBSandboxer, TestUtils}
-import org.apache.atlas.discovery.graph.DefaultGraphPersistenceStrategy
-import org.apache.atlas.query.Expressions._class
-import org.apache.atlas.query.Expressions._trait
-import org.apache.atlas.query.Expressions.id
-import org.apache.atlas.repository.graph.GraphBackedMetadataRepository
-import org.apache.atlas.repository.graphdb.AtlasGraph
-import org.apache.atlas.typesystem.types.TypeSystem
-import org.testng.annotations._
-import org.apache.atlas.repository.graph.AtlasGraphProvider
-
-class GremlinTest2 extends BaseGremlinTest {
-
- var g: AtlasGraph[_,_] = null
- var gp:GraphPersistenceStrategies = null;
-
- @BeforeMethod
- def resetRequestContext() {
- TestUtils.resetRequestContext();
- }
-
- @BeforeClass
- def beforeAll() {
- TypeSystem.getInstance().reset()
- QueryTestsUtils.setupTypes
- var repo = new GraphBackedMetadataRepository(null, null);
- gp = new DefaultGraphPersistenceStrategy(repo)
- g = QueryTestsUtils.setupTestGraph(repo)
- }
-
- @AfterClass
- def afterAll() {
- AtlasGraphProvider.cleanup();
- }
-
- @Test def testTraitSelect {
- val r = QueryProcessor.evaluate(_class("Table").as("t").join("Dimension").as("dim").select(id("t"), id("dim")), g)
- validateJson(r, "{\n \"query\":\"Table as t.Dimension as dim select t as _col_0, dim as _col_1\",\n \"dataType\":{\n \"typeName\":\"\",\n \"attributeDefinitions\":[\n {\n \"name\":\"_col_0\",\n \"dataTypeName\":\"Table\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n },\n {\n \"name\":\"_col_1\",\n \"dataTypeName\":\"Dimension\",\n \"multiplicity\":{\n \"lower\":0,\n \"upper\":1,\n \"isUnique\":false\n },\n \"isComposite\":false,\n \"isUnique\":false,\n \"isIndexable\":true,\n \"reverseAttributeName\":null\n }\n ]\n },\n \"rows\":[\n {\n \"$typeName$\":\"\",\n \"_col_1\":{\n \"$typeName$\":\"Dimension\"\n },\n \"_col_0\"
:{\n \"id\":\"3328\",\n \"$typeName$\":\"Table\",\n \"version\":0\n }\n },\n {\n \"$typeName$\":\"\",\n \"_col_1\":{\n \"$typeName$\":\"Dimension\"\n },\n \"_col_0\":{\n \"id\":\"4864\",\n \"$typeName$\":\"Table\",\n \"version\":0\n }\n },\n {\n \"$typeName$\":\"\",\n \"_col_1\":{\n \"$typeName$\":\"Dimension\"\n },\n \"_col_0\":{\n \"id\":\"6656\",\n \"$typeName$\":\"Table\",\n \"version\":0\n }\n }\n ]\n}")
- }
-
- @Test def testTrait {
- val r = QueryProcessor.evaluate(_trait("Dimension"), g)
- validateJson(r)
- }
-
- @Test def testTraitInstance {
- val r = QueryProcessor.evaluate(_trait("Dimension").traitInstance(), g)
- validateJson(r)
- }
-
- @Test def testInstanceAddedToFilter {
- val r = QueryProcessor.evaluate(_trait("Dimension").hasField("typeName"), g)
- validateJson(r)
- }
-
- @Test def testInstanceFilter {
- val r = QueryProcessor.evaluate(_trait("Dimension").traitInstance().hasField("name"), g)
- validateJson(r)
- }
-
- @Test def testLineageWithPath {
- val r = QueryProcessor.evaluate(_class("Table").loop(id("LoadProcess").field("outputTable")).path(), g)
- validateJson(r)
- }
-
- @Test def testLineageAllSelectWithPath {
- val r = QueryProcessor.evaluate(_class("Table").as("src").loop(id("LoadProcess").field("outputTable")).as("dest").
- select(id("src").field("name").as("srcTable"), id("dest").field("name").as("destTable")).path(), g)
- validateJson(r)
- }
-
- @Test def testLineageAllSelectWithPathFromParser {
- val p = QueryParser
- val e = p("Table as src loop (LoadProcess outputTable) as dest " +
- "select src.name as srcTable, dest.name as destTable withPath").right.get
- //Table as src loop (LoadProcess where LoadProcess.outputTable) as dest select src.name as srcTable, dest.name as destTable withPath
- val r = QueryProcessor.evaluate(e, g)
- validateJson(r)
- }
-
- @Test def testLineageAllSelectWithPathFromParser2 {
- val p = QueryParser
-
- val e = p("Table as src loop (`LoadProcess->outputTable` inputTables) as dest " +
- "select src.name as srcTable, dest.name as destTable withPath").right.get
- val r = QueryProcessor.evaluate(e, g)
- validateJson(r)
- }
-
- @Test def testHighLevelLineage {
- val r = InputLineageClosureQuery("Table", "name", "sales_fact_monthly_mv",
- "LoadProcess",
- "inputTables",
- "outputTable",
- None, Some(List("name")), true, getPersistenceStrategy(g), g).evaluate()
- validateJson(r)
- }
-
- @Test def testHighLevelLineageReturnGraph {
- val q = InputLineageClosureQuery("Table", "name", "sales_fact_monthly_mv",
- "LoadProcess",
- "inputTables",
- "outputTable",
- None, Some(List("name")), true, getPersistenceStrategy(g), g);
- val gr = q.evaluate();
- val r = q.graph(gr);
-
- println(r.toInstanceJson)
- //validateJson(r)
- }
-
- @Test def testHighLevelWhereUsed {
- val r = OutputLineageClosureQuery("Table", "name", "sales_fact",
- "LoadProcess",
- "inputTables",
- "outputTable",
- None, Some(List("name")), true, getPersistenceStrategy(g), g).evaluate()
- validateJson(r)
- }
-
- @Test def testHighLevelWhereUsedReturnGraph {
- val q = OutputLineageClosureQuery("Table", "name", "sales_fact",
- "LoadProcess",
- "inputTables",
- "outputTable",
- None, Some(List("name")), true, getPersistenceStrategy(g), g)
- val gr = q.evaluate();
- val r = q.graph(gr);
- println(r.toInstanceJson)
- }
-
- private def getPersistenceStrategy(g: AtlasGraph[_,_]) : GraphPersistenceStrategies = return GraphPersistenceStrategy1(g)
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/scala/org/apache/atlas/query/HiveTitanSample.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/HiveTitanSample.scala b/repository/src/test/scala/org/apache/atlas/query/HiveTitanSample.scala
deleted file mode 100644
index fa0d341..0000000
--- a/repository/src/test/scala/org/apache/atlas/query/HiveTitanSample.scala
+++ /dev/null
@@ -1,243 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import scala.collection.JavaConversions._
-
-
-import org.apache.atlas.typesystem.ITypedReferenceableInstance
-import org.apache.atlas.typesystem.json.TypedReferenceableInstanceSerializer
-import org.apache.atlas.utils.HiveModel.Column
-import org.apache.atlas.utils.HiveModel.DB
-import org.apache.atlas.utils.HiveModel.HiveOrder
-import org.apache.atlas.utils.HiveModel.LoadProcess
-import org.apache.atlas.utils.HiveModel.Partition
-import org.apache.atlas.utils.HiveModel.StorageDescriptor
-import org.apache.atlas.utils.HiveModel.Table
-import org.apache.atlas.utils.HiveModel.View
-import scala.collection.mutable.Buffer
-
-
-
-object HiveTitanSample {
-
- val MetricTrait = "Metric"
- val DimensionTrait = "Dimension"
- val ETLTrait = "ETL"
- val JdbcAccessTrait = "JdbcAccess"
-
- val salesDB = new DB("Sales", "John ETL", 1000, "test")
-
-
-
- val salesFact = new Table("sales_fact",
- salesDB,
- new StorageDescriptor("TextInputFormat",
- "TextOutputFormat", List(new HiveOrder("customer_id", 0))),
- List(
- new Column("time_id", "int"),
- new Column("product_id", "int"),
- new Column("customer_id", "int"),
- new Column("created", "date"),
- new Column("sales", "double").withTrait(MetricTrait)
- )
- );
-
-
- val productDim = new Table("product_dim",
- salesDB,
- new StorageDescriptor("TextInputFormat",
- "TextOutputFormat", List(new HiveOrder("product_id", 0))),
- List(
- new Column("product_id", "int"),
- new Column("product_name", "string"),
- new Column("brand_name", "string")
- )
- ).withTrait(DimensionTrait)
-
- val timeDim = new Table("time_dim",
- salesDB,
- new StorageDescriptor("TextInputFormat",
- "TextOutputFormat", List(new HiveOrder("time_id", 0))),
- List(
- new Column("time_id", "int"),
- new Column("dayOfYear", "int"),
- new Column("weekDay", "string")
- )
- ).withTrait(DimensionTrait)
-
- val customerDim = new Table("customer_dim",
- salesDB,
- new StorageDescriptor("TextInputFormat",
- "TextOutputFormat", List(new HiveOrder("customer_id", 0))),
- List(
- new Column("customer_id", "int"),
- new Column("name", "int"),
- new Column("address", "string").withTrait("PII")
- )
- ).withTrait(DimensionTrait)
-
-
- val reportingDB = new DB("Reporting", "Jane BI", 1500, "test")
- val salesFactDaily = new Table("sales_fact_daily_mv",
- reportingDB,
- new StorageDescriptor("TextInputFormat",
- "TextOutputFormat", List(new HiveOrder("customer_id", 0))),
- List(
- new Column("time_id", "int"),
- new Column("product_id", "int"),
- new Column("customer_id", "int"),
- new Column("sales", "double").withTrait(MetricTrait)
- )
- )
-
- val loadSalesFactDaily = new LoadProcess(
- "loadSalesDaily",
- List(salesFact, timeDim),
- salesFactDaily
- ).withTrait(ETLTrait)
-
-
-
- val productDimView = new View(
- "product_dim_view",
- reportingDB,
- List(productDim)
- ).withTraits(List(DimensionTrait, JdbcAccessTrait))
-
- val customerDimView = new View(
- "customer_dim_view",
- reportingDB,
- List(customerDim)
-
- ).withTraits(List(DimensionTrait, JdbcAccessTrait))
-
- val salesFactMonthly = new Table("sales_fact_monthly_mv",
- reportingDB,
- new StorageDescriptor(
- "TextInputFormat",
- "TextOutputFormat",
- List(new HiveOrder("customer_id", 0))
- ),
- List(
- new Column("time_id", "int"),
- new Column("product_id", "int"),
- new Column("customer_id", "int"),
- new Column("sales", "double").withTrait(MetricTrait)
- )
- )
- val loadSalesFactMonthly = new LoadProcess("loadSalesMonthly",
- List(salesFactDaily), salesFactMonthly).withTraits(List(ETLTrait))
-
- val salesDailyPartition = new Partition(List("2015-01-01"), salesFactDaily)
-
- import scala.collection.JavaConversions._
-
- def getEntitiesToCreate() : Buffer[ITypedReferenceableInstance] = {
- var list = salesDB.getTypedReferencebles() ++
- salesFact.getTypedReferencebles() ++
- productDim.getTypedReferencebles() ++
- timeDim.getTypedReferencebles() ++
- customerDim.getTypedReferencebles() ++
- reportingDB.getTypedReferencebles() ++
- salesFactDaily.getTypedReferencebles() ++
- loadSalesFactDaily.getTypedReferencebles() ++
- productDimView.getTypedReferencebles() ++
- customerDimView.getTypedReferencebles() ++
- salesFactMonthly.getTypedReferencebles() ++
- loadSalesFactMonthly.getTypedReferencebles() ++
- salesDailyPartition.getTypedReferencebles();
- return list;
-
- }
-
-
-
- val GremlinQueries = List(
- // 1. List all DBs
- """g.V.has("typeName", "DB")""",
-
- // 2. List all DB nmes
- """g.V.has("typeName", "DB").name""",
-
- // 3. List all Tables in Reporting DB
- """g.V.has("typeName", "DB").has("name", "Reporting").inE("Table.db").outV""",
- """g.V.has("typeName", "DB").as("db").inE("Table.db").outV.and(_().back("db").has("name", "Reporting"))""",
-
- // 4. List all Tables in Reporting DB, list as D.name, Tbl.name
- """
- g.V.has("typeName", "DB").has("name", "Reporting").as("db").inE("Table.db").outV.as("tbl").select{it.name}{it.name}
- """.stripMargin,
-
- // 5. List all tables that are Dimensions and have the TextInputFormat
- """
- g.V.as("v").and(_().outE("Table.Dimension"), _().out("Table.sd").has("inputFormat", "TextInputFormat")).name
- """.stripMargin,
-
- // 6. List all tables that are Dimensions or have the TextInputFormat
- """
- g.V.as("v").or(_().outE("Table.Dimension"), _().out("Table.sd").has("inputFormat", "TextInputFormat")).name
- """.stripMargin,
-
- // 7. List tables that have at least 1 PII column
- """
- g.V.has("typeName", "Table").as("tab").out("Table.sd").in("Column.sd").as("column"). \
- out("Column.PII").select.groupBy{it.getColumn("tab")}{it.getColumn("column")}{[ "c" : it.size]}.cap.scatter.filter{it.value.c > 0}. \
- transform{it.key}.name """.stripMargin
-
- // 7.a from Table as tab -> g.V.has("typeName", "Table").as("tab")
- // 7.b sd.Column as column -> out("Table.sd").in("Column.sd").as("column")
- // 7.c is PII -> out("Column.PII")
- // 7.d select tab, column -> select{it}{it}
- // 7.e groupBy tab compute count(column) as c
- // 7.f where c > 0
-
- // 7.a Alias(Type("Table"), "tab")
- // 7b. Field("sd", Alias(Type("Table"), "tab"))
- // Alias(Field("Column", Field("sd", Alias(Type("Table"), "tab"))), "column")
- // 7.c Filter(is("PII"), Alias(Field("Column", Field("sd", Alias(Type("Table"), "tab"))), "column"))
- // 7.d
- )
-}
-
-//object TestApp extends App with GraphUtils {
-//
-// val g: TitanGraph = TitanGraphProvider.getGraphInstance
-// val manager: ScriptEngineManager = new ScriptEngineManager
-// val engine: ScriptEngine = manager.getEngineByName("gremlin-groovy")
-// val bindings: Bindings = engine.createBindings
-// bindings.put("g", g)
-//
-// val hiveGraphFile = FileUtils.getTempDirectory().getPath + File.separator + System.nanoTime() + ".gson"
-// HiveTitanSample.writeGson(hiveGraphFile)
-// bindings.put("hiveGraphFile", hiveGraphFile)
-//
-// try {
-// engine.eval("g.loadGraphSON(hiveGraphFile)", bindings)
-//
-// println(engine.eval("g.V.typeName.toList()", bindings))
-//
-// HiveTitanSample.GremlinQueries.foreach { q =>
-// println(q)
-// println("Result: " + engine.eval(q + ".toList()", bindings))
-// }
-// } finally {
-// g.shutdown()
-// }
-//}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/atlas/blob/435fe3fb/repository/src/test/scala/org/apache/atlas/query/LexerTest.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/LexerTest.scala b/repository/src/test/scala/org/apache/atlas/query/LexerTest.scala
deleted file mode 100755
index 10237a9..0000000
--- a/repository/src/test/scala/org/apache/atlas/query/LexerTest.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.query
-
-import org.apache.atlas.DBSandboxer
-import org.testng.Assert
-import org.testng.annotations.{Listeners, Test}
-
-import scala.util.parsing.input.CharArrayReader
-
-class LexerTest {
-
- def scan(str: String): QueryParser.lexical.ParseResult[_] = {
- val l = QueryParser.lexical
- var s: l.Input = new CharArrayReader(str.toCharArray)
- var r = (l.whitespace.? ~ l.token)(s)
- s = r.next
-
- while (r.successful && !s.atEnd) {
- s = r.next
- if (!s.atEnd) {
- r = (l.whitespace.? ~ l.token)(s)
- }
- }
- r.asInstanceOf[QueryParser.lexical.ParseResult[_]]
- }
-
- @Test def testSimple {
- val r = scan("""DB where db1.name""")
- Assert.assertTrue(r.successful)
-
- }
-}