You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@usergrid.apache.org by sn...@apache.org on 2015/02/26 18:11:43 UTC
[42/50] [abbrv] incubator-usergrid git commit: This closes #159
http://git-wip-us.apache.org/repos/asf/incubator-usergrid/blob/bedf5afb/stack/services/src/test/java/org/apache/usergrid/management/export/ExportServiceIT.java
----------------------------------------------------------------------
diff --cc stack/services/src/test/java/org/apache/usergrid/management/export/ExportServiceIT.java
index 73ce1b1,0000000..3a6d8f9
mode 100644,000000..100644
--- a/stack/services/src/test/java/org/apache/usergrid/management/export/ExportServiceIT.java
+++ b/stack/services/src/test/java/org/apache/usergrid/management/export/ExportServiceIT.java
@@@ -1,1115 -1,0 +1,1116 @@@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.usergrid.management.export;
+
+
+import java.io.File;
+import java.io.FileReader;
+import java.util.*;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.util.concurrent.Service;
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.usergrid.batch.service.JobSchedulerService;
+import org.jclouds.ContextBuilder;
+import org.jclouds.blobstore.BlobStore;
+import org.jclouds.blobstore.BlobStoreContext;
+import org.jclouds.blobstore.domain.Blob;
+import org.jclouds.http.config.JavaUrlHttpCommandExecutorServiceModule;
+import org.jclouds.logging.log4j.config.Log4JLoggingModule;
+import org.jclouds.netty.config.NettyPayloadModule;
+import org.junit.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.usergrid.NewOrgAppAdminRule;
+import org.apache.usergrid.ServiceITSetup;
+import org.apache.usergrid.ServiceITSetupImpl;
+import org.apache.usergrid.batch.JobExecution;
+import org.apache.usergrid.cassandra.ClearShiroSubject;
+
+import org.apache.usergrid.management.ApplicationInfo;
+import org.apache.usergrid.management.OrganizationInfo;
+import org.apache.usergrid.management.UserInfo;
+import org.apache.usergrid.persistence.Entity;
+import org.apache.usergrid.persistence.EntityManager;
+import org.apache.usergrid.persistence.SimpleEntityRef;
+import org.apache.usergrid.persistence.entities.JobData;
+import org.apache.usergrid.setup.ConcurrentProcessSingleton;
+
+import com.amazonaws.SDKGlobalConfiguration;
+import com.google.common.collect.ImmutableSet;
+import com.google.inject.Module;
+
+import static org.apache.usergrid.TestHelper.newUUIDString;
+import static org.apache.usergrid.TestHelper.uniqueApp;
+import static org.apache.usergrid.TestHelper.uniqueOrg;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+
+/**
+ *
+ *
+ */
+public class ExportServiceIT {
+
+ private static final Logger logger = LoggerFactory.getLogger( ExportServiceIT.class );
+
+
+ @ClassRule
+ public static final ServiceITSetup setup = new ServiceITSetupImpl( );
+
+ @Rule
+ public ClearShiroSubject clearShiroSubject = new ClearShiroSubject();
+
+ @Rule
+ public NewOrgAppAdminRule newOrgAppAdminRule = new NewOrgAppAdminRule( setup );
+
+ // app-level data generated only once
+ private UserInfo adminUser;
+ private OrganizationInfo organization;
+ private UUID applicationId;
+
+ private static String bucketPrefix;
+
+ private String bucketName;
+
+ @Before
+ public void setup() throws Exception {
+ logger.info("in setup");
+
+ // start the scheduler after we're all set up
+ try {
+
+ JobSchedulerService jobScheduler = ConcurrentProcessSingleton.getInstance().getSpringResource().getBean(JobSchedulerService.class);
+ if (jobScheduler.state() != Service.State.RUNNING) {
- jobScheduler.startAndWait();
++ jobScheduler.startAsync();
++ jobScheduler.awaitRunning();
+ }
+ } catch ( Exception e ) {
+ logger.warn("Ignoring error starting jobScheduler, already started?", e);
+ }
+
+ adminUser = newOrgAppAdminRule.getAdminInfo();
+ organization = newOrgAppAdminRule.getOrganizationInfo();
+ applicationId = newOrgAppAdminRule.getApplicationInfo().getId();
+
+ setup.getEmf().refreshIndex();
+ }
+
+
    /**
     * Guards the S3-dependent tests: they are skipped (via JUnit Assume) unless
     * AWS credentials and a bucket name are supplied as system properties.
     * Also derives a per-run bucket name with a random suffix.
     */
    @Before
    public void before() {

        // all three properties must be present for the S3 integration tests to run
        boolean configured =
            !StringUtils.isEmpty(System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR))
                && !StringUtils.isEmpty(System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR))
                && !StringUtils.isEmpty(System.getProperty("bucketName"));

        if ( !configured ) {
            logger.warn("Skipping test because {}, {} and bucketName not " +
                    "specified as system properties, e.g. in your Maven settings.xml file.",
                new Object[] {
                    SDKGlobalConfiguration.SECRET_KEY_ENV_VAR,
                    SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR
                });
        }

        Assume.assumeTrue( configured );

        adminUser = newOrgAppAdminRule.getAdminInfo();
        organization = newOrgAppAdminRule.getOrganizationInfo();
        applicationId = newOrgAppAdminRule.getApplicationInfo().getId();

        // random lowercase suffix keeps concurrent runs from colliding on one bucket
        bucketPrefix = System.getProperty( "bucketName" );
        bucketName = bucketPrefix + RandomStringUtils.randomAlphanumeric(10).toLowerCase();
    }
+
+
+ //Tests to make sure we can call the job with mock data and it runs.
+ @Ignore("Connections won't save when run with maven, but on local builds it will.")
+ public void testConnectionsOnCollectionExport() throws Exception {
+
+ File f = null;
+ int indexCon = 0;
+
+ try {
+ f = new File( "testFileConnections.json" );
+ }
+ catch ( Exception e ) {
+ // consumed because this checks to see if the file exists.
+ // If it doesn't then don't do anything and carry on.
+ }
+ f.deleteOnExit();
+
+ S3Export s3Export = new MockS3ExportImpl("testFileConnections.json" );
+
+ ExportService exportService = setup.getExportService();
+
+ String appName = newOrgAppAdminRule.getApplicationInfo().getName();
+ HashMap<String, Object> payload = payloadBuilder(appName);
+
+ payload.put( "organizationId", organization.getUuid() );
+ payload.put( "applicationId", applicationId );
+ payload.put( "collectionName", "users" );
+
+ EntityManager em = setup.getEmf().getEntityManager( applicationId );
+ //intialize user object to be posted
+ Map<String, Object> userProperties = null;
+ Entity[] entity;
+ entity = new Entity[2];
+ //creates entities
+ for ( int i = 0; i < 2; i++ ) {
+ userProperties = new LinkedHashMap<String, Object>();
+ userProperties.put( "username", "meatIsGreat" + i );
+ userProperties.put( "email", "grey" + i + "@anuff.com" );//String.format( "test%i@anuff.com", i ) );
+
+ entity[i] = em.create( "users", userProperties );
+ }
+ //creates connections
+ em.createConnection( em.get( new SimpleEntityRef( "user", entity[0].getUuid() ) ), "Vibrations",
+ em.get( new SimpleEntityRef( "user", entity[1].getUuid() ) ) );
+ em.createConnection(
+ em.get( new SimpleEntityRef( "user", entity[1].getUuid()) ), "Vibrations",
+ em.get( new SimpleEntityRef( "user", entity[0].getUuid()) ) );
+
+ UUID exportUUID = exportService.schedule( payload );
+
+ TypeReference<HashMap<String,Object>> typeRef
+ = new TypeReference<HashMap<String,Object>>() {};
+
+ ObjectMapper mapper = new ObjectMapper();
+ HashMap<String,Object> jsonMap = mapper.readValue(new FileReader( f ), typeRef);
+
+ Map collectionsMap = (Map)jsonMap.get("collections");
+ List usersList = (List)collectionsMap.get("users");
+
+ int indexApp = 0;
+ for ( indexApp = 0; indexApp < usersList.size(); indexApp++ ) {
+ Map user = (Map)usersList.get( indexApp );
+ Map userProps = (Map)user.get("Metadata");
+ String uuid = ( String ) userProps.get( "uuid" );
+ if ( entity[0].getUuid().toString().equals( uuid ) ) {
+ break;
+ }
+ }
+
+ assertTrue("Uuid was not found in exported files. ", indexApp < usersList.size());
+
+ Map userMap = (Map)usersList.get( indexApp );
+ Map connectionsMap = (Map)userMap.get("connections");
+ assertNotNull( connectionsMap );
+
+ List vibrationsList = (List)connectionsMap.get( "Vibrations" );
+
+ assertNotNull( vibrationsList );
+
+ f.deleteOnExit();
+ }
+
+
    /**
     * Exports the whole application (no collectionName in the payload) through
     * a mock S3 exporter and verifies that "Vibrations" connections between two
     * users appear in the exported JSON.
     */
    @Test //Connections won't save when run with maven, but on local builds it will.
    public void testConnectionsOnApplicationEndpoint() throws Exception {

        File f = null;

        try {
            f = new File( "testConnectionsOnApplicationEndpoint.json" );
        }
        catch ( Exception e ) {
            // consumed because this checks to see if the file exists.
            // If it doesn't then don't do anything and carry on.
        }

        String fileName = "testConnectionsOnApplicationEndpoint.json";

        // mock exporter writes to the local file instead of S3
        S3Export s3Export = new MockS3ExportImpl( "testConnectionsOnApplicationEndpoint.json" );

        ExportService exportService = setup.getExportService();

        String appName = newOrgAppAdminRule.getApplicationInfo().getName();
        HashMap<String, Object> payload = payloadBuilder( appName );

        payload.put( "organizationId", organization.getUuid() );
        payload.put( "applicationId", applicationId );

        EntityManager em = setup.getEmf().getEntityManager( applicationId );

        // initialize user objects to be posted
        Map<String, Object> userProperties = null;
        Entity[] entity;
        entity = new Entity[2];

        // creates entities
        for ( int i = 0; i < 2; i++ ) {
            userProperties = new LinkedHashMap<String, Object>();
            userProperties.put( "username", "billybob" + i );
            userProperties.put( "email", "test" + i + "@anuff.com" );

            entity[i] = em.create( "users", userProperties );
        }
        em.refreshIndex();
        // creates connections in both directions between the two users
        em.createConnection( em.get( new SimpleEntityRef( "user", entity[0].getUuid() ) ), "Vibrations",
            em.get( new SimpleEntityRef( "user", entity[1].getUuid() ) ) );
        em.createConnection(
            em.get( new SimpleEntityRef( "user", entity[1].getUuid())), "Vibrations",
            em.get( new SimpleEntityRef( "user", entity[0].getUuid())) );

        UUID exportUUID = exportService.schedule( payload );

        // create and initialize jobData returned in JobExecution.
        JobData jobData = jobDataCreator(payload,exportUUID,s3Export);

        JobExecution jobExecution = mock( JobExecution.class );
        when( jobExecution.getJobData() ).thenReturn( jobData );

        // run the export synchronously so the output file exists before we read it
        exportService.doExport( jobExecution );

        TypeReference<HashMap<String,Object>> typeRef
            = new TypeReference<HashMap<String,Object>>() {};

        ObjectMapper mapper = new ObjectMapper();
        HashMap<String,Object> jsonMap = mapper.readValue(new FileReader( f ), typeRef);

        Map collectionsMap = (Map)jsonMap.get("collections");
        List usersList = (List)collectionsMap.get("users");

        // locate entity[0]'s record in the exported users list by uuid
        int indexApp = 0;
        for ( indexApp = 0; indexApp < usersList.size(); indexApp++ ) {
            Map user = (Map)usersList.get( indexApp );
            Map userProps = (Map)user.get("Metadata");
            String uuid = ( String ) userProps.get( "uuid" );
            if ( entity[0].getUuid().toString().equals( uuid ) ) {
                break;
            }
        }

        assertTrue("Uuid was not found in exported files. ", indexApp < usersList.size());

        Map userMap = (Map)usersList.get( indexApp );
        Map connectionsMap = (Map)userMap.get("connections");
        assertNotNull( connectionsMap );

        List vibrationsList = (List)connectionsMap.get( "Vibrations" );

        assertNotNull( vibrationsList );

        f.deleteOnExit();
    }
+
    /**
     * Exports the "roles" collection of the rule's organization and asserts
     * that entities belonging to a second, freshly created organization do NOT
     * leak into the export.
     */
    @Test
    public void testExportOneOrgCollectionEndpoint() throws Exception {

        File f = null;


        try {
            f = new File( "exportOneOrg.json" );
        }
        catch ( Exception e ) {
            //consumed because this checks to see if the file exists.
            // If it doesn't then don't do anything and carry on.
        }

        //create another org to ensure we don't export it
        newOrgAppAdminRule.createOwnerAndOrganization(
            "noExport"+newUUIDString(),
            "junkUserName"+newUUIDString(),
            "junkRealName"+newUUIDString(),
            newUUIDString()+"ugExport@usergrid.com",
            "123456789" );

        S3Export s3Export = new MockS3ExportImpl("exportOneOrg.json");
        //        s3Export.setFilename( "exportOneOrg.json" );
        ExportService exportService = setup.getExportService();

        String appName = newOrgAppAdminRule.getApplicationInfo().getName();
        HashMap<String, Object> payload = payloadBuilder(appName);

        payload.put( "organizationId", organization.getUuid() );
        payload.put( "applicationId", applicationId );
        payload.put( "collectionName", "roles" );

        UUID exportUUID = exportService.schedule( payload );

        JobData jobData = jobDataCreator(payload,exportUUID,s3Export);


        JobExecution jobExecution = mock( JobExecution.class );
        when( jobExecution.getJobData() ).thenReturn( jobData );

        exportService.doExport( jobExecution );

        TypeReference<HashMap<String,Object>> typeRef
            = new TypeReference<HashMap<String,Object>>() {};

        ObjectMapper mapper = new ObjectMapper();
        Map<String,Object> jsonMap = mapper.readValue(new FileReader( f ), typeRef);

        Map collectionsMap = (Map)jsonMap.get("collections");
        String collectionName = (String)collectionsMap.keySet().iterator().next();
        List collection = (List)collectionsMap.get(collectionName);

        // none of the exported entities may come from the "noExport" org
        for ( Object o : collection ) {
            Map entityMap = (Map)o;
            Map metadataMap = (Map)entityMap.get("Metadata");
            String entityName = (String)metadataMap.get("name");
            assertFalse( "junkRealName".equals( entityName ) );
        }
        f.deleteOnExit();
    }
+
+
    //
    //creation of files doesn't always delete itself
    /**
     * Creates a user in a brand-new application, then exports the rule's
     * application and asserts the new app's "junkRealName" entity does not
     * appear in the output.
     *
     * NOTE(review): entities are created in {@code appId} (the new app) while
     * the export payload targets the rule's {@code applicationId} — presumably
     * intentional, so the assertion verifies cross-app isolation of the
     * export; confirm against the export service's semantics.
     */
    @Test
    public void testExportOneAppOnCollectionEndpoint() throws Exception {

        final String orgName = uniqueOrg();
        final String appName = uniqueApp();


        File f = null;

        try {
            f = new File( "exportOneApp.json" );
        }
        catch ( Exception e ) {
            // consumed because this checks to see if the file exists.
            // If it doesn't, don't do anything and carry on.
        }
        f.deleteOnExit();


        UUID appId = setup.getEmf().createApplication( orgName, appName );


        EntityManager em = setup.getEmf().getEntityManager( appId );
        // initialize user object to be posted
        Map<String, Object> userProperties = null;
        Entity[] entity;
        entity = new Entity[1];
        //creates entities
        for ( int i = 0; i < 1; i++ ) {
            userProperties = new LinkedHashMap<String, Object>();
            userProperties.put( "username", "junkRealName" );
            userProperties.put( "email", "test" + i + "@anuff.com" );
            entity[i] = em.create( "user", userProperties );
        }

        S3Export s3Export = new MockS3ExportImpl("exportOneApp.json");
        //s3Export.setFilename( "exportOneApp.json" );
        ExportService exportService = setup.getExportService();

        HashMap<String, Object> payload = payloadBuilder(appName);

        payload.put( "organizationId", organization.getUuid() );
        payload.put( "applicationId", applicationId );

        UUID exportUUID = exportService.schedule( payload );

        JobData jobData = jobDataCreator(payload,exportUUID,s3Export);

        JobExecution jobExecution = mock( JobExecution.class );
        when( jobExecution.getJobData() ).thenReturn( jobData );

        exportService.doExport( jobExecution );

        TypeReference<HashMap<String,Object>> typeRef
            = new TypeReference<HashMap<String,Object>>() {};

        ObjectMapper mapper = new ObjectMapper();
        Map<String,Object> jsonMap = mapper.readValue(new FileReader( f ), typeRef);

        Map collectionsMap = (Map)jsonMap.get("collections");
        String collectionName = (String)collectionsMap.keySet().iterator().next();
        List collection = (List)collectionsMap.get(collectionName);

        // the other app's entity must not be present in this app's export
        for ( Object o : collection ) {
            Map entityMap = (Map)o;
            Map metadataMap = (Map)entityMap.get("Metadata");
            String entityName = (String)metadataMap.get("name");
            assertFalse( "junkRealName".equals( entityName ) );
        }
    }
+
+
    /**
     * Exports the rule's application with a query filter
     * ({@code username = 'junkRealName'}) and asserts no exported entity has
     * Metadata name "junkRealName" (the created user's "name" property is
     * "me", not its username).
     */
    @Test
    public void testExportOneAppOnApplicationEndpointWQuery() throws Exception {

        File f = null;
        try {
            f = new File( "exportOneAppWQuery.json" );
        }
        catch ( Exception e ) {
            // consumed because this checks to see if the file exists.
            // If it doesn't, don't do anything and carry on.
        }
        f.deleteOnExit();


        EntityManager em = setup.getEmf().getEntityManager( applicationId );
        // initialize user object to be posted
        Map<String, Object> userProperties = null;
        Entity[] entity;
        entity = new Entity[1];
        //creates entities
        for ( int i = 0; i < 1; i++ ) {
            userProperties = new LinkedHashMap<String, Object>();
            userProperties.put( "name", "me" );
            userProperties.put( "username", "junkRealName" );
            userProperties.put( "email", "burp" + i + "@anuff.com" );
            entity[i] = em.create( "users", userProperties );
        }

        S3Export s3Export = new MockS3ExportImpl("exportOneAppWQuery.json" );
        ExportService exportService = setup.getExportService();

        String appName = newOrgAppAdminRule.getApplicationInfo().getName();
        HashMap<String, Object> payload = payloadBuilder(appName);

        // query restricts the export to the single user created above
        payload.put( "query", "select * where username = 'junkRealName'" );
        payload.put( "organizationId", organization.getUuid() );
        payload.put( "applicationId", applicationId );

        UUID exportUUID = exportService.schedule( payload );

        JobData jobData = jobDataCreator(payload,exportUUID,s3Export);

        JobExecution jobExecution = mock( JobExecution.class );
        when( jobExecution.getJobData() ).thenReturn( jobData );

        // make the new entity visible to the query before exporting
        em.refreshIndex();

        exportService.doExport( jobExecution );

        TypeReference<HashMap<String,Object>> typeRef
            = new TypeReference<HashMap<String,Object>>() {};

        ObjectMapper mapper = new ObjectMapper();
        Map<String,Object> jsonMap = mapper.readValue(new FileReader( f ), typeRef);

        Map collectionsMap = (Map)jsonMap.get("collections");
        String collectionName = (String)collectionsMap.keySet().iterator().next();
        List collection = (List)collectionsMap.get( collectionName );

        for ( Object o : collection ) {
            Map entityMap = (Map)o;
            Map metadataMap = (Map)entityMap.get("Metadata");
            String entityName = (String)metadataMap.get("name");
            assertFalse( "junkRealName".equals( entityName ) );
        }
    }
+
+
    /**
     * Creates five entities in a custom "qtsMagics" collection and verifies a
     * collection-scoped export contains exactly those five entities.
     */
    @Test
    public void testExportOneCollection() throws Exception {

        File f = null;
        int entitiesToCreate = 5;

        try {
            f = new File( "exportOneCollection.json" );
        }
        catch ( Exception e ) {
            // consumed because this checks to see if the file exists.
            // If it doesn't, don't do anything and carry on.
        }

        f.deleteOnExit();

        EntityManager em = setup.getEmf().getEntityManager( applicationId );

        // em.createApplicationCollection( "qtsMagics" );
        // initialize user object to be posted
        Map<String, Object> userProperties = null;
        Entity[] entity;
        entity = new Entity[entitiesToCreate];
        //creates entities
        for ( int i = 0; i < entitiesToCreate; i++ ) {
            userProperties = new LinkedHashMap<String, Object>();
            userProperties.put( "username", "billybob" + i );
            userProperties.put( "email", "test" + i + "@anuff.com" );
            entity[i] = em.create( "qtsMagics", userProperties );
        }

        S3Export s3Export = new MockS3ExportImpl("exportOneCollection.json" );
        ExportService exportService = setup.getExportService();

        String appName = newOrgAppAdminRule.getApplicationInfo().getName();
        HashMap<String, Object> payload = payloadBuilder(appName);

        payload.put( "organizationId", organization.getUuid() );
        payload.put( "applicationId", applicationId );
        payload.put( "collectionName", "qtsMagics" );

        UUID exportUUID = exportService.schedule( payload );

        JobData jobData = jobDataCreator(payload,exportUUID,s3Export);

        JobExecution jobExecution = mock( JobExecution.class );
        when( jobExecution.getJobData() ).thenReturn( jobData );

        // index the new entities before the export job queries them
        em.refreshIndex();

        exportService.doExport( jobExecution );

        TypeReference<HashMap<String,Object>> typeRef
            = new TypeReference<HashMap<String,Object>>() {};

        ObjectMapper mapper = new ObjectMapper();
        HashMap<String,Object> jsonMap = mapper.readValue(new FileReader( f ), typeRef);

        Map collectionsMap = (Map)jsonMap.get("collections");
        String collectionName = (String)collectionsMap.keySet().iterator().next();
        List collection = (List)collectionsMap.get( collectionName );

        assertEquals(entitiesToCreate, collection.size());
    }
+
+
    /**
     * Creates five entities in a "baconators" collection, exports the
     * collection with a query matching only "billybob0", and verifies exactly
     * one entity is exported.
     */
    @Test
    public void testExportOneCollectionWQuery() throws Exception {

        File f = null;
        int entitiesToCreate = 5;

        try {
            f = new File( "exportOneCollectionWQuery.json" );
        }
        catch ( Exception e ) {
            // consumed because this checks to see if the file exists.
            // If it doesn't, don't do anything and carry on.
        }
        f.deleteOnExit();

        EntityManager em = setup.getEmf().getEntityManager( applicationId );
        em.createApplicationCollection( "baconators" );
        em.refreshIndex();

        //initialize user object to be posted
        Map<String, Object> userProperties = null;
        Entity[] entity;
        entity = new Entity[entitiesToCreate];

        // creates entities
        for ( int i = 0; i < entitiesToCreate; i++ ) {
            userProperties = new LinkedHashMap<String, Object>();
            userProperties.put( "username", "billybob" + i );
            userProperties.put( "email", "test" + i + "@anuff.com" );
            entity[i] = em.create( "baconators", userProperties );
        }

        S3Export s3Export = new MockS3ExportImpl("exportOneCollectionWQuery.json");
        ExportService exportService = setup.getExportService();

        String appName = newOrgAppAdminRule.getApplicationInfo().getName();
        HashMap<String, Object> payload = payloadBuilder(appName);

        // only "billybob0" should match this query
        payload.put( "query", "select * where username contains 'billybob0'" );
        payload.put( "organizationId", organization.getUuid() );
        payload.put( "applicationId", applicationId );
        payload.put( "collectionName", "baconators" );

        UUID exportUUID = exportService.schedule( payload );

        JobData jobData = jobDataCreator( payload, exportUUID, s3Export );

        JobExecution jobExecution = mock( JobExecution.class );
        when( jobExecution.getJobData() ).thenReturn( jobData );

        // index the new entities before the export job queries them
        em.refreshIndex();

        exportService.doExport( jobExecution );

        TypeReference<HashMap<String,Object>> typeRef
            = new TypeReference<HashMap<String,Object>>() {};

        ObjectMapper mapper = new ObjectMapper();
        Map<String,Object> jsonMap = mapper.readValue(new FileReader( f ), typeRef);

        Map collectionsMap = (Map)jsonMap.get("collections");
        String collectionName = (String)collectionsMap.keySet().iterator().next();
        List collectionList = (List)collectionsMap.get( collectionName );

        assertEquals(1, collectionList.size());
    }
+
+
    /**
     * Builds several organizations each with apps and entities, exports one
     * org through the mock S3 exporter, and checks the exported "users"
     * collection size. Ignored: the export format does not support exporting
     * organizations.
     */
    @Test
    @Ignore("this is a meaningless test because our export format does not support export of organizations")
    public void testExportOneOrganization() throws Exception {

        // create a bunch of organizations, each with applications and collections of entities

        int maxOrgs = 3;
        int maxApps = 3;
        int maxEntities = 20;

        List<ApplicationInfo> appsMade = new ArrayList<>();
        List<OrganizationInfo> orgsMade = new ArrayList<>();

        for ( int orgIndex = 0; orgIndex < maxOrgs; orgIndex++ ) {


            String orgName = "org_" + RandomStringUtils.randomAlphanumeric(10);
            OrganizationInfo orgMade = setup.getMgmtSvc().createOrganization( orgName, adminUser, true );
            orgsMade.add( orgMade );
            logger.debug("Created org {}", orgName);

            for ( int appIndex = 0; appIndex < maxApps; appIndex++ ) {

                String appName = "app_" + RandomStringUtils.randomAlphanumeric(10);
                ApplicationInfo appMade = setup.getMgmtSvc().createApplication( orgMade.getUuid(), appName );
                appsMade.add( appMade );
                logger.debug("Created app {}", appName);

                for (int entityIndex = 0; entityIndex < maxEntities; entityIndex++) {

                    EntityManager appEm = setup.getEmf().getEntityManager( appMade.getId() );
                    // double-brace initializer: anonymous HashMap subclass with fixed properties
                    appEm.create( appName + "_type", new HashMap<String, Object>() {{
                        put("property1", "value1");
                        put("property2", "value2");
                    }});
                }
            }
        }

        // export one of the organizations only, using mock S3 export that writes to local disk

        String exportFileName = "exportOneOrganization.json";
        S3Export s3Export = new MockS3ExportImpl( exportFileName );

        HashMap<String, Object> payload = payloadBuilder(appsMade.get(0).getName());
        payload.put("organizationId", orgsMade.get(0).getUuid() );
        payload.put( "applicationId", appsMade.get(0).getId() );

        ExportService exportService = setup.getExportService();
        UUID exportUUID = exportService.schedule( payload );

        JobData jobData = jobDataCreator( payload, exportUUID, s3Export );
        JobExecution jobExecution = mock( JobExecution.class );
        when( jobExecution.getJobData() ).thenReturn(jobData);

        exportService.doExport( jobExecution );

        // finally, we check that file was created and contains the right number of entities in the array

        File exportedFile = new File( exportFileName );
        exportedFile.deleteOnExit();

        TypeReference<HashMap<String,Object>> typeRef
            = new TypeReference<HashMap<String,Object>>() {};

        ObjectMapper mapper = new ObjectMapper();
        Map<String,Object> jsonMap = mapper.readValue(new FileReader( exportedFile ), typeRef);
        Map collectionsMap = (Map)jsonMap.get("collections");

        List collectionList = (List)collectionsMap.get("users");

        assertEquals( 3, collectionList.size() );
    }
+
+
+ @Test
+ public void testExportDoJob() throws Exception {
+
+ String appName = newOrgAppAdminRule.getApplicationInfo().getName();
+ HashMap<String, Object> payload = payloadBuilder(appName);
+
+ payload.put( "organizationId", organization.getUuid() );
+ payload.put( "applicationId", applicationId );
+
+
+ JobData jobData = new JobData();
+ jobData.setProperty( "jobName", "exportJob" );
+
+ // this needs to be populated with properties of export info
+ jobData.setProperty( "exportInfo", payload );
+
+ JobExecution jobExecution = mock( JobExecution.class );
+
+ when( jobExecution.getJobData() ).thenReturn( jobData );
+ when( jobExecution.getJobId() ).thenReturn( UUID.randomUUID() );
+
+ ExportJob job = new ExportJob();
+ ExportService eS = mock( ExportService.class );
+ job.setExportService( eS );
+ try {
+ job.doJob( jobExecution );
+ }
+ catch ( Exception e ) {
+ logger.error("Error doing job", e);
+ assert ( false );
+ }
+ assert ( true );
+ }
+
+ //tests that with empty job data, the export still runs.
+ @Test
+ public void testExportEmptyJobData() throws Exception {
+
+ JobData jobData = new JobData();
+
+ JobExecution jobExecution = mock( JobExecution.class );
+
+ when( jobExecution.getJobData() ).thenReturn( jobData );
+ when( jobExecution.getJobId() ).thenReturn( UUID.randomUUID() );
+
+ ExportJob job = new ExportJob();
+ S3Export s3Export = mock( S3Export.class );
+ //setup.getExportService().setS3Export( s3Export );
+ job.setExportService( setup.getExportService() );
+ try {
+ job.doJob( jobExecution );
+ }
+ catch ( Exception e ) {
+ assert ( false );
+ }
+ assert ( true );
+ }
+
+
+ @Test
+ public void testNullJobExecution() {
+
+ JobData jobData = new JobData();
+
+ JobExecution jobExecution = mock( JobExecution.class );
+
+ when( jobExecution.getJobData() ).thenReturn( jobData );
+ when( jobExecution.getJobId() ).thenReturn( UUID.randomUUID() );
+
+ ExportJob job = new ExportJob();
+ S3Export s3Export = mock( S3Export.class );
+ // setup.getExportService().setS3Export( s3Export );
+ job.setExportService( setup.getExportService() );
+ try {
+ job.doJob( jobExecution );
+ }
+ catch ( Exception e ) {
+ assert ( false );
+ }
+ assert ( true );
+ }
+
+
+ @Test
+ @Ignore // TODO: fix this test...
+ public void testIntegration100EntitiesOn() throws Exception {
+
+ logger.debug("testIntegration100EntitiesOn(): starting...");
+
+ ExportService exportService = setup.getExportService();
+
+ String appName = newOrgAppAdminRule.getApplicationInfo().getName();
+ HashMap<String, Object> payload = payloadBuilder(appName);
+
+ payload.put( "organizationId", organization.getUuid() );
+ payload.put( "applicationId", applicationId );
+
+ // create five applications each with collection of five entities
+
+ for ( int i = 0; i < 5; i++ ) {
+
+ ApplicationInfo appMade = setup.getMgmtSvc().createApplication( organization.getUuid(), "superapp" + i );
+ EntityManager appEm = setup.getEmf().getEntityManager( appMade.getId() );
+
+ String collName = "superappCol" + i;
+ appEm.createApplicationCollection(collName);
+
+ Map<String, Object> entityLevelProperties = null;
+ Entity[] entNotCopied;
+ entNotCopied = new Entity[5];
+
+ for ( int index = 0; index < 5; index++ ) {
+ entityLevelProperties = new LinkedHashMap<String, Object>();
+ entityLevelProperties.put( "username", "bobso" + index );
+ entityLevelProperties.put( "email", "derp" + index + "@anuff.com" );
+ entNotCopied[index] = appEm.create( collName, entityLevelProperties );
+ }
+ }
+
+ // export the organization containing those apps and collections
+
+ UUID exportUUID = exportService.schedule( payload );
+
+ int maxRetries = 100;
+ int retries = 0;
+ while ( !exportService.getState( exportUUID ).equals( "FINISHED" ) && retries++ < maxRetries ) {
+ Thread.sleep(100);
+ }
+
+ String accessId = System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR );
+ String secretKey = System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR );
+ Properties overrides = new Properties();
+ overrides.setProperty( "s3" + ".identity", accessId );
+ overrides.setProperty( "s3" + ".credential", secretKey );
+
+ // test that we can find the file that were exported to S3
+
+ BlobStore blobStore = null;
+ try {
+
+ final Iterable<? extends Module> MODULES = ImmutableSet.of(
+ new JavaUrlHttpCommandExecutorServiceModule(),
+ new Log4JLoggingModule(),
+ new NettyPayloadModule());
+
+ BlobStoreContext context = ContextBuilder.newBuilder("s3")
+ .credentials(accessId, secretKey)
+ .modules(MODULES)
+ .overrides(overrides)
+ .buildView(BlobStoreContext.class);
+
+ String expectedFileName = ((ExportServiceImpl) exportService)
+ .prepareOutputFileName(organization.getName(), "applications");
+
+ blobStore = context.getBlobStore();
+ if (!blobStore.blobExists(bucketName, expectedFileName)) {
+ blobStore.deleteContainer(bucketName);
+ Assert.fail("Blob does not exist: " + expectedFileName);
+ }
+ Blob bo = blobStore.getBlob(bucketName, expectedFileName);
+
+ Long numOfFiles = blobStore.countBlobs(bucketName);
+ Long numWeWant = 1L;
+ blobStore.deleteContainer(bucketName);
+ assertEquals(numOfFiles, numWeWant);
+ assertNotNull(bo);
+
+ } finally {
+ blobStore.deleteContainer(bucketName);
+ }
+ }
+
+    @Ignore("Why is this ignored?")
+    @Test
+    public void testIntegration100EntitiesForAllApps() throws Exception {
+
+        // Creates 5 orgs, each with 5 apps, each app holding one entity in a
+        // custom collection, schedules an export for the last org, and verifies
+        // that the expected number of export files lands in the S3 bucket.
+        ExportService exportService = setup.getExportService();
+
+        String appName = newOrgAppAdminRule.getApplicationInfo().getName();
+        HashMap<String, Object> payload = payloadBuilder( appName );
+
+        OrganizationInfo orgMade = null;
+        for ( int i = 0; i < 5; i++ ) {
+            orgMade = setup.getMgmtSvc().createOrganization( "minorboss" + i, adminUser, true );
+            for ( int j = 0; j < 5; j++ ) {
+                ApplicationInfo appMade =
+                    setup.getMgmtSvc().createApplication( orgMade.getUuid(), "superapp" + j );
+
+                EntityManager customMaker = setup.getEmf().getEntityManager( appMade.getId() );
+                customMaker.createApplicationCollection( "superappCol" + j );
+
+                // one entity per app is enough to force an export file
+                Map<String, Object> entityLevelProperties = new LinkedHashMap<String, Object>();
+                entityLevelProperties.put( "derp", "bacon" );
+                customMaker.create( "superappCol" + j, entityLevelProperties );
+            }
+        }
+
+        // export the last organization created above
+        payload.put( "organizationId", orgMade.getUuid() );
+
+        UUID exportUUID = exportService.schedule( payload );
+        assertNotNull( exportUUID );
+
+        // TODO: poll exportService.getState() instead of a fixed sleep
+        Thread.sleep( 3000 );
+
+        String accessId = System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR );
+        String secretKey = System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR );
+
+        Properties overrides = new Properties();
+        overrides.setProperty( "s3" + ".identity", accessId );
+        overrides.setProperty( "s3" + ".credential", secretKey );
+
+        BlobStore blobStore = null;
+
+        try {
+            final Iterable<? extends Module> MODULES = ImmutableSet.of(
+                new JavaUrlHttpCommandExecutorServiceModule(),
+                new Log4JLoggingModule(),
+                new NettyPayloadModule() );
+
+            BlobStoreContext context = ContextBuilder.newBuilder( "s3" )
+                .credentials( accessId, secretKey )
+                .modules( MODULES )
+                .overrides( overrides )
+                .buildView( BlobStoreContext.class );
+
+            blobStore = context.getBlobStore();
+
+            // asserts that the correct number of files was transferred over
+            Long numOfFiles = blobStore.countBlobs( bucketName );
+            assertEquals( Long.valueOf( 5L ), numOfFiles );
+        }
+        finally {
+            // fixed: the original deleted the container twice and NPE'd here when
+            // context construction failed before blobStore was assigned, masking
+            // the real failure
+            if ( blobStore != null ) {
+                blobStore.deleteContainer( bucketName );
+            }
+        }
+    }
+
+
+    @Ignore("Why is this ignored")
+    @Test
+    public void testIntegration100EntitiesOnOneOrg() throws Exception {
+
+        // Creates 100 unrelated orgs/apps with 20 entities each, plus 100 users
+        // in the application under test, then exports only the target org/app
+        // and verifies exactly one export file appears in S3.
+        ExportService exportService = setup.getExportService();
+
+        String appName = newOrgAppAdminRule.getApplicationInfo().getName();
+        HashMap<String, Object> payload = payloadBuilder( appName );
+
+        payload.put( "organizationId", organization.getUuid() );
+        payload.put( "applicationId", applicationId );
+
+        // noise data that must NOT be exported
+        for ( int i = 0; i < 100; i++ ) {
+            OrganizationInfo orgMade =
+                setup.getMgmtSvc().createOrganization( "largerboss" + i, adminUser, true );
+            ApplicationInfo appMade =
+                setup.getMgmtSvc().createApplication( orgMade.getUuid(), "superapp" + i );
+
+            EntityManager customMaker = setup.getEmf().getEntityManager( appMade.getId() );
+            customMaker.createApplicationCollection( "superappCol" + i );
+            for ( int index = 0; index < 20; index++ ) {
+                Map<String, Object> entityLevelProperties = new LinkedHashMap<String, Object>();
+                entityLevelProperties.put( "username", "bobso" + index );
+                entityLevelProperties.put( "email", "derp" + index + "@anuff.com" );
+                customMaker.create( "superappCol", entityLevelProperties );
+            }
+        }
+
+        // 100 users in the application under test — these SHOULD be exported
+        EntityManager em = setup.getEmf().getEntityManager( applicationId );
+        for ( int i = 0; i < 100; i++ ) {
+            Map<String, Object> userProperties = new LinkedHashMap<String, Object>();
+            userProperties.put( "username", "bido" + i );
+            userProperties.put( "email", "bido" + i + "@anuff.com" );
+            em.create( "user", userProperties );
+        }
+
+        UUID exportUUID = exportService.schedule( payload );
+
+        // fixed: the original spun in a tight, unbounded busy-wait loop; poll
+        // with a sleep and a retry cap instead
+        int retries = 0;
+        while ( !exportService.getState( exportUUID ).equals( "FINISHED" ) && retries++ < 60 ) {
+            Thread.sleep( 1000 );
+        }
+
+        String accessId = System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR );
+        String secretKey = System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR );
+
+        Properties overrides = new Properties();
+        overrides.setProperty( "s3" + ".identity", accessId );
+        overrides.setProperty( "s3" + ".credential", secretKey );
+
+        BlobStore blobStore = null;
+
+        // fixed: the original caught Exception and called assert(false), which is
+        // a no-op unless the JVM runs with -ea and also swallowed the real cause;
+        // let exceptions propagate and use JUnit assertions instead
+        try {
+            final Iterable<? extends Module> MODULES = ImmutableSet.of(
+                new JavaUrlHttpCommandExecutorServiceModule(),
+                new Log4JLoggingModule(),
+                new NettyPayloadModule() );
+
+            BlobStoreContext context = ContextBuilder.newBuilder( "s3" )
+                .credentials( accessId, secretKey )
+                .modules( MODULES )
+                .overrides( overrides )
+                .buildView( BlobStoreContext.class );
+
+            String expectedFileName = ((ExportServiceImpl) exportService)
+                .prepareOutputFileName( organization.getName(), "applications" );
+
+            blobStore = context.getBlobStore();
+            if ( !blobStore.blobExists( bucketName, expectedFileName ) ) {
+                Assert.fail( "Blob does not exist: " + expectedFileName );
+            }
+
+            // exactly one export file for the single targeted org/app
+            assertEquals( Long.valueOf( 1L ), blobStore.countBlobs( bucketName ) );
+            assertNotNull( blobStore.getBlob( bucketName, expectedFileName ) );
+        }
+        finally {
+            // guard: blobStore is null if context construction threw
+            if ( blobStore != null ) {
+                blobStore.deleteContainer( bucketName );
+            }
+        }
+    }
+
+    /** Builds the JobData payload used to run an export job through the scheduler in tests. */
+    public JobData jobDataCreator(HashMap<String, Object> payload,UUID exportUUID, S3Export s3Export) {
+        JobData data = new JobData();
+        data.setProperty( "jobName", "exportJob" );
+        data.setProperty( "exportInfo", payload );
+        data.setProperty( "exportId", exportUUID );
+        data.setProperty( "s3Export", s3Export );
+        return data;
+    }
+
+    /* Creates a fake export-request payload for testing purposes; the S3
+     * credentials are read from system properties and the bucket is the
+     * per-test bucketName. */
+    public HashMap<String, Object> payloadBuilder( String orgOrAppName ) {
+
+        Map<String, Object> storageInfo = new HashMap<String, Object>();
+        storageInfo.put( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR,
+            System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR ) );
+        storageInfo.put( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR,
+            System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR ) );
+        storageInfo.put( "bucket_location", bucketName );
+
+        Map<String, Object> properties = new HashMap<String, Object>();
+        properties.put( "storage_provider", "s3" );
+        properties.put( "storage_info", storageInfo );
+
+        HashMap<String, Object> payload = new HashMap<String, Object>();
+        payload.put( "path", orgOrAppName );
+        payload.put( "properties", properties );
+        return payload;
+    }
+}
http://git-wip-us.apache.org/repos/asf/incubator-usergrid/blob/bedf5afb/stack/services/src/test/java/org/apache/usergrid/management/importer/ImportCollectionIT.java
----------------------------------------------------------------------
diff --cc stack/services/src/test/java/org/apache/usergrid/management/importer/ImportCollectionIT.java
index d0a984d,0000000..d8e104d
mode 100644,000000..100644
--- a/stack/services/src/test/java/org/apache/usergrid/management/importer/ImportCollectionIT.java
+++ b/stack/services/src/test/java/org/apache/usergrid/management/importer/ImportCollectionIT.java
@@@ -1,684 -1,0 +1,684 @@@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.usergrid.management.importer;
+
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.UUID;
+
+import org.jclouds.ContextBuilder;
+import org.jclouds.blobstore.BlobStore;
+import org.jclouds.blobstore.BlobStoreContext;
+import org.jclouds.blobstore.ContainerNotFoundException;
+import org.jclouds.blobstore.domain.PageSet;
+import org.jclouds.blobstore.domain.StorageMetadata;
+import org.jclouds.http.config.JavaUrlHttpCommandExecutorServiceModule;
+import org.jclouds.logging.log4j.config.Log4JLoggingModule;
+import org.jclouds.netty.config.NettyPayloadModule;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.StringUtils;
+
+import org.apache.usergrid.NewOrgAppAdminRule;
+import org.apache.usergrid.ServiceITSetup;
+import org.apache.usergrid.ServiceITSetupImpl;
+import org.apache.usergrid.batch.service.JobSchedulerService;
+import org.apache.usergrid.cassandra.ClearShiroSubject;
+import org.apache.usergrid.management.OrganizationInfo;
+import org.apache.usergrid.management.UserInfo;
+import org.apache.usergrid.management.export.ExportService;
+import org.apache.usergrid.persistence.ConnectionRef;
+import org.apache.usergrid.persistence.Entity;
+import org.apache.usergrid.persistence.EntityManager;
+import org.apache.usergrid.persistence.EntityRef;
+import org.apache.usergrid.persistence.Results;
+import org.apache.usergrid.persistence.SimpleEntityRef;
+import org.apache.usergrid.persistence.entities.FileImport;
+import org.apache.usergrid.persistence.entities.Import;
+import org.apache.usergrid.persistence.index.query.Query;
+import org.apache.usergrid.persistence.index.query.Query.Level;
+import org.apache.usergrid.setup.ConcurrentProcessSingleton;
+
+import com.amazonaws.SDKGlobalConfiguration;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.util.concurrent.Service;
+import com.google.inject.Module;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+
+public class ImportCollectionIT {
+
+ private static final Logger logger = LoggerFactory.getLogger(ImportCollectionIT.class);
+
+ // app-level data generated only once
+ private static UserInfo adminUser;
+ private static OrganizationInfo organization;
+ private static UUID applicationId;
+
+ private static String bucketPrefix;
+
+ private String bucketName;
+
+
+ @Rule
+ public ClearShiroSubject clearShiroSubject = new ClearShiroSubject();
+
+ @ClassRule
+ public static final ServiceITSetup setup = new ServiceITSetupImpl( );
+
+ @Rule
+ public NewOrgAppAdminRule newOrgAppAdminRule = new NewOrgAppAdminRule( setup );
+
+
+ @BeforeClass
+ public static void setup() throws Exception {
+
+ bucketPrefix = System.getProperty( "bucketName" );
+
+ // start the scheduler after we're all set up
+ JobSchedulerService jobScheduler = ConcurrentProcessSingleton.getInstance()
+ .getSpringResource().getBean( JobSchedulerService.class );
+
+ if ( jobScheduler.state() != Service.State.RUNNING ) {
- jobScheduler.startAndWait();
++ // Guava Service API migration: startAndWait() was removed; the
++ // startAsync() + awaitRunning() pair is its replacement
++ jobScheduler.startAsync();
++ jobScheduler.awaitRunning();
+ }
-
+ }
+
+
+    @AfterClass
+    public static void tearDown() {
+        // clean up buckets created during the run; skip when S3 was never configured
+        if ( StringUtils.isEmpty( bucketPrefix ) ) {
+            return;
+        }
+        deleteBucketsWithPrefix();
+    }
+
+
+ @Before
+ public void before() {
+
+ boolean configured =
+ !StringUtils.isEmpty(System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR))
+ && !StringUtils.isEmpty(System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR))
+ && !StringUtils.isEmpty(System.getProperty("bucketName"));
+
+ if ( !configured ) {
+ logger.warn("Skipping test because {}, {} and bucketName not " +
+ "specified as system properties, e.g. in your Maven settings.xml file.",
+ new Object[] {
+ SDKGlobalConfiguration.SECRET_KEY_ENV_VAR,
+ SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR
+ });
+ }
+
+ Assume.assumeTrue( configured );
+
+ adminUser = newOrgAppAdminRule.getAdminInfo();
+ organization = newOrgAppAdminRule.getOrganizationInfo();
+ applicationId = newOrgAppAdminRule.getApplicationInfo().getId();
+
+
+ bucketName = bucketPrefix + RandomStringUtils.randomAlphanumeric(10).toLowerCase();
+ }
+
+
+    @After
+    public void after() throws Exception {
+        // intentionally empty — per-test S3 cleanup is handled by each
+        // test's own finally block (deleteBucket)
+    }
+
+
+ // test case to check if a collection file is imported correctly
+ // Round trip: export a "things" collection from app1 to S3, import it into a
+ // fresh app2, then verify entities, connections and dictionaries arrived, and
+ // that deleting the entities afterwards does not remove the imported copies.
+ @Test
+ public void testExportImportCollection() throws Exception {
+
+ // create a collection of "thing" entities in the first application, export to S3
+ try {
+
+ final UUID targetAppId = setup.getMgmtSvc().createApplication(
+ organization.getUuid(), "target" + RandomStringUtils.randomAlphanumeric(10)).getId();
+
+ final EntityManager emApp1 = setup.getEmf().getEntityManager( targetAppId );
+ Map<UUID, Entity> thingsMap = new HashMap<>();
+ List<Entity> things = new ArrayList<>();
+ createTestEntities(emApp1, thingsMap, things, "thing");
+
+ // start from a clean bucket so the import below sees only this export
+ deleteBucket();
+ exportCollection( emApp1, "things" );
+
+ // create new second application, import the data from S3
+
+ final UUID appId2 = setup.getMgmtSvc().createApplication(
+ organization.getUuid(), "second" + RandomStringUtils.randomAlphanumeric(10)).getId();
+
+ final EntityManager emApp2 = setup.getEmf().getEntityManager(appId2);
+ importCollections(emApp2);
+
+ // make sure that it worked
+
+ logger.debug("\n\nCheck connections\n");
+
+ List<Entity> importedThings = emApp2.getCollection(
+ appId2, "things", null, Level.ALL_PROPERTIES).getEntities();
+ assertTrue( !importedThings.isEmpty() );
+
+ // two things have connections
+ // createTestEntities wires the first two entities together in both
+ // directions, so exactly two "related" connections must survive the import
+
+ int conCount = 0;
+ for ( Entity e : importedThings ) {
+ Results r = emApp2.getConnectedEntities( e, "related", null, Level.IDS);
+ List<ConnectionRef> connections = r.getConnections();
+ conCount += connections.size();
+ }
+ assertEquals( 2, conCount );
+
+ logger.debug("\n\nCheck dictionaries\n");
+
+ // first two items have things in dictionary
+ // NOTE(review): both the "connected" and "connecting" maps below read the
+ // same "connected_types" dictionary; the connecting* variables presumably
+ // should read "connecting_types" — confirm intent.
+
+ EntityRef entity0 = importedThings.get(0);
+ Map connected0 = emApp2.getDictionaryAsMap(entity0, "connected_types");
+ Map connecting0 = emApp2.getDictionaryAsMap(entity0, "connected_types");
+ Assert.assertEquals( 1, connected0.size() );
+ Assert.assertEquals( 1, connecting0.size() );
+
+ EntityRef entity1 = importedThings.get(1);
+ Map connected1 = emApp2.getDictionaryAsMap(entity1, "connected_types");
+ Map connecting1 = emApp2.getDictionaryAsMap(entity1, "connected_types");
+ Assert.assertEquals( 1, connected1.size() );
+ Assert.assertEquals( 1, connecting1.size() );
+
+ // the rest rest do not have connections
+
+ EntityRef entity2 = importedThings.get(2);
+ Map connected2 = emApp2.getDictionaryAsMap(entity2, "connected_types");
+ Map connecting2 = emApp2.getDictionaryAsMap(entity2, "connected_types");
+ Assert.assertEquals( 0, connected2.size() );
+ Assert.assertEquals( 0, connecting2.size() );
+
+ // if entities are deleted from app1, they still exist in app2
+ // NOTE(review): the entities deleted here were fetched from app2's
+ // collection but are deleted through emApp1 — verify that this is the
+ // intended EntityManager.
+
+ logger.debug("\n\nCheck dictionary\n");
+ for ( Entity importedThing : importedThings ) {
+ emApp1.delete( importedThing );
+ }
+ emApp1.refreshIndex();
+ emApp2.refreshIndex();
+
+ importedThings = emApp2.getCollection(
+ appId2, "things", null, Level.ALL_PROPERTIES).getEntities();
+ assertTrue( !importedThings.isEmpty() );
+
+ } finally {
+ deleteBucket();
+ }
+ }
+
+
+ /**
+ * Test that an existing collection of entities can be updated
+ * by doing an import of entities identified by UUIDs.
+ * Flow: create things in app1, export; import into app2; mutate them in
+ * app2, export again; re-import into app1 and verify the mutation landed.
+ */
+ @Test
+ public void testUpdateByImport() throws Exception {
+
+ // create collection of things in first application, export them to S3
+
+ final UUID targetAppId = setup.getMgmtSvc().createApplication(
+ organization.getUuid(), "target" + RandomStringUtils.randomAlphanumeric(10)).getId();
+
+ final EntityManager emApp1 = setup.getEmf().getEntityManager( targetAppId );
+
+ Map<UUID, Entity> thingsMap = new HashMap<>();
+ List<Entity> things = new ArrayList<>();
+ createTestEntities(emApp1, thingsMap, things, "thing");
+
+ // clean bucket so each export below is the only content
+ deleteBucket();
+
+ try {
+ exportCollection(emApp1, "things");
+
+ // create new second application and import those things from S3
+
+ final UUID appId2 = setup.getMgmtSvc().createApplication(
+ organization.getUuid(), "second" + RandomStringUtils.randomAlphanumeric(10)).getId();
+
+ final EntityManager emApp2 = setup.getEmf().getEntityManager(appId2);
+ importCollections(emApp2);
+
+
+ // update the things in the second application, export to S3
+ // the import preserved UUIDs, so app1's keys address app2's copies
+
+ for (UUID uuid : thingsMap.keySet()) {
+ Entity entity = emApp2.get(uuid);
+ entity.setProperty("fuel_source", "Hydrogen");
+ emApp2.update(entity);
+ }
+
+ deleteBucket();
+ exportCollection(emApp2, "things");
+
+
+ // import the updated things back into the first application, check that they've been updated
+
+ importCollections(emApp1);
+
+ for (UUID uuid : thingsMap.keySet()) {
+ Entity entity = emApp1.get(uuid);
+ Assert.assertEquals("Hydrogen", entity.getProperty("fuel_source"));
+ }
+
+ } finally {
+ deleteBucket();
+ }
+ }
+
+
+    /**
+     * Import of multiple export files: create 10 applications with a collection
+     * of 10 "things" each, export each to S3, then import everything into one
+     * target application and expect 100 entities.
+     */
+    @Test
+    public void testImportWithMultipleFiles() throws Exception {
+
+        deleteBucket();
+
+        try {
+
+            String targetAppName = "import-test-target-" + RandomStringUtils.randomAlphanumeric(10);
+            UUID targetAppId = setup.getMgmtSvc().createApplication(organization.getUuid(), targetAppName).getId();
+
+            // create 10 applications each with a collection of 10 things, export all to S3
+            // (fixed comment: the original said 4 but the loop creates 10)
+            logger.debug("\n\nCreating 10 applications with 10 entities each\n");
+
+            for (int i = 0; i < 10; i++) {
+
+                String appName = "import-test-" + i + RandomStringUtils.randomAlphanumeric(10);
+                UUID appId = setup.getMgmtSvc().createApplication(organization.getUuid(), appName).getId();
+                EntityManager emApp = setup.getEmf().getEntityManager(appId);
+
+                Map<UUID, Entity> thingsMap = new HashMap<>();
+                List<Entity> things = new ArrayList<>();
+                createTestEntities(emApp, thingsMap, things, "thing");
+
+                exportCollection(emApp, "things");
+            }
+
+            // import all those exports from S3 into the target application
+            logger.debug("\n\nImporting\n");
+
+            final EntityManager emDefaultApp = setup.getEmf().getEntityManager(targetAppId);
+            importCollections(emDefaultApp);
+
+            // we should now have 100 entities in the target app; limit 101 so an
+            // overshoot would be caught by the size assertion below
+            logger.debug("\n\nQuery to see if we now have 100 entities\n");
+
+            Query query = Query.fromQL("select *").withLimit(101);
+
+            List<Entity> importedThings = emDefaultApp.getCollection(
+                emDefaultApp.getApplicationId(), "things", query, Level.ALL_PROPERTIES).getEntities();
+
+            // fixed: the original passed a boolean expression to assertNotNull,
+            // which is autoboxed and can never be null, so it asserted nothing
+            assertNotNull("importedThings must not be null", importedThings);
+            assertTrue("importedThings must not be empty", !importedThings.isEmpty());
+            assertEquals("there must be 100 importedThings", 100, importedThings.size());
+
+        } finally {
+            deleteBucket();
+        }
+    }
+
+
+    /**
+     * Importing a badly formatted JSON file must import nothing and must record
+     * an informative error message on the FileImport entity.
+     */
+    @Test
+    public void testImportBadJson() throws Exception {
+
+        deleteBucket();
+
+        // upload a known-bad JSON file to the S3 bucket
+        // (removed unused local: File cwd = new File("."))
+        String basePath = System.getProperty("target.directory")
+            + File.separator + "test-classes" + File.separator;
+
+        List<String> filenames = new ArrayList<>( 1 );
+        filenames.add( basePath + "testimport-bad-json.json");
+
+        S3Upload s3Upload = new S3Upload();
+        s3Upload.copyToS3(
+            System.getProperty(SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR),
+            System.getProperty(SDKGlobalConfiguration.SECRET_KEY_ENV_VAR),
+            bucketName, filenames );
+
+        // import bad JSON from from the S3 bucket into a fresh application
+
+        String appName = "import-test-" + RandomStringUtils.randomAlphanumeric(10);
+        UUID appId = setup.getMgmtSvc().createApplication(organization.getUuid(), appName).getId();
+
+        final EntityManager em = setup.getEmf().getEntityManager( appId );
+        UUID importId = importCollections(em);
+
+        // nothing must have been imported
+
+        List<Entity> importedThings = em.getCollection(
+            em.getApplicationId(), "things", null, Level.ALL_PROPERTIES).getEntities();
+
+        assertTrue("No entities should have been imported", importedThings.isEmpty());
+
+        // and the single FileImport record must carry the JSON parse error
+
+        ImportService importService = setup.getImportService();
+        Results results = importService.getFileImports( appId, importId, null, null );
+
+        assertEquals( "There is one", 1, results.size() );
+
+        assertEquals( "Entity is FileImport object",
+            FileImport.class, results.getEntity().getClass() );
+
+        FileImport fileImport = (FileImport)results.getEntity();
+
+        assertTrue( fileImport.getFileName().endsWith("testimport-bad-json.json"));
+
+        assertTrue( "Error message is correct",
+            fileImport.getErrorMessage().startsWith("Unexpected character ('<' (code 60))"));
+    }
+
+    /**
+     * Import a mix of good and malformed files: the good ones must import
+     * (possibly partially), and one FileImport record must exist per file.
+     */
+    @Test
+    public void testImportWithMultipleFilesSomeBad() throws Exception {
+
+        deleteBucket();
+
+        // upload good and badly formatted files to our S3 bucket
+
+        String basePath = System.getProperty("target.directory")
+            + File.separator + "test-classes" + File.separator;
+
+        // fixed: initial capacity was 3 but four files are added
+        List<String> filenames = new ArrayList<>( 4 );
+        filenames.add( basePath + "testimport-with-connections.json" );
+        filenames.add( basePath + "testimport-qtmagics.json" );
+        filenames.add( basePath + "testimport-bad-connection.json" );
+        filenames.add( basePath + "testimport-bad-json.json" );
+
+        S3Upload s3Upload = new S3Upload();
+        s3Upload.copyToS3(
+            System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR),
+            System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR),
+            bucketName, filenames );
+
+        // import all those files into a fresh target application
+
+        String targetAppName = "import-test-target-" + RandomStringUtils.randomAlphanumeric(10);
+        UUID targetAppId = setup.getMgmtSvc().createApplication(organization.getUuid(), targetAppName).getId();
+
+        final EntityManager emDefaultApp = setup.getEmf().getEntityManager( targetAppId );
+        UUID importId = importCollections(emDefaultApp);
+
+        // per-collection counts expected from the fixture files
+        {
+            List<Entity> importedThings = emDefaultApp.getCollection(
+                emDefaultApp.getApplicationId(), "connfails", null, Level.ALL_PROPERTIES).getEntities();
+            assertTrue( !importedThings.isEmpty());
+            assertEquals( 1, importedThings.size() );
+        }
+
+        {
+            List<Entity> importedThings = emDefaultApp.getCollection(
+                emDefaultApp.getApplicationId(), "qtmagics", null, Level.ALL_PROPERTIES).getEntities();
+            assertTrue(!importedThings.isEmpty());
+            assertEquals(5, importedThings.size());
+        }
+
+        {
+            List<Entity> importedThings = emDefaultApp.getCollection(
+                emDefaultApp.getApplicationId(), "badjsons", null, Level.ALL_PROPERTIES).getEntities();
+            assertTrue(!importedThings.isEmpty());
+            assertEquals( 4, importedThings.size() );
+        }
+
+        {
+            List<Entity> importedThings = emDefaultApp.getCollection(
+                emDefaultApp.getApplicationId(), "things", null, Level.ALL_PROPERTIES).getEntities();
+            assertTrue(!importedThings.isEmpty());
+            assertEquals( 10, importedThings.size() );
+        }
+
+        // TODO: poll file-import state instead of a fixed sleep
+        Thread.sleep(3000);
+
+        ImportService importService = setup.getImportService();
+        Results results = importService.getFileImports( targetAppId, importId, null, null );
+
+        // fixed assertion-message typo ("There four" -> "There are four")
+        assertEquals( "There are four file imports", 4, results.size() );
+
+    }
+
+
+ //---------------------------------------------------------------------------------------------
+
+
+ /**
+ * Start import job that will import all collections in all data files in the S3 bucket.
+ * Blocks (polling once per second, max 30 tries) until the job reaches the
+ * FINISHED or FAILED state, then refreshes the index and returns the import id.
+ * Note that a FAILED import does NOT throw — callers inspect the results.
+ */
+ private UUID importCollections(final EntityManager em) throws Exception {
+
+ logger.debug("\n\nImport into new app {}\n", em.getApplication().getName() );
+
+ final ImportService importService = setup.getImportService();
+
+ // double-brace initialization builds the nested request payload:
+ // { path, organizationId, applicationId, properties: { storage_provider, storage_info } }
+ final Import importEntity = importService.schedule(em.getApplication().getUuid(),
+ new HashMap<String, Object>() {{
+ put( "path", organization.getName() + em.getApplication().getName() );
+ put( "organizationId", organization.getUuid() );
+ put( "applicationId", em.getApplication().getUuid() );
+ put( "properties", new HashMap<String, Object>() {{
+ put( "storage_provider", "s3" );
+ put( "storage_info", new HashMap<String, Object>() {{
+ put( "s3_access_id",
+ System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR) );
+ put( "s3_key",
+ System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR ) );
+ put( "bucket_location", bucketName );
+ }} );
+ }} );
+ }});
+
+ int maxRetries = 30;
+ int retries = 0;
+ Import.State state = importService.getState(importEntity.getUuid());
+ while ( !state.equals( Import.State.FINISHED )
+ && !state.equals( Import.State.FAILED )
+ && retries++ < maxRetries ) {
+
+ logger.debug("Waiting for import ({}) ...", state.toString());
+ Thread.sleep(1000);
+
+ state = importService.getState(importEntity.getUuid());
+ }
+
+ // NOTE(review): if the state flips to FINISHED on the very last poll,
+ // retries still exceeds maxRetries here and this throws anyway — confirm
+ // whether the state should be rechecked before throwing.
+ if ( retries >= maxRetries ) {
+ throw new RuntimeException("Max retries reached");
+ }
+ em.refreshIndex();
+
+ return importEntity.getUuid();
+ }
+
+
+ /**
+ * Start export job that wilk export a specific collection to the S3 bucket.
+ * Blocks (polling once per second, max 30 tries) until the export service
+ * reports the "FINISHED" state; throws RuntimeException on timeout.
+ * NOTE(review): unlike ImportService (which exposes an enum), the export
+ * state here is compared as the string "FINISHED" — confirm the API.
+ */
+ private void exportCollection(
+ final EntityManager em, final String collectionName ) throws Exception {
+
+ logger.debug("\n\nExporting {} collection from application {}\n",
+ collectionName, em.getApplication().getName() );
+
+ em.refreshIndex();
+
+ // double-brace initialization builds the nested export request payload
+ ExportService exportService = setup.getExportService();
+ UUID exportUUID = exportService.schedule( new HashMap<String, Object>() {{
+ put( "path", organization.getName() + em.getApplication().getName());
+ put( "organizationId", organization.getUuid());
+ put( "applicationId", em.getApplication().getUuid() );
+ put( "collectionName", collectionName);
+ put( "properties", new HashMap<String, Object>() {{
+ put( "storage_provider", "s3" );
+ put( "storage_info", new HashMap<String, Object>() {{
+ put( "s3_access_id",
+ System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR) );
+ put("s3_key",
+ System.getProperty(SDKGlobalConfiguration.SECRET_KEY_ENV_VAR));
+ put( "bucket_location", bucketName );
+ }});
+ }});
+ }});
+
+ int maxRetries = 30;
+ int retries = 0;
+ while ( !exportService.getState( exportUUID ).equals( "FINISHED" ) && retries++ < maxRetries ) {
+ logger.debug("Waiting for export...");
+ Thread.sleep(1000);
+ }
+
+ if ( retries >= maxRetries ) {
+ throw new RuntimeException("Max retries reached");
+ }
+ }
+
+
+    /**
+     * Create 10 test entities of the specified type in the given application.
+     * The first two entities are connected to each other ("related", both
+     * directions); created entities are added to thingsMap and things.
+     */
+    private void createTestEntities(final EntityManager em,
+        Map<UUID, Entity> thingsMap, List<Entity> things, final String type) throws Exception {
+
+        logger.debug("\n\nCreating new {} collection in application {}\n",
+            type, em.getApplication().getName());
+
+        em.refreshIndex();
+
+        List<Entity> created = new ArrayList<>();
+        for (int i = 0; i < 10; i++) {
+            Map<String, Object> props = new HashMap<>();
+            props.put("name", em.getApplication().getName() + "-" + type + "-" + i);
+            props.put("originalAppId", em.getApplication().getUuid());
+            props.put("originalAppName", em.getApplication().getName());
+
+            Entity entity = em.create(type, props);
+            thingsMap.put(entity.getUuid(), entity);
+            things.add(entity);
+            created.add(entity);
+        }
+
+        // wire the first two entities together in both directions
+        em.createConnection(new SimpleEntityRef(type, created.get(0).getUuid()),
+            "related", new SimpleEntityRef(type, created.get(1).getUuid()));
+        em.createConnection(new SimpleEntityRef(type, created.get(1).getUuid()),
+            "related", new SimpleEntityRef(type, created.get(0).getUuid()));
+
+        em.refreshIndex();
+    }
+
+
+    /**
+     * Delete the configured s3 bucket.
+     * Tolerates a missing container, consistent with deleteBucketsWithPrefix(),
+     * so cleanup in a finally block can never mask the test's real failure.
+     */
+    public void deleteBucket() {
+
+        logger.debug("\n\nDelete bucket\n");
+
+        String accessId = System.getProperty(SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR);
+        String secretKey = System.getProperty(SDKGlobalConfiguration.SECRET_KEY_ENV_VAR);
+
+        Properties overrides = new Properties();
+        overrides.setProperty("s3" + ".identity", accessId);
+        overrides.setProperty("s3" + ".credential", secretKey);
+
+        final Iterable<? extends Module> MODULES = ImmutableSet
+            .of(new JavaUrlHttpCommandExecutorServiceModule(),
+                new Log4JLoggingModule(),
+                new NettyPayloadModule());
+
+        BlobStoreContext context =
+            ContextBuilder.newBuilder("s3").credentials(accessId, secretKey).modules(MODULES)
+                .overrides(overrides).buildView(BlobStoreContext.class);
+
+        BlobStore blobStore = context.getBlobStore();
+        try {
+            blobStore.deleteContainer( bucketName );
+        } catch ( ContainerNotFoundException cnfe ) {
+            logger.warn("Bucket {} does not exist, nothing to delete", bucketName);
+        }
+    }
+
+
+    // might be handy if you need to clean up buckets
+    /**
+     * Delete every bucket whose name starts with the configured bucketPrefix.
+     * Buckets that disappear concurrently are logged and skipped.
+     */
+    private static void deleteBucketsWithPrefix() {
+
+        logger.debug("\n\nDelete buckets with prefix {}\n", bucketPrefix );
+
+        String accessId = System.getProperty(SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR);
+        String secretKey = System.getProperty(SDKGlobalConfiguration.SECRET_KEY_ENV_VAR);
+
+        Properties overrides = new Properties();
+        overrides.setProperty("s3" + ".identity", accessId);
+        overrides.setProperty("s3" + ".credential", secretKey);
+
+        final Iterable<? extends Module> MODULES = ImmutableSet
+            .of(new JavaUrlHttpCommandExecutorServiceModule(),
+                new Log4JLoggingModule(),
+                new NettyPayloadModule());
+
+        BlobStoreContext context =
+            ContextBuilder.newBuilder("s3").credentials(accessId, secretKey).modules(MODULES)
+                .overrides(overrides).buildView(BlobStoreContext.class);
+
+        BlobStore blobStore = context.getBlobStore();
+        final PageSet<? extends StorageMetadata> blobStoreList = blobStore.list();
+
+        for ( Object o : blobStoreList.toArray() ) {
+            StorageMetadata s = (StorageMetadata)o;
+
+            if ( s.getName().startsWith( bucketPrefix )) {
+                try {
+                    blobStore.deleteContainer(s.getName());
+                } catch ( ContainerNotFoundException cnfe ) {
+                    // fixed: the original passed the exception where the {} bucket-name
+                    // argument belonged; SLF4J treats a trailing Throwable as the stack
+                    // trace, so pass the name first and the exception last
+                    logger.warn("Attempted to delete bucket {} but it is already deleted",
+                        s.getName(), cnfe );
+                }
+                logger.debug("Deleted bucket {}", s.getName());
+            }
+        }
+    }
+}