Posted to issues@openwhisk.apache.org by GitBox <gi...@apache.org> on 2018/11/06 16:33:27 UTC

[GitHub] mhamann closed pull request #226: Enable Cassandra as a backend driver

mhamann closed pull request #226: Enable Cassandra as a backend driver
URL: https://github.com/apache/incubator-openwhisk-apigateway/pull/226
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff of a foreign (forked) pull request on
merge, it is reproduced below for the sake of provenance:
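The change makes the gateway's data-store layer load its backend driver by
name, so lib/cassandra can be swapped in for lib/redis. A minimal sketch of
that selection pattern (not part of the patch; it only mirrors the
initWithDriver change in scripts/lua/lib/dataStore.lua and assumes the
lib/utils concatStrings helper used throughout the diff):

    -- Sketch only: the driver name defaults to "redis"; any other name is loaded as lib/<name>.
    local utils = require 'lib/utils'

    local function loadDriver(driver)
      driver = (driver ~= nil) and driver or "redis"         -- fall back to redis
      return require(utils.concatStrings({'lib/', driver}))  -- e.g. lib/cassandra
    end

    -- loadDriver('cassandra') picks up the new scripts/lua/lib/cassandra.lua;
    -- the DATASTORE environment variable added below presumably supplies the name at runtime.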

diff --git a/.travis.yml b/.travis.yml
index 337bdce..10e687b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -17,7 +17,9 @@ env:
 
 before_install:
   - ./tools/travis/setup.sh
+  - docker run -p 7000:7000 -p 7001:7001 -p 7199:7199 -p 9160:9160 -p 9042:9042 -d cassandra
   - pip install hererocks
+  - pip install cqlsh
   - hererocks lua_install -r^ --$LUA
   - export PATH=$PATH:$PWD/lua_install/bin # Add directory with all installed binaries to PATH
 
@@ -28,6 +30,7 @@ install:
 script:
   - busted --output=TAP --helper=set_paths --pattern=.lua scripts
   - cd ..
+  - ./tools/cassandra/setup.sh
   - ./tools/travis/build.sh
 
 deploy:
diff --git a/Dockerfile b/Dockerfile
index 7c2e835..f64f1e3 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -36,7 +36,7 @@ RUN  echo " ... adding Openresty, NGINX, NAXSI and PCRE" \
      && tar -zxf ./openresty-${OPENRESTY_VERSION}.tar.gz \
      && tar -zxf ./pcre-${PCRE_VERSION}.tar.gz \
      && tar -zxf ./naxsi-${NAXSI_VERSION}.tar.gz \
-     && cd /tmp/api-gateway/openresty-${OPENRESTY_VERSION} \ 
+     && cd /tmp/api-gateway/openresty-${OPENRESTY_VERSION} \
 
      && echo "        - building debugging version of the api-gateway ... " \
      && ./configure \
@@ -181,7 +181,14 @@ RUN echo " ... installing lua-resty-lrucache... " \
             INSTALL=${_prefix}/api-gateway/bin/resty-install \
     && rm -rf /tmp/api-gateway
 
-
+ENV LUA_CASSANDRA_VERSION 1.2.2
+RUN echo " ... installing lua-resty-cassandra... " \
+    && mkdir -p /tmp/api-gateway \
+    && curl -k -L https://github.com/thibaultcha/lua-cassandra/archive/${LUA_CASSANDRA_VERSION}.tar.gz -o /tmp/api-gateway/lua-cassandra-${LUA_CASSANDRA_VERSION}.tar.gz \
+    && tar -xf /tmp/api-gateway/lua-cassandra-${LUA_CASSANDRA_VERSION}.tar.gz -C /tmp/api-gateway \
+    && cd /tmp/api-gateway/lua-cassandra-${LUA_CASSANDRA_VERSION} \
+    && cp -r lib/* /usr/local/api-gateway/lualib \
+    && rm -rf /tmp/api-gateway
 
 ENV NETURL_LUA_VERSION 0.9-1
 RUN echo " ... installing neturl.lua ... " \
diff --git a/Makefile b/Makefile
index 0c4b37b..7170d1b 100644
--- a/Makefile
+++ b/Makefile
@@ -18,7 +18,7 @@ profile-build:
 	docker build -t openwhisk/apigateway-profiling -f Dockerfile.profiling .
 
 .PHONY: profile-run
-profile-run: profile-build 
+profile-run: profile-build
 	docker run --rm --name="apigateway" --privileged -p 80:80 -p ${PUBLIC_MANAGEDURL_PORT}:8080 -p 9000:9000 \
 		-e PUBLIC_MANAGEDURL_HOST=${PUBLIC_MANAGEDURL_HOST} -e PUBLIC_MANAGEDURL_PORT=${PUBLIC_MANAGEDURL_PORT} \
 		-e REDIS_HOST=${REDIS_HOST} -e REDIS_PORT=${REDIS_PORT} -e REDIS_PASS=${REDIS_PASS} \
@@ -42,6 +42,21 @@ docker-run:
 		-e TOKEN_GITHUB_URL=https://api.github.com/user \
 		openwhisk/apigateway:latest
 
+
+.PHONY: cassandra-run
+cassandra-run:
+	docker run --rm --name="apigateway" -p 80:80 -p ${PUBLIC_MANAGEDURL_PORT}:8080 -p 9000:9000 \
+		-e PUBLIC_MANAGEDURL_HOST=${PUBLIC_MANAGEDURL_HOST} -e PUBLIC_MANAGEDURL_PORT=${PUBLIC_MANAGEDURL_PORT} \
+		-e REDIS_HOST=${REDIS_HOST} -e REDIS_PORT=${REDIS_PORT} -e REDIS_PASS=${REDIS_PASS} \
+		-e TOKEN_GOOGLE_URL=https://www.googleapis.com/oauth2/v3/tokeninfo \
+	 	-e TOKEN_FACEBOOK_URL=https://graph.facebook.com/debug_token \
+		-e TOKEN_GITHUB_URL=https://api.github.com/user \
+		-e DATASTORE=cassandra \
+		-e CASSANDRA_HOST=172.17.0.1 \
+		-e CASSANDRA_PORT=9042 \
+		-e CASSANDRA_KEYSPACE=apigateway \
+		openwhisk/apigateway:latest
+
 .PHONY: docker-debug
 docker-debug:
 	#Volumes directories must be under your Users directory
diff --git a/api-gateway.conf b/api-gateway.conf
index e206e48..66985d1 100644
--- a/api-gateway.conf
+++ b/api-gateway.conf
@@ -34,6 +34,10 @@ env PUBLIC_MANAGEDURL_PORT;
 env HOST;
 env PORT;
 
+env DATASTORE;
+env CASSANDRA_HOST;
+env CASSANDRA_PORT;
+env CASSANDRA_KEYSPACE;
 
 env REDIS_RETRY_COUNT;
 
@@ -60,6 +64,7 @@ pcre_jit on;
 http {
     default_type  text/plain;
 
+    lua_shared_dict cassandra 1m;
     # Set in-memory buffer size
     client_body_buffer_size 1M;
     client_max_body_size 1M;
diff --git a/conf.d/managed_endpoints.conf b/conf.d/managed_endpoints.conf
index ba9c94e..554fd41 100644
--- a/conf.d/managed_endpoints.conf
+++ b/conf.d/managed_endpoints.conf
@@ -59,7 +59,6 @@ server {
             ngx.say("You have hit the api gateway managed endpoints.")
         ';
     }
-
     location ~ "^/api/([a-zA-Z0-9\-]+)/([a-zA-Z0-9\-\/\-\_\{\} ]+)(\\b)" {
         set $upstream https://172.17.0.1;
         set $tenant $1;
diff --git a/init.sh b/init.sh
index 6a41aa1..0f41169 100755
--- a/init.sh
+++ b/init.sh
@@ -51,12 +51,12 @@ api-gateway -t -p /usr/local/api-gateway/ -c /etc/api-gateway/api-gateway.conf
 echo "   ... using log level: '${log_level}'. Override it with -e 'LOG_LEVEL=<level>' "
 api-gateway -p /usr/local/api-gateway/ -c /etc/api-gateway/api-gateway.conf -g "daemon off; error_log /dev/stderr ${log_level};" &
 
-if [[ -n "${redis_host}" && -n "${redis_port}" ]]; then
-    sleep 1  # sleep until api-gateway is set up
+#if [[ -n "${redis_host}" && -n "${redis_port}" ]]; then
+#    sleep 1  # sleep until api-gateway is set up
     tail -f /var/log/api-gateway/access.log -f /var/log/api-gateway/error.log \
          -f /var/log/api-gateway/gateway_error.log -f /var/log/api-gateway/management.log
-else
-    echo "REDIS_HOST and/or REDIS_PORT not defined"
-fi
+#else
+#    echo "REDIS_HOST and/or REDIS_PORT not defined"
+#fi
 
 
diff --git a/scripts/lua/lib/cassandra.lua b/scripts/lua/lib/cassandra.lua
new file mode 100644
index 0000000..962e56d
--- /dev/null
+++ b/scripts/lua/lib/cassandra.lua
@@ -0,0 +1,317 @@
+local CASSANDRA_KEYSPACE = os.getenv('CASSANDRA_KEYSPACE')
+local CASSANDRA_HOST = os.getenv('CASSANDRA_HOST')
+local CASSANDRA_PORT = os.getenv('CASSANDRA_PORT')
+local request = require 'lib/request'
+local cjson = require 'cjson'
+local utils = require 'lib/utils'
+local _M = {}
+local started = true
+function _M.setKeyspace(keyspace)
+  CASSANDRA_KEYSPACE = keyspace
+end
+
+function _M.init()
+  local Cluster = require 'resty.cassandra.cluster'
+  if not started then
+    local cluster, err = Cluster.new({
+      shm = 'cassandra',
+      contact_points = { utils.concatStrings({CASSANDRA_HOST})},
+      keyspace = 'system',
+      timeout_read = 1000
+     })
+    if err then
+      request.err(503, utils.concatStrings({'Unable to communicate with cassandra cluster: ', cjson.encode(err)}))
+    end
+    _M.createTables(cluster)
+  end
+  local cluster, err = Cluster.new({
+    shm = 'cassandra',
+    contact_points = { CASSANDRA_HOST},
+    keyspace = CASSANDRA_KEYSPACE,
+    timeout_read = 1000
+  })
+  return cluster
+end
+
+function _M.addAPI(session, id, apiObj, existingAPI)
+  if existingAPI == nil then
+    local apis = _M.getAllAPIs(session)
+    for apiId, obj in pairs(apis) do
+      if apiId%2 == 0 then
+        obj = cjson.decode(obj)
+        if obj.tenantId == apiObj.tenantId and obj.basePath == apiObj.basePath then
+          request.err(500, "basePath not unique for given tenant.")
+        end
+      end
+    end
+  else -- emulate the logic in redis.lua, just delete all the resources for a given api
+    local basePath = existingAPI.basePath:sub(2)
+    for path, v in pairs(existingAPI.resources) do
+      local gatewayPath = ngx.unescape_uri(utils.concatStrings({basePath, ngx.escape_uri(path)}))
+      gatewayPath = gatewayPath:sub(1,1) == "/" and gatewayPath:sub(2) or gatewayPath
+      local redisKey = utils.concatStrings({"resources:", existingAPI.tenantId, ":", gatewayPath})
+      _M.deleteResource(session, redisKey, nil)
+    end
+  end
+  local tenantId = apiObj.tenantId
+  apiObj = cjson.encode(apiObj)
+  local ok, err = session:execute(string.format("INSERT into %s.api (api_id, tenant_id, value) VALUES ('%s', '%s', '%s')", CASSANDRA_KEYSPACE, id, tenantId, apiObj))
+  if err then
+    request.err(500, 'Failed to save api: ' .. err)
+  end
+  return cjson.decode(apiObj)
+end
+
+function _M.getAllAPIs(session)
+  local rows, err = session:execute(string.format("SELECT * from %s.api", CASSANDRA_KEYSPACE))
+  local result = {}
+  for _, v in ipairs(rows) do
+    table.insert(result, v['api_id'])
+    table.insert(result, v['value'])
+  end
+  return result
+end
+
+function _M.getAPI(session, id)
+  local rows, err = session:execute(string.format("SELECT * from %s.api where api_id='%s'", CASSANDRA_KEYSPACE, id))
+  if err then
+    request.err(500, utils.concatStrings({'Error getting api: ', err}))
+  end
+  for _,v in ipairs(rows) do
+    return v.value
+  end
+end
+
+function _M.deleteAPI(session, id)
+  local ok, err = session:execute(string.format("DELETE from %s.api where api_id='%s'", CASSANDRA_KEYSPACE, id))
+  if err then
+    request.err(500, utils.concatStrings({'Error deleting api: ', err}))
+  end
+end
+
+function _M.resourceToApi(session, id)
+  local spl = _M.stringSplit(id)
+  local tenantId = spl[1]
+  local gatewayPath = spl[2]
+  local rows, err = session:execute(string.format("SELECT api_id FROM %s.resource where tenant_id='%s' and resource_path='%s'", CASSANDRA_KEYSPACE,tenantId, gatewayPath))
+  if err then
+    request.err(500, utils.concatStrings({'Error resolving resource to api: ', err}))
+  end
+  for _, v in ipairs(rows) do
+    return v.api_id
+  end
+end
+
+function _M.generateResourceObj(ops, apiId, tenantObj, cors)
+  local resourceObj = {
+    operations = {}
+  }
+  for op, v in pairs(ops) do
+    op = op:upper()
+    resourceObj.operations[op] = {
+      backendUrl = v.backendUrl,
+      backendMethod = v.backendMethod
+    }
+    if v.policies then
+      resourceObj.operations[op].policies = v.policies
+    end
+    if v.security then
+      resourceObj.operations[op].security = v.security
+    end
+  end
+  if cors then
+    resourceObj.cors = cors
+  end
+  if apiId then
+    resourceObj.apiId = apiId
+  end
+  if tenantObj then
+    resourceObj.tenantId = tenantObj.id
+    resourceObj.tenantNamespace = tenantObj.namespace
+    resourceObj.tenantInstance = tenantObj.instance
+  end
+  return cjson.encode(resourceObj)
+
+end
+
+
+function _M.createResource(session, key, field, resourceObj)
+  local tenantId = _M.stringSplit(key)[1]
+  local resourcePath = _M.stringSplit(key)[2]
+  local apiId = cjson.decode(resourceObj).apiId
+  local ok, err = session:execute(string.format("INSERT into %s.resource (tenant_id, resource_path, api_id, value) VALUES ('%s', '%s', '%s', '%s')", CASSANDRA_KEYSPACE, tenantId, resourcePath, apiId, resourceObj))
+
+  if err then
+    request.err(500, utils.concatStrings({'Failed to create resource: ' .. err}))
+  end
+end
+
+function _M.addResourceToIndex(session, index, resourceKey)
+  return nil -- we don't need indexes
+end
+
+function _M.deleteResourceFromIndex(session, index, resourceKey)
+  return nil
+end
+
+
+function _M.getResource(session, key, field)
+  local tenantId = _M.stringSplit(key)[1]
+  local resourcePath = _M.stringSplit(key)[2]
+  local rows, err = session:execute(string.format("SELECT value from %s.resource WHERE tenant_id='%s' and resource_path='%s'", CASSANDRA_KEYSPACE, tenantId, resourcePath))
+  for _, v in ipairs(rows) do
+    return v.value
+  end
+end
+
+function _M.getAllResources(session, tenantId)
+  local data = session:execute(string.format("SELECT resource_path from %s.resource WHERE tenant_id='%s'", CASSANDRA_KEYSPACE, tenantId))
+  local result = {}
+  for _, v in ipairs(data) do
+    table.insert(result, utils.concatStrings({'resources:', tenantId, ':', v['resource_path']})) -- emulate the redis behavior
+  end
+  return result
+end
+
+function _M.deleteResource(session, key, field)
+  local tenantId = _M.stringSplit(key)[1]
+  local resourcePath = _M.stringSplit(key)[2]
+  local ok, err = session:execute(string.format("DELETE from %s.resource WHERE tenant_id='%s' and resource_path='%s'", CASSANDRA_KEYSPACE, tenantId, resourcePath))
+  if err then
+    request.err(500, 'Failed to delete resource: ' .. err)
+  end
+end
+
+function _M.addTenant(session, id, tenantObj)
+  local tenants = _M.getAllTenants(session)
+  for tenantId, obj in pairs(tenants) do
+    if tenantId%2 == 0 then
+      obj = cjson.decode(obj)
+      if obj.namespace == tenantObj.namespace and obj.instance == tenantObj.instance then
+        return cjson.encode(obj)
+      end
+    end
+  end
+  tenantObj = cjson.encode(tenantObj)
+  local ok, err = session:execute(string.format("INSERT into %s.tenant (tenant_id, value) VALUES ('%s', '%s')", CASSANDRA_KEYSPACE, id, tenantObj))
+  if err then
+    request.err(500, 'Error creating tenant: ' .. cjson.encode(err))
+  end
+  return tenantObj
+end
+
+function _M.getAllTenants(session)
+  local rows, err = session:execute(string.format("SELECT * FROM %s.tenant", CASSANDRA_KEYSPACE))
+  local result = {}
+  if rows == nil then
+    return {}
+  end
+  for _, v in ipairs(rows) do
+    table.insert(result, v['tenant_id'])
+    table.insert(result, v['value'])
+  end
+  return result
+end
+
+function _M.getTenant(session, id)
+  local rows, err = session:execute(string.format("SELECT value FROM %s.tenant where tenant_id='%s'", CASSANDRA_KEYSPACE, id))
+  for _, v in ipairs(rows) do
+    return cjson.decode(v.value)
+  end
+end
+
+
+function _M.deleteTenant(session, id)
+  local ok, err = session:execute(string.format("DELETE FROM %s.tenant where tenant_id='%s'", CASSANDRA_KEYSPACE, id))
+  if err then
+    request.err(500, 'Error deleting tenant: ' .. err)
+  end
+end
+
+function _M.createSubscription(session, key)
+  return session:execute(string.format("INSERT into %s.subscription (key) values ('%s')", CASSANDRA_KEYSPACE, key))
+end
+
+function _M.deleteSubscription(session, key)
+  return session:execute(string.format("DELETE from %s.subscription where key='%s'", CASSANDRA_KEYSPACE, key))
+end
+
+
+function _M.addSwagger(session, id, swagger)
+  local ok, err = session:execute(string.format("INSERT into %s.swagger (swagger_id, value) VALUES ('%s', '%s')", CASSANDRA_KEYSPACE, id, swagger))
+  if err then
+    request.err(500, utils.concatStrings({'Error saving swagger: ', err}))
+  end
+end
+
+function _M.getSwagger(session, id)
+  local rows, err = session:execute(string.format("SELECT value FROM %s.swagger where swagger_id='%s'", CASSANDRA_KEYSPACE, id))
+  if err then
+    request.err(500, utils.concatStrings({'Error getting swagger: ', err}))
+  end
+  for _, v in ipairs(rows) do
+    return v.value
+  end
+end
+
+function _M.getOAuthToken(session, provider, token)
+  local rows, err = session:execute(string.format("SELECT value FROM %s.oauth where provider='%s' and oauth_token='%s'", CASSANDRA_KEYSPACE, provider, token))
+  if err then
+    request.err(500, utils.concatStrings({'Error getting oauth token: ', err}))
+  end
+  for _, v in ipairs(rows) do
+    return v.value
+  end
+end
+
+function _M.saveOAuthToken(session, provider, token, body)
+  local ok, err = session:execute(string.format("INSERT INTO %s.oauth (provider, oauth_token, value) VALUES ('%s', '%s', '%s')", CASSANDRA_KEYSPACE, provider, token, cjson.encode(body)))
+  if err then
+    request.err(500, utils.concatStrings({'Error setting oauth token: ', err}))
+  end
+  return nil
+end
+
+function _M.subscriptionExists(session, key)
+  local rows, err = session:execute(string.format("SELECT * from %s.subscription where key='%s'", CASSANDRA_KEYSPACE, key))
+  if err then
+    request.err(500, utils.concatStrings({'Error retrieving subscription: ', err}))
+  end
+  if #rows > 0 then
+    return 1
+  end
+  return 0
+end
+
+function _M.getRateLimit(session, key)
+  local ok, err = session:execute(string.format("SELECT * from %s.ratelimit where key='%s'", CASSANDRA_KEYSPACE, key))
+  if err then
+    request.err(500, utils.concatStrings({'Error retrieving ratelimiting key: ', err}))
+  end
+end
+
+
+function _M.setRateLimit(session, key, value, interval, expires)
+  local ok, err = session:execute(string.format("INSERT into %s.ratelimit (key) VALUES ('%s') USING TTL %d", CASSANDRA_KEYSPACE, key, expires))
+  if err then
+    request.err(500, utils.concatStrings({'Error setting ratelimiting key: ', err}))
+  end
+  return nil
+end
+
+function _M.stringSplit(key)
+  local result = {}
+  local splitter = key:gmatch('[^:]*')
+  result[0] = splitter()
+  splitter()
+  result[1] = splitter()
+  splitter()
+  result[2] = splitter()
+  return result
+end
+
+function _M.close()
+  return nil
+end
+
+return _M
diff --git a/scripts/lua/lib/dataStore.lua b/scripts/lua/lib/dataStore.lua
index 52aa773..55937fc 100644
--- a/scripts/lua/lib/dataStore.lua
+++ b/scripts/lua/lib/dataStore.lua
@@ -22,11 +22,12 @@ function DataStore:init()
 end
 
 -- right now just using this for the tests
-function DataStore:initWithDriver(ds)
+function DataStore:initWithDriver(ds, driver)
 local o = {}
   setmetatable(o, self)
   self.__index = self
-  o.impl = require('lib/redis')
+  driver = (driver ~= nil) and driver or "redis"
+  o.impl = require(utils.concatStrings({'lib/', driver}))
   o.ds = ds
   return o
 end
@@ -119,8 +120,8 @@ function DataStore:saveOAuthToken(provider, token, body, ttl)
   return self.impl.saveOAuthToken(self.ds, provider, token, body, ttl)
 end
 
-function DataStore:exists(key)
-  return self.impl.exists(self.ds, key)
+function DataStore:subscriptionExists(key)
+  return self.impl.subscriptionExists(self.ds, key)
 end
 
 function DataStore:setRateLimit(key, value, interval, expires)
diff --git a/scripts/lua/lib/redis.lua b/scripts/lua/lib/redis.lua
index 78caab4..162d87b 100644
--- a/scripts/lua/lib/redis.lua
+++ b/scripts/lua/lib/redis.lua
@@ -599,6 +599,6 @@ end
 
 _M.get = get
 _M.set = set
-_M.exists = exists
+_M.subscriptionExists = exists
 _M.expire = expire
 return _M
diff --git a/scripts/lua/management/lib/apis.lua b/scripts/lua/management/lib/apis.lua
index b42d70f..221c49e 100644
--- a/scripts/lua/management/lib/apis.lua
+++ b/scripts/lua/management/lib/apis.lua
@@ -44,6 +44,7 @@ function _M.getAllAPIs(dataStore, queryParams)
   end
   if apiList == nil then
     apiList = {}
+    print ('apis: ' .. cjson.encode(apis))
     for k, v in pairs(apis) do
       if k%2 == 0 then
         apiList[#apiList+1] = cjson.decode(v)
diff --git a/scripts/lua/management/lib/subscriptions.lua b/scripts/lua/management/lib/subscriptions.lua
index 9671ef1..f3b5a6a 100644
--- a/scripts/lua/management/lib/subscriptions.lua
+++ b/scripts/lua/management/lib/subscriptions.lua
@@ -61,7 +61,7 @@ end
 function _M.deleteSubscription(red, artifactId, tenantId, clientId)
   local subscriptionKey = utils.concatStrings({"subscriptions:tenant:", tenantId, ":api:", artifactId})
   local key = utils.concatStrings({subscriptionKey, ":key:", clientId})
-  if redis.exists(red, key) == 1 then
+  if redis.subscriptionExists(red, key) == 1 then
     redis.deleteSubscription(red, key)
   else
     local pattern = utils.concatStrings({subscriptionKey, ":clientsecret:" , clientId, ":*"})
@@ -73,4 +73,4 @@ function _M.deleteSubscription(red, artifactId, tenantId, clientId)
   return true
 end
 
-return _M
\ No newline at end of file
+return _M
diff --git a/scripts/lua/oauth/google.lua b/scripts/lua/oauth/google.lua
index 74f5454..60d3351 100644
--- a/scripts/lua/oauth/google.lua
+++ b/scripts/lua/oauth/google.lua
@@ -28,7 +28,7 @@ local redis = require "lib/redis"
 local _M = {} 
 function _M.process (dataStore, token)
 
-  local result = dataStore:getOAuthToken(dataStore, 'google', token) 
+  local result = dataStore:getOAuthToken('google', token) 
   
   local httpc = http.new()
   if result ~= ngx.null then 
diff --git a/scripts/lua/policies/security/apiKey.lua b/scripts/lua/policies/security/apiKey.lua
index 2746619..a7fc591 100644
--- a/scripts/lua/policies/security/apiKey.lua
+++ b/scripts/lua/policies/security/apiKey.lua
@@ -52,7 +52,7 @@ function validate(dataStore, tenant, gatewayPath, apiId, scope, apiKey)
     k = utils.concatStrings({'subscriptions:tenant:', tenant, ':api:', apiId})
   end
   k = utils.concatStrings({k, ':key:', apiKey})
-  if dataStore:exists(k) == 1 then
+  if dataStore:subscriptionExists(k) == 1 then
     return k
   else
     return nil
diff --git a/scripts/lua/policies/security/clientSecret.lua b/scripts/lua/policies/security/clientSecret.lua
index af865af..4df573e 100644
--- a/scripts/lua/policies/security/clientSecret.lua
+++ b/scripts/lua/policies/security/clientSecret.lua
@@ -112,7 +112,7 @@ function validate(dataStore, tenant, gatewayPath, apiId, scope, clientId, client
   end
   -- using the same key location in redis, just using :clientsecret: instead of :key:
   k = utils.concatStrings({k, ':clientsecret:', clientId, ':', clientSecret})
-  if dataStore:exists(k) == 1 then
+  if dataStore:subscriptionExists(k) == 1 then
     return k
   else
     return nil
diff --git a/scripts/lua/routing.lua b/scripts/lua/routing.lua
index cd1d11b..7dc9756 100644
--- a/scripts/lua/routing.lua
+++ b/scripts/lua/routing.lua
@@ -50,7 +50,8 @@ function _M.processCall(dataStore)
   if redisKey == nil then
     request.err(404, 'Not found.')
   end
-  local obj = cjson.decode(dataStore:getResource(redisKey, "resources"))
+  local obj = dataStore:getResource(redisKey, "resources")
+  obj = cjson.decode(obj)
   cors.processCall(obj)
   ngx.var.tenantNamespace = obj.tenantNamespace
   ngx.var.tenantInstance = obj.tenantInstance
diff --git a/tests/scripts/lua/lib/redis.lua b/tests/scripts/lua/lib/redis.lua
index 9b45124..f9bf625 100644
--- a/tests/scripts/lua/lib/redis.lua
+++ b/tests/scripts/lua/lib/redis.lua
@@ -28,7 +28,7 @@ describe('Testing Redis module', function()
     _G.ngx = fakengx.new()
     red = fakeredis.new()
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     operations = {
       GET = {
         backendUrl = 'https://httpbin.org/get',
@@ -39,7 +39,7 @@ describe('Testing Redis module', function()
   it('should look up an api by one of it\'s member resources', function() 
     local red = fakeredis.new() 
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     local sampleResource = cjson.decode([[
       {
         "apiId": "a12341234",
@@ -57,7 +57,7 @@ describe('Testing Redis module', function()
   end) 
   it('should generate resource object to store in redis', function()
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     -- Resource object with no policies or security
     local apiId = 12345
     local resourceObj = {
@@ -128,7 +128,7 @@ describe('Testing Redis module', function()
     local key = 'resources:guest:hello'
     local field = 'resources'
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     -- resource doesn't exist in redis
     local generated = dataStore:getResource(key, field)
     assert.are.same(nil, generated)
@@ -136,7 +136,7 @@ describe('Testing Redis module', function()
     -- resource exists in redis
     local expected = dataStore:generateResourceObj(operations, nil)
     red:hset(key, field, expected)
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     generated = dataStore:getResource(key, field)
     assert.are.same(expected, generated)
   end)
@@ -145,7 +145,7 @@ describe('Testing Redis module', function()
     local key = 'resources:guest:hello'
     local field = 'resources'
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     local expected = dataStore:generateResourceObj(operations, nil)
     dataStore:createResource(key, field, expected)
     local generated = dataStore:getResource(key, field)
@@ -157,7 +157,7 @@ describe('Testing Redis module', function()
     local key = 'resources:guest:hello'
     local field = 'resources'
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     dataStore:deleteResource(key, field)
     assert.are.equal(ngx._exit, 404)
     -- Key exists - deleted properly
@@ -171,7 +171,7 @@ describe('Testing Redis module', function()
   it('shoud create an API Key subscription', function()
     local key = 'subscriptions:test:apikey'
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     dataStore:createSubscription(key)
     assert.are.same(1, red:exists(key))
   end)
@@ -180,7 +180,7 @@ describe('Testing Redis module', function()
     -- API key doesn't exist in redis - throw 404
     local key = 'subscriptions:test:apikey'
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     dataStore:deleteSubscription(key)
     assert.are.equal(404, ngx._exit)
 
diff --git a/tests/scripts/lua/security.lua b/tests/scripts/lua/security.lua
index 239650d..02e31c9 100644
--- a/tests/scripts/lua/security.lua
+++ b/tests/scripts/lua/security.lua
@@ -30,7 +30,9 @@ describe('API Key module', function()
     local red = fakeredis.new()
     local ngx = fakengx.new()
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+
+    local dataStore = ds.initWithDriver(red, 'redis')
+
     local ngxattrs = cjson.decode([[
       {
         "tenant":"abcd",
@@ -83,7 +85,7 @@ describe('API Key module', function()
   it('Returns nil with a bad apikey', function()
     local red = fakeredis.new()
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     local ngx = fakengx.new()
     local ngxattrs = cjson.decode([[
       {
@@ -108,7 +110,7 @@ describe('API Key module', function()
   it('Checks for a key with a custom header', function()
     local red = fakeredis.new()
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     local ngx = fakengx.new()
     local ngxattrs = cjson.decode([[
       {
@@ -162,7 +164,8 @@ describe('API Key module', function()
   it('Checks for a key with a custom header and hash configuration', function()
     local red = fakeredis.new()
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+
+    local dataStore = ds.initWithDriver(red, 'redis')
     local ngx = fakengx.new()
     local ngxattrs = cjson.decode([[
       {
@@ -240,7 +243,7 @@ describe('OAuth security module', function()
   it('Loads a facebook token from the cache without a valid app id', function()
     local red = fakeredis.new()
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     local token = "test"
     local ngxattrs = [[
       {
@@ -267,7 +270,7 @@ describe('OAuth security module', function()
   it('Loads a facebook token from the cache with a valid app id', function()
     local red = fakeredis.new()
     local ds = require "lib/dataStore"
-    local dataStore = ds.initWithDriver(red)
+    local dataStore = ds.initWithDriver(red, 'redis')
     local token = "test"
     local appid = "app"
     local ngxattrs = [[
@@ -298,6 +301,8 @@ describe('Client Secret Module', function()
   it('Validates a client secret pair with default names', function()
     local ngx = fakengx.new()
     local red = fakeredis.new()
+    local ds = require ('lib/dataStore')
+    local dataStore = ds.initWithDriver(red, 'redis')
     local ngxattrs = [[
       {
        "http_X_Client_ID":"abcd",
@@ -316,12 +321,15 @@ describe('Client Secret Module', function()
       }
     ]]
     red:set("subscriptions:tenant:1234:resource:v1/test:clientsecret:abcd:fakehash", "true")
-    local result = clientSecret.processWithHashFunction(red, cjson.decode(securityObj), function() return "fakehash" end)
+    local result = clientSecret.processWithHashFunction(dataStore, cjson.decode(securityObj), function() return "fakehash" end)
     assert(result)
   end)
   it('Validates a client secret pair with new names', function()
     local ngx = fakengx.new()
     local red = fakeredis.new()
+
+    local ds = require ('lib/dataStore')
+    local dataStore = ds.initWithDriver(red, 'redis')
     local ngxattrs = [[
       {
         "http_test_id":"abcd",
@@ -342,12 +350,14 @@ describe('Client Secret Module', function()
       }
     ]]
     red:set("subscriptions:tenant:1234:resource:v1/test:clientsecret:abcd:fakehash", "true")
-    local result = clientSecret.processWithHashFunction(red, cjson.decode(securityObj), function() return "fakehash" end)
+    local result = clientSecret.processWithHashFunction(dataStore, cjson.decode(securityObj), function() return "fakehash" end)
     assert(result)
   end)
   it('Doesn\'t work without a client id', function()
     local ngx = fakengx.new()
     local red = fakeredis.new()
+    local ds = require ('lib/dataStore')
+    local dataStore = ds.initWithDriver(red, 'redis')
     local ngxattrs = [[
       {
        "http_X_Client_Secret":"1234",
@@ -368,6 +378,8 @@ describe('Client Secret Module', function()
   it('Doesn\'t work without a Client Secret', function()
     local ngx = fakengx.new()
     local red = fakeredis.new()
+    local ds = require ('lib/dataStore')
+    local dataStore = ds.initWithDriver(red, 'redis')
     local ngxattrs = [[
       {
        "http_X_Client_ID":"abcd",
@@ -385,7 +397,7 @@ describe('Client Secret Module', function()
       }
     ]]
     red:set("subscriptions:tenant:1234:resource:v1/test:clientsecret:abcd:fakehash", "true")
-    local result = clientSecret.processWithHashFunction(red, cjson.decode(securityObj), function() return "fakehash" end)
+    local result = clientSecret.processWithHashFunction(dataStore, cjson.decode(securityObj), function() return "fakehash" end)
     assert.falsy(result)
   end)
 end)
diff --git a/tools/cassandra/schema.csql b/tools/cassandra/schema.csql
new file mode 100644
index 0000000..f5bdc3d
--- /dev/null
+++ b/tools/cassandra/schema.csql
@@ -0,0 +1,119 @@
+
+CREATE KEYSPACE apigateway WITH replication = {'class': 'NetworkTopologyStrategy', 'datacenter1': '1'}  AND durable_writes = false;
+
+CREATE TABLE apigateway.resource (
+    tenant_id text,
+    resource_path text,
+    api_id text,
+    value text,
+    PRIMARY KEY (tenant_id, resource_path)
+) WITH CLUSTERING ORDER BY (resource_path ASC)
+    AND bloom_filter_fp_chance = 0.01
+    AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'}
+    AND comment = ''
+    AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'}
+    AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'}
+    AND crc_check_chance = 1.0
+    AND dclocal_read_repair_chance = 0.1
+    AND default_time_to_live = 0
+    AND gc_grace_seconds = 864000
+    AND max_index_interval = 2048
+    AND memtable_flush_period_in_ms = 0
+    AND min_index_interval = 128
+    AND read_repair_chance = 0.0
+    AND speculative_retry = '99PERCENTILE';
+
+CREATE TABLE apigateway.api (
+    api_id text PRIMARY KEY,
+    tenant_id text,
+    value text
+) WITH bloom_filter_fp_chance = 0.01
+    AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'}
+    AND comment = ''
+    AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'}
+    AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'}
+    AND crc_check_chance = 1.0
+    AND dclocal_read_repair_chance = 0.1
+    AND default_time_to_live = 0
+    AND gc_grace_seconds = 864000
+    AND max_index_interval = 2048
+    AND memtable_flush_period_in_ms = 0
+    AND min_index_interval = 128
+    AND read_repair_chance = 0.0
+    AND speculative_retry = '99PERCENTILE';
+
+CREATE TABLE apigateway.oauth (
+    provider text,
+    oauth_token text,
+    value text,
+    PRIMARY KEY (provider, oauth_token)
+) WITH CLUSTERING ORDER BY (oauth_token ASC)
+    AND bloom_filter_fp_chance = 0.01
+    AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'}
+    AND comment = ''
+    AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'}
+    AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'}
+    AND crc_check_chance = 1.0
+    AND dclocal_read_repair_chance = 0.1
+    AND default_time_to_live = 0
+    AND gc_grace_seconds = 864000
+    AND max_index_interval = 2048
+    AND memtable_flush_period_in_ms = 0
+    AND min_index_interval = 128
+    AND read_repair_chance = 0.0
+    AND speculative_retry = '99PERCENTILE';
+
+CREATE TABLE apigateway.swagger (
+    swagger_id text PRIMARY KEY,
+    value text
+) WITH bloom_filter_fp_chance = 0.01
+    AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'}
+    AND comment = ''
+    AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'}
+    AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'}
+    AND crc_check_chance = 1.0
+    AND dclocal_read_repair_chance = 0.1
+    AND default_time_to_live = 0
+    AND gc_grace_seconds = 864000
+    AND max_index_interval = 2048
+    AND memtable_flush_period_in_ms = 0
+    AND min_index_interval = 128
+    AND read_repair_chance = 0.0
+    AND speculative_retry = '99PERCENTILE';
+
+CREATE TABLE apigateway.tenant (
+    tenant_id text PRIMARY KEY,
+    value text
+) WITH bloom_filter_fp_chance = 0.01
+    AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'}
+    AND comment = ''
+    AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'}
+    AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'}
+    AND crc_check_chance = 1.0
+    AND dclocal_read_repair_chance = 0.1
+    AND default_time_to_live = 0
+    AND gc_grace_seconds = 864000
+    AND max_index_interval = 2048
+    AND memtable_flush_period_in_ms = 0
+    AND min_index_interval = 128
+    AND read_repair_chance = 0.0
+    AND speculative_retry = '99PERCENTILE';
+
+CREATE TABLE apigateway.subscription (
+    key text PRIMARY KEY
+) WITH bloom_filter_fp_chance = 0.01
+    AND caching = {'keys': 'ALL', 'rows_per_partition': 'NONE'}
+    AND comment = ''
+    AND compaction = {'class': 'org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy', 'max_threshold': '32', 'min_threshold': '4'}
+    AND compression = {'chunk_length_in_kb': '64', 'class': 'org.apache.cassandra.io.compress.LZ4Compressor'}
+    AND crc_check_chance = 1.0
+    AND dclocal_read_repair_chance = 0.1
+    AND default_time_to_live = 0
+    AND gc_grace_seconds = 864000
+    AND max_index_interval = 2048
+    AND memtable_flush_period_in_ms = 0
+    AND min_index_interval = 128
+    AND read_repair_chance = 0.0
+    AND speculative_retry = '99PERCENTILE';
+
+
diff --git a/tools/cassandra/setup.sh b/tools/cassandra/setup.sh
new file mode 100755
index 0000000..2703ef9
--- /dev/null
+++ b/tools/cassandra/setup.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+cqlsh 127.0.0.1 9042 --cqlversion="3.4.4" -f tools/cassandra/schema.csql
diff --git a/tools/travis/build.sh b/tools/travis/build.sh
index fe80906..4450233 100755
--- a/tools/travis/build.sh
+++ b/tools/travis/build.sh
@@ -1,55 +1,101 @@
 #!/bin/bash
-set -e
-set -x
+set -e
+set -x
 
-# Build script for Travis-CI.
+# Build script for Travis-CI.
 
-SCRIPTDIR=$(cd $(dirname "$0") && pwd)
+SCRIPTDIR=$(cd $(dirname "$0") && pwd)
 ROOTDIR="$SCRIPTDIR/../.."
 WHISKDIR="$ROOTDIR/../openwhisk"
 
-# Install OpenWhisk
-cd $WHISKDIR/ansible
+# Install OpenWhisk
+cd $WHISKDIR/ansible
 
-ANSIBLE_CMD="ansible-playbook -i environments/local  -e docker_image_prefix=openwhisk"
+ANSIBLE_CMD="ansible-playbook -i environments/local  -e docker_image_prefix=openwhisk"
 
-$ANSIBLE_CMD setup.yml
-$ANSIBLE_CMD prereq.yml
-$ANSIBLE_CMD couchdb.yml
-$ANSIBLE_CMD initdb.yml
+$ANSIBLE_CMD setup.yml
+$ANSIBLE_CMD prereq.yml
+$ANSIBLE_CMD couchdb.yml
+$ANSIBLE_CMD initdb.yml
 
-#build docker image locally 
-pushd $ROOTDIR
+#build docker image locally
+pushd $ROOTDIR
 pwd
-docker build . -t "openwhisk/apigateway" 
+docker build . -t "openwhisk/apigateway"
 popd
 
-#Use local
-$ANSIBLE_CMD apigateway.yml -e apigateway_local_build=true
+#Use local
+$ANSIBLE_CMD apigateway.yml -e apigateway_local_build=true
 
-#Use dockerhub
-#$ANSIBLE_CMD apigateway.yml
+#Use dockerhub
+#$ANSIBLE_CMD apigateway.yml
 
-cd $WHISKDIR
+cd $WHISKDIR
 
-TERM=dumb ./gradlew tools:cli:distDocker -PdockerImagePrefix=openwhisk
+TERM=dumb ./gradlew tools:cli:distDocker -PdockerImagePrefix=openwhisk
 
-cd $WHISKDIR/ansible
+cd $WHISKDIR/ansible
 
 
-$ANSIBLE_CMD wipe.yml
-$ANSIBLE_CMD openwhisk.yml
+$ANSIBLE_CMD wipe.yml
+$ANSIBLE_CMD openwhisk.yml
+
+# Set Environment
+export OPENWHISK_HOME=$WHISKDIR
+
+# Tests
+cd $WHISKDIR
+cat whisk.properties
+WSK_TESTS_DEPS_EXCLUDE="-x :core:swift3Action:distDocker -x :core:pythonAction:distDocker -x :core:javaAction:distDocker -x :core:nodejsAction:distDocker -x :core:actionProxy:distDocker -x :sdk:docker:distDocker -x :core:python2Action:copyFiles -x :core:python2Action:distDocker -x :tests:dat:blackbox:badaction:distDocker -x :tests:dat:blackbox:badproxy:distDocker"
+TERM=dumb ./gradlew tests:test --tests apigw.healthtests.* ${WSK_TESTS_DEPS_EXCLUDE}
+sleep 60
+TERM=dumb ./gradlew tests:test --tests whisk.core.apigw.* ${WSK_TESTS_DEPS_EXCLUDE}
+sleep 60
+TERM=dumb ./gradlew tests:test --tests whisk.core.cli.test.ApiGwTests ${WSK_TESTS_DEPS_EXCLUDE}
+sleep 60
+
+
+#Test again with cassandra
+cd $SCRIPTDIR
+cp deploy.yml $WHISKDIR/ansible/roles/apigateway/tasks
+
+#build docker image locally
+pushd $ROOTDIR
+pwd
+docker build . -t "openwhisk/apigateway"
+popd
+
+# cd ../../../openwhisk/ansible
+
+#Use local
+$ANSIBLE_CMD apigateway.yml -e apigateway_local_build=true
+
+#Use dockerhub
+#$ANSIBLE_CMD apigateway.yml
+
+cd $WHISKDIR
+
+TERM=dumb ./gradlew tools:cli:distDocker -PdockerImagePrefix=openwhisk
+
+cd $WHISKDIR/ansible
+
+
+$ANSIBLE_CMD wipe.yml
+$ANSIBLE_CMD openwhisk.yml
+
+# Set Environment
+export OPENWHISK_HOME=$WHISKDIR
+
+# Tests
+cd $WHISKDIR
+cat whisk.properties
+WSK_TESTS_DEPS_EXCLUDE="-x :core:swift3Action:distDocker -x :core:pythonAction:distDocker -x :core:javaAction:distDocker -x :core:nodejsAction:distDocker -x :core:actionProxy:distDocker -x :sdk:docker:distDocker -x :core:python2Action:copyFiles -x :core:python2Action:distDocker -x :tests:dat:blackbox:badaction:distDocker -x :tests:dat:blackbox:badproxy:distDocker"
+TERM=dumb ./gradlew tests:test --tests apigw.healthtests.* ${WSK_TESTS_DEPS_EXCLUDE}
+sleep 60
+TERM=dumb ./gradlew tests:test --tests whisk.core.apigw.* ${WSK_TESTS_DEPS_EXCLUDE}
+sleep 60
+TERM=dumb ./gradlew tests:test --tests whisk.core.cli.test.ApiGwTests ${WSK_TESTS_DEPS_EXCLUDE}
+sleep 60
 
-# Set Environment
-export OPENWHISK_HOME=$WHISKDIR
 
-# Tests
-cd $WHISKDIR
-cat whisk.properties
-WSK_TESTS_DEPS_EXCLUDE="-x :core:swift3Action:distDocker -x :core:pythonAction:distDocker -x :core:javaAction:distDocker -x :core:nodejsAction:distDocker -x :core:actionProxy:distDocker -x :sdk:docker:distDocker -x :core:python2Action:copyFiles -x :core:python2Action:distDocker -x :tests:dat:blackbox:badaction:distDocker -x :tests:dat:blackbox:badproxy:distDocker"
-TERM=dumb ./gradlew tests:test --tests apigw.healthtests.* ${WSK_TESTS_DEPS_EXCLUDE}
-sleep 60
-TERM=dumb ./gradlew tests:test --tests whisk.core.apigw.* ${WSK_TESTS_DEPS_EXCLUDE}
-sleep 60
-TERM=dumb ./gradlew tests:test --tests whisk.core.cli.test.ApiGwTests ${WSK_TESTS_DEPS_EXCLUDE}
 
diff --git a/tools/travis/deploy.yml b/tools/travis/deploy.yml
new file mode 100644
index 0000000..67430f7
--- /dev/null
+++ b/tools/travis/deploy.yml
@@ -0,0 +1,32 @@
+---
+# This role will install apigateway
+
+- name: "pull the openwhisk/apigateway image"
+  shell: "docker pull openwhisk/apigateway"
+  when: apigateway_local_build is undefined
+
+- name: (re)start apigateway
+  docker_container:
+    name: apigateway
+    image: openwhisk/apigateway
+    state: started
+    recreate: true
+    restart_policy: "{{ docker.restart.policy }}"
+    hostname: apigateway
+    env:
+      "DATASTORE": "cassandra"
+      "CASSANDRA_HOST": "172.17.0.1"
+      "CASSANDRA_KEYSPACE": "apigateway"
+      "PUBLIC_MANAGEDURL_HOST": "{{ inventory_hostname }}"
+      "PUBLIC_MANAGEDURL_PORT": "{{ apigateway.port.mgmt }}"
+    ports:
+      - "{{ apigateway.port.mgmt }}:8080"
+      - "{{ apigateway.port.api }}:9000"
+
+- name: wait until the API Gateway in this host is up and running
+  uri:
+    url: "http://{{ groups['apigateway'] | first }}:{{ apigateway.port.api }}/v1/apis"
+  register: result
+  until: result.status == 200
+  retries: 12
+  delay: 5


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services