Posted to issues@trafficcontrol.apache.org by GitBox <gi...@apache.org> on 2018/03/08 22:31:13 UTC

[GitHub] dangogh closed pull request #1950: Added CRUD for /parameters

dangogh closed pull request #1950: Added CRUD for /parameters
URL: https://github.com/apache/incubator-trafficcontrol/pull/1950
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

diff --git a/lib/go-tc/parameters.go b/lib/go-tc/parameters.go
index cbf7afbeb4..5949e89d26 100644
--- a/lib/go-tc/parameters.go
+++ b/lib/go-tc/parameters.go
@@ -36,3 +36,17 @@ type Parameter struct {
 	Secure      bool            `json:"secure" db:"secure"`
 	Value       string          `json:"value" db:"value"`
 }
+
+// ParameterNullable - a struct version that allows for all fields to be null, mostly used by the API side
+type ParameterNullable struct {
+	//
+	// NOTE: the db: struct tags are used for testing to map to their equivalent database column (if there is one)
+	//
+	ConfigFile  *string         `json:"configFile" db:"config_file"`
+	ID          *int            `json:"id" db:"id"`
+	LastUpdated *TimeNoMod      `json:"lastUpdated" db:"last_updated"`
+	Name        *string         `json:"name" db:"name"`
+	Profiles    json.RawMessage `json:"profiles" db:"profiles"`
+	Secure      *bool           `json:"secure" db:"secure"`
+	Value       *string         `json:"value" db:"value"`
+}
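
For reference (not part of the diff above): because every field of ParameterNullable is a pointer, fields left nil marshal to JSON null, while populated pointers carry their values. A minimal, illustrative sketch of that behavior, using a trimmed stand-in struct and placeholder values:

package main

import (
	"encoding/json"
	"fmt"
)

// parameterNullable is an illustrative stand-in; tags mirror the struct added above.
type parameterNullable struct {
	ConfigFile *string `json:"configFile"`
	ID         *int    `json:"id"`
	Name       *string `json:"name"`
	Secure     *bool   `json:"secure"`
	Value      *string `json:"value"`
}

func main() {
	name, value := "location", "/etc/trafficserver" // placeholder parameter
	p := parameterNullable{Name: &name, Value: &value} // ConfigFile, ID, Secure stay nil
	b, _ := json.Marshal(p)
	fmt.Println(string(b))
	// {"configFile":null,"id":null,"name":"location","secure":null,"value":"/etc/trafficserver"}
}
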
diff --git a/lib/go-tc/physlocations.go b/lib/go-tc/physlocations.go
index 76d6c18a97..6d957eeec8 100644
--- a/lib/go-tc/physlocations.go
+++ b/lib/go-tc/physlocations.go
@@ -45,18 +45,18 @@ type PhysLocationNullable struct {
 	//
 	// NOTE: the db: struct tags are used for testing to map to their equivalent database column (if there is one)
 	//
-	Address     *string   `json:"address" db:"address"`
-	City        *string   `json:"city" db:"city"`
-	Comments    *string   `json:"comments" db:"comments"`
-	Email       *string   `json:"email" db:"email"`
-	ID          *int      `json:"id" db:"id"`
-	LastUpdated TimeNoMod `json:"lastUpdated" db:"last_updated"`
-	Name        *string   `json:"name" db:"name"`
-	Phone       *string   `json:"phone" db:"phone"`
-	POC         *string   `json:"poc" db:"poc"`
-	RegionID    *int      `json:"regionId" db:"region"`
-	RegionName  *string   `json:"region" db:"region_name"`
-	ShortName   *string   `json:"shortName" db:"short_name"`
-	State       *string   `json:"state" db:"state"`
-	Zip         *string   `json:"zip" db:"zip"`
+	Address     *string    `json:"address" db:"address"`
+	City        *string    `json:"city" db:"city"`
+	Comments    *string    `json:"comments" db:"comments"`
+	Email       *string    `json:"email" db:"email"`
+	ID          *int       `json:"id" db:"id"`
+	LastUpdated *TimeNoMod `json:"lastUpdated" db:"last_updated"`
+	Name        *string    `json:"name" db:"name"`
+	Phone       *string    `json:"phone" db:"phone"`
+	POC         *string    `json:"poc" db:"poc"`
+	RegionID    *int       `json:"regionId" db:"region"`
+	RegionName  *string    `json:"region" db:"region_name"`
+	ShortName   *string    `json:"shortName" db:"short_name"`
+	State       *string    `json:"state" db:"state"`
+	Zip         *string    `json:"zip" db:"zip"`
 }
diff --git a/lib/go-tc/regions.go b/lib/go-tc/regions.go
index 379d56e3f9..76a6816ecf 100644
--- a/lib/go-tc/regions.go
+++ b/lib/go-tc/regions.go
@@ -19,10 +19,12 @@ package tc
  * under the License.
  */
 
+// RegionsResponse ...
 type RegionsResponse struct {
 	Response []Region `json:"response"`
 }
 
+// Region ...
 type Region struct {
 	DivisionName string    `json:"divisionName"`
 	Division     int       `json:"division" db:"division"`
diff --git a/lib/go-tc/system_info.go b/lib/go-tc/system_info.go
index 7ea662ac80..eca2c3e0c9 100644
--- a/lib/go-tc/system_info.go
+++ b/lib/go-tc/system_info.go
@@ -21,6 +21,6 @@ package tc
 
 type SystemInfoResponse struct {
 	Response struct {
-		Parameters map[string]string `json:"parameters"`
+		ParametersNullable map[string]string `json:"parameters"`
 	} `json:"response"`
 }
diff --git a/traffic_ops/client/v13/cachegroup.go b/traffic_ops/client/v13/cachegroup.go
index 2203ffb7bc..27ef2d10e2 100644
--- a/traffic_ops/client/v13/cachegroup.go
+++ b/traffic_ops/client/v13/cachegroup.go
@@ -101,7 +101,7 @@ func (to *Session) GetCacheGroupByID(id int) ([]tc.CacheGroup, ReqInf, error) {
 
 // GET a CacheGroup by the CacheGroup name
 func (to *Session) GetCacheGroupByName(name string) ([]tc.CacheGroup, ReqInf, error) {
-	url := fmt.Sprintf("%s/name/%s", API_v13_CacheGroups, name)
+	url := fmt.Sprintf("%s?name=%s", API_v13_CacheGroups, name)
 	resp, remoteAddr, err := to.request(http.MethodGet, url, nil)
 	reqInf := ReqInf{CacheHitStatus: CacheHitStatusMiss, RemoteAddr: remoteAddr}
 	if err != nil {
diff --git a/traffic_ops/client/v13/cdn.go b/traffic_ops/client/v13/cdn.go
index 2396c350df..5e4c4d5b2c 100644
--- a/traffic_ops/client/v13/cdn.go
+++ b/traffic_ops/client/v13/cdn.go
@@ -101,7 +101,7 @@ func (to *Session) GetCDNByID(id int) ([]tc.CDN, ReqInf, error) {
 
 // GET a CDN by the CDN name
 func (to *Session) GetCDNByName(name string) ([]tc.CDN, ReqInf, error) {
-	url := fmt.Sprintf("%s/name/%s", API_v13_CDNs, name)
+	url := fmt.Sprintf("%s?name=%s", API_v13_CDNs, name)
 	resp, remoteAddr, err := to.request(http.MethodGet, url, nil)
 	reqInf := ReqInf{CacheHitStatus: CacheHitStatusMiss, RemoteAddr: remoteAddr}
 	if err != nil {
diff --git a/traffic_ops/client/v13/session.go b/traffic_ops/client/v13/session.go
index c117d82049..79b036886e 100644
--- a/traffic_ops/client/v13/session.go
+++ b/traffic_ops/client/v13/session.go
@@ -142,6 +142,41 @@ func (to *Session) login() (net.Addr, error) {
 	return remoteAddr, nil
 }
 
+// logout of Traffic Ops
+func (to *Session) logout() (net.Addr, error) {
+	credentials, err := loginCreds(to.UserName, to.Password)
+	if err != nil {
+		return nil, errors.New("creating login credentials: " + err.Error())
+	}
+
+	path := "/api/1.2/user/logout"
+	resp, remoteAddr, err := to.rawRequest("POST", path, credentials)
+	resp, remoteAddr, err = to.ErrUnlessOK(resp, remoteAddr, err, path)
+	if err != nil {
+		return remoteAddr, errors.New("requesting: " + err.Error())
+	}
+	defer resp.Body.Close()
+
+	var alerts tc.Alerts
+	if err := json.NewDecoder(resp.Body).Decode(&alerts); err != nil {
+		return remoteAddr, errors.New("decoding response JSON: " + err.Error())
+	}
+
+	success := false
+	for _, alert := range alerts.Alerts {
+		if alert.Level == "success" && alert.Text == "Successfully logged in." {
+			success = true
+			break
+		}
+	}
+
+	if !success {
+		return remoteAddr, fmt.Errorf("Logout failed, alerts string: %+v", alerts)
+	}
+
+	return remoteAddr, nil
+}
+
 // Login to traffic_ops, the response should set the cookie for this session
 // automatically. Start with
 //     to := traffic_ops.Login("user", "passwd", true)
@@ -172,6 +207,32 @@ func LoginWithAgent(toURL string, toUser string, toPasswd string, insecure bool,
 	return to, remoteAddr, nil
 }
 
+// Logout of traffic_ops
+func LogoutWithAgent(toURL string, toUser string, toPasswd string, insecure bool, userAgent string, useCache bool, requestTimeout time.Duration) (*Session, net.Addr, error) {
+	options := cookiejar.Options{
+		PublicSuffixList: publicsuffix.List,
+	}
+
+	jar, err := cookiejar.New(&options)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	to := NewSession(toUser, toPasswd, toURL, userAgent, &http.Client{
+		Timeout: requestTimeout,
+		Transport: &http.Transport{
+			TLSClientConfig: &tls.Config{InsecureSkipVerify: insecure},
+		},
+		Jar: jar,
+	}, useCache)
+
+	remoteAddr, err := to.logout()
+	if err != nil {
+		return nil, remoteAddr, errors.New("logging out: " + err.Error())
+	}
+	return to, remoteAddr, nil
+}
+
 // ErrUnlessOk returns nil and an error if the given Response's status code is anything but 200 OK. This includes reading the Response.Body and Closing it. Otherwise, the given response and error are returned unchanged.
 func (to *Session) ErrUnlessOK(resp *http.Response, remoteAddr net.Addr, err error, path string) (*http.Response, net.Addr, error) {
 	if err != nil {
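
For context, an illustrative usage sketch of the new client helpers (URL, credentials, and user agent are placeholders, not values from this PR):

package main

import (
	"log"
	"time"

	"github.com/apache/incubator-trafficcontrol/traffic_ops/client/v13"
)

func main() {
	const toURL, toUser, toPass = "https://to.example.net", "admin", "password" // placeholders
	timeout := 30 * time.Second

	// Authenticate and obtain a session (signature as added in session.go above).
	session, _, err := v13.LoginWithAgent(toURL, toUser, toPass, true, "example-agent", true, timeout)
	if err != nil {
		log.Fatalf("logging in: %v", err)
	}
	_ = session // ...exercise the API with the session here...

	// Release the session using the new LogoutWithAgent helper.
	if _, _, err := v13.LogoutWithAgent(toURL, toUser, toPass, true, "example-agent", true, timeout); err != nil {
		log.Fatalf("logging out: %v", err)
	}
}
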
diff --git a/traffic_ops/testing/api/conf/traffic-ops-test.conf b/traffic_ops/testing/api/conf/traffic-ops-test.conf
index 0300bb38c2..6cf5290e52 100644
--- a/traffic_ops/testing/api/conf/traffic-ops-test.conf
+++ b/traffic_ops/testing/api/conf/traffic-ops-test.conf
@@ -12,9 +12,16 @@
         }
     },
     "trafficOps": {
-        "password": "twelve",
         "URL": "https://localhost:8443",
-        "user": "admin"
+        "password": "twelve",
+        "users": {
+            "disallowed": "disallowed",
+            "operations": "operations",
+            "admin": "admin",
+            "federation": "federation",
+            "portal": "portal",
+            "readOnly": "readOnly"
+        }
     },
     "trafficOpsDB": {
         "dbname": "to_test",
diff --git a/traffic_ops/testing/api/config/config.go b/traffic_ops/testing/api/config/config.go
index 2e6ee21218..c8e704c666 100644
--- a/traffic_ops/testing/api/config/config.go
+++ b/traffic_ops/testing/api/config/config.go
@@ -21,6 +21,7 @@ import (
 	"fmt"
 	"io/ioutil"
 	"os"
+	"reflect"
 
 	log "github.com/apache/incubator-trafficcontrol/lib/go-log"
 	"github.com/kelseyhightower/envconfig"
@@ -38,16 +39,38 @@ type TrafficOps struct {
 	// URL - The point to the Traffic Ops instance being tested
 	URL string `json:"URL" envconfig:"TO_URL" default:"https://localhost:8443"`
 
-	// User - The Traffic Ops test user hitting the API
-	User string `json:"user" envconfig:"TO_USER"`
-
 	// UserPassword - The Traffic Ops test user password hitting the API
 	UserPassword string `json:"password" envconfig:"TO_USER_PASSWORD"`
 
+	// Users - The Traffic Ops test users hitting the API
+	Users Users `json:"users"`
+
 	// Insecure - ignores insecure ssls certs that were self-generated
 	Insecure bool `json:"sslInsecure" envconfig:"SSL_INSECURE"`
 }
 
+// Users "users" section of the test-to-api.conf file
+type Users struct {
+
+	// DisallowedUser - The Traffic Ops Disallowed user
+	Disallowed string `json:"disallowed" envconfig:"TO_USER_DISALLOWED"`
+
+	// ReadOnly - The Traffic Ops Read Only user
+	ReadOnly string `json:"readOnly" envconfig:"TO_USER_READ_ONLY"`
+
+	// Operations - The Traffic Ops Operations user
+	Operations string `json:"operations" envconfig:"TO_USER_OPERATIONS"`
+
+	// AdminUser - The Traffic Ops Admin user
+	Admin string `json:"admin" envconfig:"TO_USER_ADMIN"`
+
+	// PortalUser - The Traffic Ops Portal user
+	Portal string `json:"portal" envconfig:"TO_USER_PORTAL"`
+
+	// FederationUser - The Traffic Ops Federation user
+	Federation string `json:"federation" envconfig:"TO_USER_FEDERATION"`
+}
+
 // TrafficOpsDB - config section
 type TrafficOpsDB struct {
 	// Name - Traffic Ops Database name where the test data will be setup
@@ -111,15 +134,114 @@ func LoadConfig(confPath string) (Config, error) {
 			return Config{}, fmt.Errorf("unmarshalling '%s': %v", confPath, err)
 		}
 	}
+	errs := validate(confPath, cfg)
+	if len(errs) > 0 {
+		fmt.Printf("configuration error:\n")
+		for _, e := range errs {
+			fmt.Printf("%v\n", e)
+		}
+		os.Exit(0)
+	}
 	err := envconfig.Process("traffic-ops-client-tests", &cfg)
 	if err != nil {
-		fmt.Printf("Cannot parse config: %v\n", err)
+		fmt.Printf("cannot parse config: %v\n", err)
 		os.Exit(0)
 	}
 
 	return cfg, err
 }
 
+// validate all required fields in the config.
+func validate(confPath string, config Config) []error {
+
+	errs := []error{}
+
+	var f string
+	f = "TrafficOps"
+	toTag, ok := getStructTag(config, f)
+	if !ok {
+		errs = append(errs, fmt.Errorf("'%s' must be configured in %s", toTag, confPath))
+	}
+
+	if config.TrafficOps.URL == "" {
+		f = "URL"
+		tag, ok := getStructTag(config.TrafficOps, f)
+		if !ok {
+			errs = append(errs, fmt.Errorf("cannot lookup structTag: %s", f))
+		}
+		errs = append(errs, fmt.Errorf("'%s.%s' must be configured in %s", toTag, tag, confPath))
+	}
+
+	if config.TrafficOps.Users.Disallowed == "" {
+		f = "Disallowed"
+		tag, ok := getStructTag(config.TrafficOps.Users, f)
+		if !ok {
+			errs = append(errs, fmt.Errorf("cannot lookup structTag: %s", f))
+		}
+		errs = append(errs, fmt.Errorf("'%s.%s' must be configured in %s", toTag, tag, confPath))
+	}
+
+	if config.TrafficOps.Users.ReadOnly == "" {
+		f = "ReadOnly"
+		tag, ok := getStructTag(config.TrafficOps.Users, f)
+		if !ok {
+			errs = append(errs, fmt.Errorf("cannot lookup structTag: %s", f))
+		}
+		errs = append(errs, fmt.Errorf("'%s.%s' must be configured in %s", toTag, tag, confPath))
+	}
+
+	if config.TrafficOps.Users.Operations == "" {
+		f = "Operations"
+		tag, ok := getStructTag(config.TrafficOps.Users, f)
+		if !ok {
+			errs = append(errs, fmt.Errorf("cannot lookup structTag: %s", f))
+		}
+		errs = append(errs, fmt.Errorf("'%s.%s' must be configured in %s", toTag, tag, confPath))
+	}
+
+	if config.TrafficOps.Users.Admin == "" {
+		f = "Admin"
+		tag, ok := getStructTag(config.TrafficOps.Users, f)
+		if !ok {
+			errs = append(errs, fmt.Errorf("cannot lookup structTag: %s", f))
+		}
+		errs = append(errs, fmt.Errorf("'%s.%s' must be configured in %s", toTag, tag, confPath))
+	}
+
+	if config.TrafficOps.Users.Portal == "" {
+		f = "Portal"
+		tag, ok := getStructTag(config.TrafficOps.Users, f)
+		if !ok {
+			errs = append(errs, fmt.Errorf("cannot lookup structTag: %s", f))
+		}
+		errs = append(errs, fmt.Errorf("'%s.%s' must be configured in %s", toTag, tag, confPath))
+	}
+
+	if config.TrafficOps.Users.Federation == "" {
+		f = "Federation"
+		tag, ok := getStructTag(config.TrafficOps.Users, f)
+		if !ok {
+			errs = append(errs, fmt.Errorf("cannot lookup structTag: %s", f))
+		}
+		errs = append(errs, fmt.Errorf("'%s.%s' must be configured in %s", toTag, tag, confPath))
+	}
+
+	return errs
+}
+
+func getStructTag(thing interface{}, fieldName string) (string, bool) {
+	var tag string
+	var ok bool
+	t := reflect.TypeOf(thing)
+	if t != nil {
+		if f, ok := t.FieldByName(fieldName); ok {
+			tag = f.Tag.Get("json")
+			return tag, ok
+		}
+	}
+	return tag, ok
+}
+
 // ErrorLog - critical messages
 func (c Config) ErrorLog() log.LogLocation {
 	return log.LogLocation(c.Default.Log.Error)
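
The getStructTag helper above reads a field's json tag via reflection so that validation errors can reference config keys by their JSON names. A small standalone sketch of the same technique (the struct here is hypothetical, not the real Config type):

package main

import (
	"fmt"
	"reflect"
)

type users struct {
	Admin    string `json:"admin"`
	ReadOnly string `json:"readOnly"`
}

// jsonTag mirrors getStructTag: look up a field by name and return its json tag.
func jsonTag(thing interface{}, fieldName string) (string, bool) {
	t := reflect.TypeOf(thing)
	if t == nil {
		return "", false
	}
	f, ok := t.FieldByName(fieldName)
	if !ok {
		return "", false
	}
	return f.Tag.Get("json"), true
}

func main() {
	tag, ok := jsonTag(users{}, "ReadOnly")
	fmt.Println(tag, ok) // readOnly true
}
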
diff --git a/traffic_ops/testing/api/v13/parameters_test.go b/traffic_ops/testing/api/v13/parameters_test.go
index 916e5460fb..84a23a19d8 100644
--- a/traffic_ops/testing/api/v13/parameters_test.go
+++ b/traffic_ops/testing/api/v13/parameters_test.go
@@ -16,7 +16,7 @@
 package v13
 
 import (
-	"fmt"
+	"sync"
 	"testing"
 
 	"github.com/apache/incubator-trafficcontrol/lib/go-log"
@@ -25,8 +25,11 @@ import (
 
 func TestParameters(t *testing.T) {
 
+	//toReqTimeout := time.Second * time.Duration(Config.Default.Session.TimeoutInSecs)
+	//SwitchSession(toReqTimeout, Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, Config.TrafficOps.UserPassword, Config.TrafficOps.Users.Portal, Config.TrafficOps.UserPassword)
+
 	CreateTestParameters(t)
-	//UpdateTestParameters(t)
+	UpdateTestParameters(t)
 	GetTestParameters(t)
 	DeleteTestParameters(t)
 
@@ -38,7 +41,7 @@ func CreateTestParameters(t *testing.T) {
 		resp, _, err := TOSession.CreateParameter(pl)
 		log.Debugln("Response: ", resp)
 		if err != nil {
-			t.Errorf("could not CREATE phys_locations: %v\n", err)
+			t.Errorf("could not CREATE parameters: %v\n", err)
 		}
 	}
 
@@ -83,33 +86,50 @@ func GetTestParameters(t *testing.T) {
 	}
 }
 
+func DeleteTestParametersParallel(t *testing.T) {
+
+	var wg sync.WaitGroup
+	for _, pl := range testData.Parameters {
+
+		wg.Add(1)
+		go func(pl tc.Parameter) {
+			defer wg.Done()
+			DeleteTestParameter(t, pl)
+		}(pl)
+
+	}
+	wg.Wait()
+}
+
 func DeleteTestParameters(t *testing.T) {
 
 	for _, pl := range testData.Parameters {
-		// Retrieve the Parameter by name so we can get the id for the Update
-		resp, _, err := TOSession.GetParameterByNameAndConfigFile(pl.Name, pl.ConfigFile)
+		DeleteTestParameter(t, pl)
+	}
+}
+
+func DeleteTestParameter(t *testing.T, pl tc.Parameter) {
+
+	// Retrieve the Parameter by name so we can get the id for the Delete
+	resp, _, err := TOSession.GetParameterByNameAndConfigFile(pl.Name, pl.ConfigFile)
+	if err != nil {
+		t.Errorf("cannot GET Parameter by name: %v - %v\n", pl.Name, err)
+	}
+	if len(resp) > 0 {
+		respParameter := resp[0]
+
+		delResp, _, err := TOSession.DeleteParameterByID(respParameter.ID)
+		if err != nil {
+			t.Errorf("cannot DELETE Parameter by name: %v - %v\n", err, delResp)
+		}
+
+		// Retrieve the Parameter to see if it got deleted
+		pls, _, err := TOSession.GetParameterByNameAndConfigFile(pl.Name, pl.ConfigFile)
 		if err != nil {
-			t.Errorf("cannot GET Parameter by name: %v - %v\n", pl.Name, err)
+			t.Errorf("error deleting Parameter name: %s\n", err.Error())
 		}
-		if len(resp) > 0 {
-			respParameter := resp[0]
-
-			delResp, _, err := TOSession.DeleteParameterByID(respParameter.ID)
-			if err != nil {
-				t.Errorf("cannot DELETE Parameter by name: %v - %v\n", err, delResp)
-			}
-			//time.Sleep(1 * time.Second)
-
-			// Retrieve the Parameter to see if it got deleted
-			pls, _, err := TOSession.GetParameterByNameAndConfigFile(pl.Name, pl.ConfigFile)
-			if err != nil {
-				t.Errorf("error deleting Parameter name: %s\n", err.Error())
-			}
-			if len(pls) > 0 {
-				t.Errorf("expected Parameter Name: %s and ConfigFile: %s to be deleted\n", pl.Name, pl.ConfigFile)
-			}
-		} else {
-			fmt.Printf("no resp ---> %v\n", resp)
+		if len(pls) > 0 {
+			t.Errorf("expected Parameter Name: %s and ConfigFile: %s to be deleted\n", pl.Name, pl.ConfigFile)
 		}
 	}
 }
diff --git a/traffic_ops/testing/api/v13/session.go b/traffic_ops/testing/api/v13/session.go
new file mode 100644
index 0000000000..8b615858a2
--- /dev/null
+++ b/traffic_ops/testing/api/v13/session.go
@@ -0,0 +1,52 @@
+package v13
+
+/*
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*/
+
+import (
+	"time"
+
+	"github.com/apache/incubator-trafficcontrol/traffic_ops/client/v13"
+	_ "github.com/lib/pq"
+)
+
+var (
+	TOSession *v13.Session
+)
+
+func SetupSession(toReqTimeout time.Duration, toURL string, toUser string, toPass string) error {
+	var err error
+
+	toReqTimeout = time.Second * time.Duration(Config.Default.Session.TimeoutInSecs)
+	TOSession, _, err = v13.LoginWithAgent(toURL, toUser, toPass, true, "to-api-v13-client-tests", true, toReqTimeout)
+	return err
+}
+
+func TeardownSession(toReqTimeout time.Duration, toURL string, toUser string, toPass string) error {
+	var err error
+	toReqTimeout = time.Second * time.Duration(Config.Default.Session.TimeoutInSecs)
+	TOSession, _, err = v13.LogoutWithAgent(toURL, toUser, toPass, true, "to-api-v13-client-tests", true, toReqTimeout)
+
+	return err
+}
+
+func SwitchSession(toReqTimeout time.Duration, toURL string, toOldUser string, toOldPass string, toNewUser string, toNewPass string) error {
+	err := TeardownSession(toReqTimeout, toURL, toOldUser, toOldPass)
+
+	// intentionally skip errors so that we can continue with setup in the event of a 403
+
+	err = SetupSession(toReqTimeout, toURL, toNewUser, toNewPass)
+	return err
+}
diff --git a/traffic_ops/testing/api/v13/tc-fixtures.json b/traffic_ops/testing/api/v13/tc-fixtures.json
index 7000af9423..9b2e76cde8 100644
--- a/traffic_ops/testing/api/v13/tc-fixtures.json
+++ b/traffic_ops/testing/api/v13/tc-fixtures.json
@@ -193,122 +193,122 @@
             "value": "30"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.503311+00:00",
             "name": "key0",
-            "secure": false,
+            "secure": true,
             "value": "HOOJ3Ghq1x4gChp3iQkqVTcPlOj8UCi3"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.505157+00:00",
             "name": "key1",
-            "secure": false,
+            "secure": true,
             "value": "_9LZYkRnfCS0rCBF7fTQzM9Scwlp2FhO"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.508548+00:00",
             "name": "key2",
-            "secure": false,
+            "secure": true,
             "value": "AFpkxfc4oTiyFSqtY6_ohjt3V80aAIxS"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.401781+00:00",
             "name": "key3",
-            "secure": false,
+            "secure": true,
             "value": "AL9kzs_SXaRZjPWH8G5e2m4ByTTzkzlc"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.406601+00:00",
             "name": "key4",
-            "secure": false,
+            "secure": true,
             "value": "poP3n3szbD1U4vx1xQXV65BvkVgWzfN8"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.408784+00:00",
             "name": "key5",
-            "secure": false,
+            "secure": true,
             "value": "1ir32ng4C4w137p5oq72kd2wqmIZUrya"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.410854+00:00",
             "name": "key6",
-            "secure": false,
+            "secure": true,
             "value": "B1qLptn2T1b_iXeTCWDcVuYvANtH139f"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.412716+00:00",
             "name": "key7",
-            "secure": false,
+            "secure": true,
             "value": "PiCV_5OODMzBbsNFMWsBxcQ8v1sK0TYE"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.414638+00:00",
             "name": "key8",
-            "secure": false,
+            "secure": true,
             "value": "Ggpv6DqXDvt2s1CETPBpNKwaLk4fTM9l"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.416551+00:00",
             "name": "key9",
-            "secure": false,
+            "secure": true,
             "value": "qPlVT_s6kL37aqb6hipDm4Bt55S72mI7"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.418689+00:00",
             "name": "key10",
-            "secure": false,
+            "secure": true,
             "value": "BsI5A9EmWrobIS1FeuOs1z9fm2t2WSBe"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.420467+00:00",
             "name": "key11",
-            "secure": false,
+            "secure": true,
             "value": "A54y66NCIj897GjS4yA9RrsSPtCUnQXP"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.422414+00:00",
             "name": "key12",
-            "secure": false,
+            "secure": true,
             "value": "2jZH0NDPSJttIr4c2KP510f47EKqTQAu"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.424435+00:00",
             "name": "key13",
-            "secure": false,
+            "secure": true,
             "value": "XduT2FBjBmmVID5JRB5LEf9oR5QDtBgC"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.426125+00:00",
             "name": "key14",
-            "secure": false,
+            "secure": true,
             "value": "D9nH0SvK_0kP5w8QNd1UFJ28ulFkFKPn"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.427797+00:00",
             "name": "key15",
-            "secure": false,
+            "secure": true,
             "value": "udKXWYNwbXXweaaLzaKDGl57OixnIIcm"
         },
         {
-            "configFile": "url_sig_cdl-c2.config",
+            "configFile": "url_sig.config",
             "lastUpdated": "2018-01-19T19:01:21.431062+00:00",
             "name": "error_url",
-            "secure": false,
+            "secure": true,
             "value": "403"
         },
         {
diff --git a/traffic_ops/testing/api/v13/todb.go b/traffic_ops/testing/api/v13/todb.go
new file mode 100644
index 0000000000..19f26d51c5
--- /dev/null
+++ b/traffic_ops/testing/api/v13/todb.go
@@ -0,0 +1,750 @@
+/*
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*/
+
+package v13
+
+import (
+	"database/sql"
+	"fmt"
+	"os"
+
+	"github.com/apache/incubator-trafficcontrol/lib/go-log"
+	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/auth"
+)
+
+var (
+	db *sql.DB
+)
+
+// OpenConnection ...
+func OpenConnection() (*sql.DB, error) {
+	var err error
+	sslStr := "require"
+	if !Config.TrafficOpsDB.SSL {
+		sslStr = "disable"
+	}
+
+	db, err = sql.Open("postgres", fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s", Config.TrafficOpsDB.User, Config.TrafficOpsDB.Password, Config.TrafficOpsDB.Hostname, Config.TrafficOpsDB.Name, sslStr))
+
+	if err != nil {
+		log.Errorf("opening database: %v\n", err)
+		return nil, fmt.Errorf("transaction failed: %s", err)
+	}
+	return db, err
+}
+
+// SetupTestData ...
+func SetupTestData(*sql.DB) error {
+	var err error
+
+	err = SetupTenants(db)
+	if err != nil {
+		fmt.Printf("\nError setting up tenants %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+		os.Exit(1)
+	}
+
+	err = SetupCDNs(db)
+	if err != nil {
+		fmt.Printf("\nError setting up cdns %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+		os.Exit(1)
+	}
+
+	err = SetupRoles(db)
+	if err != nil {
+		fmt.Printf("\nError setting up roles %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+		os.Exit(1)
+	}
+
+	err = SetupTmusers(db)
+	if err != nil {
+		fmt.Printf("\nError setting up tm_user %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+		os.Exit(1)
+	}
+
+	err = SetupStatuses(db)
+	if err != nil {
+		fmt.Printf("\nError setting up status %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+		os.Exit(1)
+	}
+
+	err = SetupDivisions(db)
+	if err != nil {
+		fmt.Printf("\nError setting up division %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+		os.Exit(1)
+	}
+
+	err = SetupRegions(db)
+	if err != nil {
+		fmt.Printf("\nError setting up region %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+		os.Exit(1)
+	}
+
+	/*
+		err = SetupProfiles(db)
+		if err != nil {
+			fmt.Printf("\nError setting up profile %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupProfileParameters(db)
+		if err != nil {
+			fmt.Printf("\nError setting up parameter %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupCacheGroups(db)
+		if err != nil {
+			fmt.Printf("\nError setting up cachegroup %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+
+		err = SetupPhysLocations(db)
+		if err != nil {
+			fmt.Printf("\nError setting up phys_location %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupServers(db)
+		if err != nil {
+			fmt.Printf("\nError setting up server %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupAsns(db)
+		if err != nil {
+			fmt.Printf("\nError setting up asn %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupDeliveryServices(db)
+		if err != nil {
+			fmt.Printf("\nError setting up deliveryservice %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupRegexes(db)
+		if err != nil {
+			fmt.Printf("\nError setting up regex %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupDeliveryServiceRegexes(db)
+		if err != nil {
+			fmt.Printf("\nError setting up deliveryservice_regex %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupDeliveryServiceTmUsers(db)
+		if err != nil {
+			fmt.Printf("\nError setting up deliveryservice_tmuser %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupDeliveryServiceServers(db)
+		if err != nil {
+			fmt.Printf("\nError setting up deliveryservice_server %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupJobStatuses(db)
+		if err != nil {
+			fmt.Printf("\nError setting up job_status %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupJobAgents(db)
+		if err != nil {
+			fmt.Printf("\nError setting up job_agent %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+
+		err = SetupJobs(db)
+		if err != nil {
+			fmt.Printf("\nError setting up job %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
+			os.Exit(1)
+		}
+	*/
+
+	return err
+}
+
+// SetupRoles ...
+func SetupRoles(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO role (id, name, description, priv_level) VALUES (1, 'disallowed','Block all access',0) ON CONFLICT DO NOTHING;
+INSERT INTO role (id, name, description, priv_level) VALUES (2, 'read-only user','Block all access', 10) ON CONFLICT DO NOTHING;
+INSERT INTO role (id, name, description, priv_level) VALUES (3, 'operations','Block all access', 20) ON CONFLICT DO NOTHING;
+INSERT INTO role (id, name, description, priv_level) VALUES (4, 'admin','super-user', 30) ON CONFLICT DO NOTHING;
+INSERT INTO role (id, name, description, priv_level) VALUES (5, 'portal','Portal User', 2) ON CONFLICT DO NOTHING;
+INSERT INTO role (id, name, description, priv_level) VALUES (7, 'federation','Role for Secondary CZF', 15) ON CONFLICT DO NOTHING;
+`
+	err := execSQL(db, sqlStmt, "role")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupTmusers ...
+func SetupTmusers(db *sql.DB) error {
+
+	var err error
+	encryptedPassword, err := auth.DerivePassword(Config.TrafficOps.UserPassword)
+	if err != nil {
+		return fmt.Errorf("password encryption failed %v", err)
+	}
+
+	// Creates users in different tenants
+	sqlStmt := `
+INSERT INTO tm_user (username, local_passwd, confirm_local_passwd, role, tenant_id) VALUES ('` + Config.TrafficOps.Users.Disallowed + `','` + encryptedPassword + `','` + encryptedPassword + `', 1, 3);
+INSERT INTO tm_user (username, local_passwd, confirm_local_passwd, role, tenant_id) VALUES ('` + Config.TrafficOps.Users.ReadOnly + `','` + encryptedPassword + `','` + encryptedPassword + `', 2, 3);
+INSERT INTO tm_user (username, local_passwd, confirm_local_passwd, role, tenant_id) VALUES ('` + Config.TrafficOps.Users.Operations + `','` + encryptedPassword + `','` + encryptedPassword + `', 3, 3);
+INSERT INTO tm_user (username, local_passwd, confirm_local_passwd, role, tenant_id) VALUES ('` + Config.TrafficOps.Users.Admin + `','` + encryptedPassword + `','` + encryptedPassword + `', 4, 2);
+INSERT INTO tm_user (username, local_passwd, confirm_local_passwd, role, tenant_id) VALUES ('` + Config.TrafficOps.Users.Portal + `','` + encryptedPassword + `','` + encryptedPassword + `', 5, 3);
+INSERT INTO tm_user (username, local_passwd, confirm_local_passwd, role, tenant_id) VALUES ('` + Config.TrafficOps.Users.Federation + `','` + encryptedPassword + `','` + encryptedPassword + `', 7, 3);
+`
+	err = execSQL(db, sqlStmt, "tm_user")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupTenants ...
+func SetupTenants(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO tenant (id, name, active, parent_id, last_updated) VALUES (1, 'root', true, null, '2018-01-19 19:01:21.327262');
+INSERT INTO tenant (id, name, active, parent_id, last_updated) VALUES (2, 'grandparent tenant', true, 1, '2018-01-19 19:01:21.327262');
+INSERT INTO tenant (id, name, active, parent_id, last_updated) VALUES (3, 'parent tenant', true, 2, '2018-01-19 19:01:21.327262');
+INSERT INTO tenant (id, name, active, parent_id, last_updated) VALUES (4, 'child tenant', true, 3, '2018-01-19 19:01:21.327262');
+`
+	err := execSQL(db, sqlStmt, "tenant")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupCDNs ...
+func SetupCDNs(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO cdn (id, name, last_updated, dnssec_enabled, domain_name) VALUES (100, 'cdn5', '2018-01-19 21:19:31.588795', false, 'cdn1.kabletown.net');
+INSERT INTO cdn (id, name, last_updated, dnssec_enabled, domain_name) VALUES (200, 'cdn6', '2018-01-19 21:19:31.591457', false, 'cdn2.kabletown.net');
+INSERT INTO cdn (id, name, last_updated, dnssec_enabled, domain_name) VALUES (300, 'cdn7', '2018-01-19 21:19:31.592700', false, 'cdn3.kabletown.net');
+`
+	err := execSQL(db, sqlStmt, "cdn")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupStatuses ...
+func SetupStatuses(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO status (id, name, description, last_updated) VALUES (1, 'OFFLINE', 'Edge: Puts server in CCR config file in this state, but CCR will never route traffic to it. Mid: Server will not be included in parent.config files for its edge caches', '2018-01-19 19:01:21.388399');
+INSERT INTO status (id, name, description, last_updated) VALUES (2, 'ONLINE', 'Edge: Puts server in CCR config file in this state, and CCR will always route traffic to it. Mid: Server will be included in parent.config files for its edges', '2018-01-19 19:01:21.384459');
+INSERT INTO status (id, name, description, last_updated) VALUES (3, 'REPORTED', 'Edge: Puts server in CCR config file in this state, and CCR will adhere to the health protocol. Mid: N/A for now', '2018-01-19 19:01:21.379811');
+INSERT INTO status (id, name, description, last_updated) VALUES (4, 'ADMIN_DOWN', 'Temporary down. Edge: XMPP client will send status OFFLINE to CCR, otherwise similar to REPORTED. Mid: Server will not be included in parent.config files for its edge caches', '2018-01-19 19:01:21.385798');
+INSERT INTO status (id, name, description, last_updated) VALUES (5, 'CCR_IGNORE', 'Edge: 12M will not include caches in this state in CCR config files. Mid: N/A for now', '2018-01-19 19:01:21.383085');
+INSERT INTO status (id, name, description, last_updated) VALUES (6, 'PRE_PROD', 'Pre Production. Not active in any configuration.', '2018-01-19 19:01:21.387146');
+`
+	err := execSQL(db, sqlStmt, "status")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupCacheGroups ...
+func SetupCacheGroups(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO cachegroup (id, name, short_name, latitude, longitude, parent_cachegroup_id, secondary_parent_cachegroup_id, type, last_updated) VALUES (100, 'mid-northeast-group', 'ne', 120, 120, null, null, 2, '2018-01-19 21:19:32.041913');
+INSERT INTO cachegroup (id, name, short_name, latitude, longitude, parent_cachegroup_id, secondary_parent_cachegroup_id, type, last_updated) VALUES (200, 'mid-northwest-group', 'nw', 100, 100, 100, null, 2, '2018-01-19 21:19:32.052005');
+INSERT INTO cachegroup (id, name, short_name, latitude, longitude, parent_cachegroup_id, secondary_parent_cachegroup_id, type, last_updated) VALUES (800, 'mid_cg3', 'mid_cg3', 100, 100, null, null, 6, '2018-01-19 21:19:32.056908');
+INSERT INTO cachegroup (id, name, short_name, latitude, longitude, parent_cachegroup_id, secondary_parent_cachegroup_id, type, last_updated) VALUES (900, 'edge_cg4', 'edge_cg4', 100, 100, 800, null, 5, '2018-01-19 21:19:32.059077');
+INSERT INTO cachegroup (id, name, short_name, latitude, longitude, parent_cachegroup_id, secondary_parent_cachegroup_id, type, last_updated) VALUES (300, 'edge_atl_group', 'atl', 120, 120, 100, 200, 5, '2018-01-19 21:19:32.063375');
+`
+	err := execSQL(db, sqlStmt, "cachegroup")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupPhysLocations ...
+func SetupPhysLocations(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO phys_location (id, name, short_name, address, city, state, zip, poc, phone, email, comments, region, last_updated) VALUES (100, 'Denver', 'denver', '1234 mile high circle', 'Denver', 'CO', '80202', null, '303-111-1111', null, null, 100, '2018-01-19 21:19:32.081465');
+INSERT INTO phys_location (id, name, short_name, address, city, state, zip, poc, phone, email, comments, region, last_updated) VALUES (200, 'Boulder', 'boulder', '1234 green way', 'Boulder', 'CO', '80301', null, '303-222-2222', null, null, 100, '2018-01-19 21:19:32.086195');
+INSERT INTO phys_location (id, name, short_name, address, city, state, zip, poc, phone, email, comments, region, last_updated) VALUES (300, 'HotAtlanta', 'atlanta', '1234 southern way', 'Atlanta', 'GA', '30301', null, '404-222-2222', null, null, 100, '2018-01-19 21:19:32.089538');
+`
+	err := execSQL(db, sqlStmt, "phys_location")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupServers ...
+func SetupServers(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (100, 'atlanta-edge-01', 'ga.atlanta.kabletown.net', 80, 'atlanta-edge-01\@ocdn.kabletown.net', 'X', 'bond0', '127.0.0.1', '255.255.255.252', '127.0.0.1', '2345:1234:12:8::2/64', '2345:1234:12:8::1', 9000, 100, 'RR 119.02', 300, 1, 2, null, false, 100, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.094746', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (1000, 'influxdb02', 'kabletown.net', 8086, '', '', 'eth1', '127.0.0.11', '255.255.252.0', '127.0.0.11', '127.0.0.11', '127.0.0.11', 1500, 300, 'RR 119.02', 100, 32, 2, null, false, 500, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.115164', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (1100, 'atlanta-router-01', 'ga.atlanta.kabletown.net', 80, 'atlanta-router-01\@ocdn.kabletown.net', 'X', 'bond0', '127.0.0.12', '255.255.255.252', '127.0.0.1', '2345:1234:12:8::10/64', '2345:1234:12:8::1', 9000, 100, 'RR 119.02', 300, 4, 2, null, false, 100, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.125603', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (1200, 'atlanta-edge-03', 'ga.atlanta.kabletown.net', 80, 'atlanta-edge-03\@ocdn.kabletown.net', 'X', 'bond0', '127.0.0.13', '255.255.255.252', '127.0.0.1', '2345:1234:12:2::2/64', '2345:1234:12:8::1', 9000, 100, 'RR 119.02', 300, 1, 3, null, false, 100, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.135422', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (1300, 'atlanta-edge-14', 'ga.atlanta.kabletown.net', 80, 'atlanta-edge-14\@ocdn.kabletown.net', 'X', 'bond0', '127.0.0.14', '255.255.255.252', '127.0.0.1', '2345:1234:12:8::14/64', '2345:1234:12:8::1', 9000, 100, 'RR 119.02', 900, 1, 2, null, false, 100, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.145252', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (1400, 'atlanta-edge-15', 'ga.atlanta.kabletown.net', 80, 'atlanta-edge-15\@ocdn.kabletown.net', 'X', 'bond0', '127.0.0.15', '255.255.255.252', '127.0.0.7', '2345:1234:12:d::15/64', '2345:1234:12:d::1', 9000, 100, 'RR 119.02', 900, 1, 2, null, false, 100, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.155043', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (1500, 'atlanta-mid-16', 'ga.atlanta.kabletown.net', 80, 'atlanta-mid-16\@ocdn.kabletown.net', 'X', 'bond0', '127.0.0.16', '255.255.255.252', '127.0.0.7', '2345:1234:12:d::16/64', '2345:1234:12:d::1', 9000, 100, 'RR 119.02', 800, 2, 2, null, false, 100, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.164825', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (1600, 'atlanta-org-1', 'ga.atlanta.kabletown.net', 80, 'atlanta-org-1\@ocdn.kabletown.net', 'X', 'bond0', '127.0.0.17', '255.255.255.252', '127.0.0.17', '2345:1234:12:d::17/64', '2345:1234:12:d::1', 9000, 100, 'RR 119.02', 800, 3, 2, null, false, 100, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.167782', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (1700, 'atlanta-org-2', 'ga.atlanta.kabletown.net', 80, 'atlanta-org-1\@ocdn.kabletown.net', 'X', 'bond0', '127.0.0.18', '255.255.255.252', '127.0.0.18', '2345:1234:12:d::18/64', '2345:1234:12:d::1', 9000, 100, 'RR 119.02', 800, 3, 2, null, false, 900, 200, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.170592', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (200, 'atlanta-mid-01', 'ga.atlanta.kabletown.net', 80, 'atlanta-mid-01\@ocdn.kabletown.net', 'X', 'bond0', '127.0.0.2', '255.255.255.252', '127.0.0.2', '2345:1234:12:9::2/64', '2345:1234:12:9::1', 9000, 100, 'RR 119.02', 100, 2, 2, null, false, 200, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.173304', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (300, 'rascal01', 'kabletown.net', 81, 'rascal\@kabletown.net', 'X', 'bond0', '127.0.0.4', '255.255.255.252', '127.0.0.4', '2345:1234:12:b::2/64', '2345:1234:12:b::1', 9000, 100, 'RR 119.02', 100, 4, 2, null, false, 300, 200, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.176375', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (400, 'riak01', 'kabletown.net', 8088, '', '', 'eth1', '127.0.0.5', '255.255.252.0', '127.0.0.5', '', '', 1500, 100, 'RR 119.02', 100, 31, 2, null, false, 500, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.180698', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (500, 'rascal02', 'kabletown.net', 81, 'rascal\@kabletown.net', 'X', 'bond0', '127.0.0.6', '255.255.255.252', '127.0.0.6', '2345:1234:12:c::2/64', '2345:1234:12:c::1', 9000, 100, 'RR 119.05', 100, 4, 2, null, false, 300, 200, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.184322', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (600, 'atlanta-edge-02', 'ga.atlanta.kabletown.net', 80, 'atlanta-edge-02\@ocdn.kabletown.net', 'X', 'bond0', '127.0.0.7', '255.255.255.252', '127.0.0.7', '2345:1234:12:d::2/64', '2345:1234:12:d::1', 9000, 100, 'RR 119.02', 300, 1, 2, null, false, 100, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.187856', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (700, 'atlanta-mid-02', 'ga.atlanta.kabletown.net', 80, 'atlanta-mid-02\@ocdn.kabletown.net', 'X', 'bond0', '127.0.0.8', '255.255.255.252', '127.0.0.8', '2345:1234:12:e::2/64', '2345:1234:12:e::1', 9000, 200, 'RR 119.02', 200, 2, 2, null, false, 200, 200, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.191292', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (800, 'riak02', 'kabletown.net', 8088, '', '', 'eth1', '127.0.0.9', '255.255.252.0', '127.0.0.9', '2345:1234:12:f::2/64', '2345:1234:12:f::1', 1500, 200, 'RR 119.02', 100, 31, 2, null, false, 500, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.194538', null, false);
+INSERT INTO server (id, host_name, domain_name, tcp_port, xmpp_id, xmpp_passwd, interface_name, ip_address, ip_netmask, ip_gateway, ip6_address, ip6_gateway, interface_mtu, phys_location, rack, cachegroup, type, status, offline_reason, upd_pending, profile, cdn_id, mgmt_ip_address, mgmt_ip_netmask, mgmt_ip_gateway, ilo_ip_address, ilo_ip_netmask, ilo_ip_gateway, ilo_username, ilo_password, router_host_name, router_port_name, guid, last_updated, https_port, reval_pending) VALUES (900, 'influxdb01', 'kabletown.net', 8086, '', '', 'eth1', '127.0.0.10', '255.255.252.0', '127.0.0.10', '127.0.0.10', '127.0.0.10', 1500, 300, 'RR 119.02', 100, 32, 2, null, false, 500, 100, '', '', '', '', '', '', '', '', '', '', null, '2018-01-19 21:19:32.197808', null, false);
+`
+	err := execSQL(db, sqlStmt, "servers")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupProfiles ...
+func SetupProfiles(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO profile (id, name, description, last_updated, type, cdn, routing_disabled) VALUES (100, 'EDGE1', 'edge description', '2018-01-19 19:01:21.512005', 'ATS_PROFILE', 100, false);
+INSERT INTO profile (id, name, description, last_updated, type, cdn, routing_disabled) VALUES (200, 'MID1', 'mid description', '2018-01-19 19:01:21.517781', 'ATS_PROFILE', 100, false);
+INSERT INTO profile (id, name, description, last_updated, type, cdn, routing_disabled) VALUES (300, 'CCR1', 'ccr description', '2018-01-19 19:01:21.521121', 'TR_PROFILE', 100, false);
+INSERT INTO profile (id, name, description, last_updated, type, cdn, routing_disabled) VALUES (301, 'CCR2', 'ccr description', '2018-01-19 19:01:21.524584', 'TR_PROFILE', 200, false);
+INSERT INTO profile (id, name, description, last_updated, type, cdn, routing_disabled) VALUES (500, 'RIAK1', 'riak description', '2018-01-19 19:01:21.528911', 'RIAK_PROFILE', 100, false);
+INSERT INTO profile (id, name, description, last_updated, type, cdn, routing_disabled) VALUES (600, 'RASCAL1', 'rascal description', '2018-01-19 19:01:21.532539', 'TM_PROFILE', 100, false);
+INSERT INTO profile (id, name, description, last_updated, type, cdn, routing_disabled) VALUES (700, 'RASCAL2', 'rascal2 description', '2018-01-19 19:01:21.536447', 'TM_PROFILE', 200, false);
+INSERT INTO profile (id, name, description, last_updated, type, cdn, routing_disabled) VALUES (8, 'MISC', 'misc profile description', '2018-01-19 19:01:21.539022', 'UNK_PROFILE', null, false);
+INSERT INTO profile (id, name, description, last_updated, type, cdn, routing_disabled) VALUES (900, 'EDGE2', 'edge description', '2018-01-19 19:01:21.541300', 'ATS_PROFILE', 200, false);
+`
+	err := execSQL(db, sqlStmt, "profile")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupProfileParameters ...
+func SetupProfileParameters(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 43, '2018-01-19 19:01:21.556526');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 19, '2018-01-19 19:01:21.566442');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 35, '2018-01-19 19:01:21.571364');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 49, '2018-01-19 19:01:21.575178');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 61, '2018-01-19 19:01:21.578744');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 9, '2018-01-19 19:01:21.582534');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 46, '2018-01-19 19:01:21.586388');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 35, '2018-01-19 19:01:21.588145');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 16, '2018-01-19 19:01:21.589542');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 57, '2018-01-19 19:01:21.591061');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 48, '2018-01-19 19:01:21.592700');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 60, '2018-01-19 19:01:21.594185');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 31, '2018-01-19 19:01:21.595700');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 49, '2018-01-19 19:01:21.597212');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 4, '2018-01-19 19:01:21.598744');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 36, '2018-01-19 19:01:21.600582');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 27, '2018-01-19 19:01:21.602214');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 41, '2018-01-19 19:01:21.604015');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 16, '2018-01-19 19:01:21.605612');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 17, '2018-01-19 19:01:21.607234');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 21, '2018-01-19 19:01:21.609358');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 41, '2018-01-19 19:01:21.611101');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 32, '2018-01-19 19:01:21.613078');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 32, '2018-01-19 19:01:21.614943');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 28, '2018-01-19 19:01:21.616641');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 6, '2018-01-19 19:01:21.618677');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 66, '2018-01-19 19:01:21.620617');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 58, '2018-01-19 19:01:21.622399');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 28, '2018-01-19 19:01:21.623955');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 56, '2018-01-19 19:01:21.625664');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 23, '2018-01-19 19:01:21.627471');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 11, '2018-01-19 19:01:21.629284');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 30, '2018-01-19 19:01:21.630989');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 22, '2018-01-19 19:01:21.632523');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 23, '2018-01-19 19:01:21.634278');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 37, '2018-01-19 19:01:21.635945');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 25, '2018-01-19 19:01:21.637627');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 38, '2018-01-19 19:01:21.639252');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 52, '2018-01-19 19:01:21.640775');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 29, '2018-01-19 19:01:21.642278');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 12, '2018-01-19 19:01:21.644071');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 45, '2018-01-19 19:01:21.645614');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 60, '2018-01-19 19:01:21.647126');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 26, '2018-01-19 19:01:21.648787');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 57, '2018-01-19 19:01:21.650507');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 13, '2018-01-19 19:01:21.652142');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 27, '2018-01-19 19:01:21.653714');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 26, '2018-01-19 19:01:21.655383');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 39, '2018-01-19 19:01:21.657078');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 12, '2018-01-19 19:01:21.658901');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 25, '2018-01-19 19:01:21.661010');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 21, '2018-01-19 19:01:21.662865');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 33, '2018-01-19 19:01:21.664561');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 38, '2018-01-19 19:01:21.666336');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 34, '2018-01-19 19:01:21.668286');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 58, '2018-01-19 19:01:21.670053');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 24, '2018-01-19 19:01:21.671744');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 43, '2018-01-19 19:01:21.673493');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 5, '2018-01-19 19:01:21.675218');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 37, '2018-01-19 19:01:21.676721');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 51, '2018-01-19 19:01:21.678334');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 19, '2018-01-19 19:01:21.679937');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 6, '2018-01-19 19:01:21.681398');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 18, '2018-01-19 19:01:21.682983');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 42, '2018-01-19 19:01:21.684568');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 7, '2018-01-19 19:01:21.686083');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 56, '2018-01-19 19:01:21.687549');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 13, '2018-01-19 19:01:21.689131');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 22, '2018-01-19 19:01:21.690719');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 36, '2018-01-19 19:01:21.692254');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 53, '2018-01-19 19:01:21.693745');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 40, '2018-01-19 19:01:21.695556');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 51, '2018-01-19 19:01:21.697784');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 46, '2018-01-19 19:01:21.699385');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 11, '2018-01-19 19:01:21.701103');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 54, '2018-01-19 19:01:21.702727');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 17, '2018-01-19 19:01:21.704304');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 53, '2018-01-19 19:01:21.705942');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 10, '2018-01-19 19:01:21.707676');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 8, '2018-01-19 19:01:21.709391');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 39, '2018-01-19 19:01:21.711213');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 40, '2018-01-19 19:01:21.713199');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 29, '2018-01-19 19:01:21.715051');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 59, '2018-01-19 19:01:21.716817');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 47, '2018-01-19 19:01:21.718642');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 44, '2018-01-19 19:01:21.720315');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 9, '2018-01-19 19:01:21.722063');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 8, '2018-01-19 19:01:21.723607');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 20, '2018-01-19 19:01:21.725403');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 48, '2018-01-19 19:01:21.727060');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 55, '2018-01-19 19:01:21.728640');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 10, '2018-01-19 19:01:21.730182');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 45, '2018-01-19 19:01:21.731780');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 15, '2018-01-19 19:01:21.733368');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 33, '2018-01-19 19:01:21.734950');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 50, '2018-01-19 19:01:21.736646');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 52, '2018-01-19 19:01:21.738319');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 14, '2018-01-19 19:01:21.739900');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 14, '2018-01-19 19:01:21.741450');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 18, '2018-01-19 19:01:21.743105');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 61, '2018-01-19 19:01:21.744826');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 44, '2018-01-19 19:01:21.746391');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 55, '2018-01-19 19:01:21.747999');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 59, '2018-01-19 19:01:21.749519');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 34, '2018-01-19 19:01:21.751253');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 24, '2018-01-19 19:01:21.753005');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 7, '2018-01-19 19:01:21.754576');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 15, '2018-01-19 19:01:21.757250');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 47, '2018-01-19 19:01:21.759781');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 54, '2018-01-19 19:01:21.761829');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 42, '2018-01-19 19:01:21.763902');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 50, '2018-01-19 19:01:21.765912');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (100, 31, '2018-01-19 19:01:21.767998');
+INSERT INTO profile_parameter (profile, parameter, last_updated) VALUES (200, 20, '2018-01-19 19:01:21.769919');
+`
+	err := execSQL(db, sqlStmt, "profile_parameter")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupDivisions ...
+func SetupDivisions(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO division (id, name, last_updated) VALUES (100, 'mountain', '2018-01-19 19:01:21.851102');
+`
+	err := execSQL(db, sqlStmt, "division")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupRegions ...
+func SetupRegions(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO region (id, name, division, last_updated) VALUES (100, 'Denver Region', 100, '2018-01-19 19:01:21.859430');
+INSERT INTO region (id, name, division, last_updated) VALUES (200, 'Boulder Region', 100, '2018-01-19 19:01:21.854509');
+`
+	err := execSQL(db, sqlStmt, "region")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupAsns ...
+func SetupAsns(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO asn (id, asn, cachegroup, last_updated) VALUES (100, 9939, 100, '2018-01-19 19:01:21.995075');
+INSERT INTO asn (id, asn, cachegroup, last_updated) VALUES (200, 9940, 200, '2018-01-19 19:01:22.005683');
+`
+	err := execSQL(db, sqlStmt, "asn")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupDeliveryServices ...
+func SetupDeliveryServices(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (100, 'test-ds1', true, 40, null, 0, 0, '', '', '', -1, 'http://test-ds1.edge', 21, 100, 100, 3600, 0, 0, 'test-ds1 long_desc', 'test-ds1 long_desc_1', 'test-ds1 long_desc_2', 0, 'http://test-ds1.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.217466', 1, 0, true, 0, null, null, null, null, null, null, false, 'test-ds1-displayname', null, 1, null, null, true, 0, null, true, null, null, 2, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (1000, 'steering-target-ds1', true, 40, null, 0, 0, '', 'hokeypokey', null, 10, 'http://target-ds1.edge', 21, 100, 100, 3600, 0, 0, 'target-ds1 long_desc', 'target-ds1 long_desc_1', 'target-ds1 long_desc_2', 0, 'http://target-ds1.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.226858', 1, 0, true, 0, null, null, null, null, null, null, false, 'target-ds1-displayname', null, 1, null, null, true, 0, null, false, null, null, 2, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (1100, 'steering-target-ds2', true, 40, null, 0, 0, '', 'hokeypokey', null, 10, 'http://target-ds2.edge', 21, 100, 100, 3600, 0, 0, 'target-ds2 long_desc', 'target-ds2 long_desc_1', 'target-ds2 long_desc_2', 0, 'http://target-ds2.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.235025', 1, 0, true, 0, null, null, null, null, null, null, false, 'target-ds2-displayname', null, 1, null, null, true, 0, null, false, null, null, 2, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (1200, 'steering-target-ds3', true, 40, null, 0, 0, '', 'hokeypokey', null, 10, 'http://target-ds3.edge', 21, 100, 100, 3600, 0, 0, 'target-ds3 long_desc', 'target-ds3 long_desc_1', 'target-ds3 long_desc_2', 0, 'http://target-ds3.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.241327', 1, 0, true, 0, null, null, null, null, null, null, false, 'target-ds3-displayname', null, 1, null, null, true, 0, null, false, null, null, 3, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (1300, 'steering-target-ds4', true, 40, null, 0, 0, '', 'hokeypokey', null, 10, 'http://target-ds4.edge', 21, 100, 100, 3600, 0, 0, 'target-ds4 long_desc', 'target-ds4 long_desc_1', 'target-ds4 long_desc_2', 0, 'http://target-ds4.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.247731', 1, 0, true, 0, null, null, null, null, null, null, false, 'target-ds4-displayname', null, 1, null, null, true, 0, null, false, null, null, 3, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (200, 'test-ds2', true, 40, null, 0, 0, '', '', '', -1, 'http://test-ds2.edge', 9, 100, 100, 3600, 0, 0, 'test-ds2 long_desc', 'test-ds2 long_desc_1', 'test-ds2 long_desc_2', 0, 'http://test-ds2.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.253469', 0, 0, false, 0, null, null, null, null, null, null, false, 'test-ds2-displayname', null, 1, null, null, false, 0, null, false, null, null, 3, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (2100, 'test-ds1-root', true, 40, null, 0, 0, '', '', '', -1, 'http://test-ds1-root.edge', 21, 100, 100, 3600, 0, 0, 'test-ds1-root long_desc', 'test-ds1-root long_desc_1', 'test-ds1-root long_desc_2', 0, 'http://test-ds1-root.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.261210', 1, 0, true, 0, null, null, null, null, null, null, false, 'test-ds1-root-displayname', null, 1, null, null, true, 0, null, true, null, null, 1, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (2200, 'xxfoo.bar', true, 40, null, 0, 0, '', '', '', -1, 'http://foo.bar.edge', 21, 100, 100, 3600, 0, 0, 'foo.bar long_desc', 'foo.bar long_desc_1', 'foo.bar long_desc_2', 0, 'http://foo.bar.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.265717', 1, 0, true, 0, null, null, null, null, null, null, false, 'foo.bar-displayname', null, 1, null, null, true, 0, null, true, null, null, 2, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (300, 'test-ds3', true, 40, null, 0, 0, '', '', '', -1, 'http://test-ds3.edge', 9, 100, 100, 3600, 0, 0, 'test-ds3 long_desc', 'test-ds3 long_desc_1', 'test-ds3 long_desc_2', 0, 'http://test-ds3.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.269358', 0, 0, false, 0, null, null, null, null, null, null, false, 'test-ds3-displayname', null, 1, null, null, false, 0, null, false, null, null, 3, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (400, 'test-ds4', true, 40, null, 0, 0, '', '', '', -1, 'http://test-ds4.edge', 9, 100, 100, 3600, 0, 0, 'test-ds4 long_desc', 'test-ds4 long_desc_1', 'test-ds4 long_desc_2', 0, 'http://test-ds4.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.272467', 0, 0, false, 0, null, null, null, null, null, null, false, 'test-ds4-displayname', null, 1, null, null, false, 0, null, true, null, null, 4, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (500, 'test-ds5', true, 40, null, 0, 0, '', '', '', -1, 'http://test-ds5.edge', 7, 300, 100, 3600, 0, 0, 'test-ds5 long_desc', 'test-ds5 long_desc_1', 'test-ds5 long_desc_2', 0, 'http://test-ds5.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.275400', 0, 0, false, 0, null, null, null, null, null, null, false, 'test-ds5-displayname', null, 1, null, null, false, 0, null, false, null, null, 3, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (600, 'test-ds6', true, 40, null, 0, 0, '', '', '', -1, 'http://test-ds6.edge', 9, 300, 100, 3600, 0, 0, 'test-ds6 long_desc', 'test-ds6 long_desc_1', 'test-ds6 long_desc_2', 0, 'http://test-ds6.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.278451', 0, 0, false, 0, null, null, null, null, null, null, false, 'test-ds6-displayname', null, 1, null, null, false, 0, null, false, null, null, 3, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (700, 'steering-ds1', true, 40, null, 0, 0, '', 'hokeypokey', null, 10, 'http://steering-ds1.edge', 21, 100, 100, 3600, 0, 0, 'steering-ds1 long_desc', 'steering-ds1 long_desc_1', 'steering-ds1 long_desc_2', 0, 'http://steering-ds1.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.281466', 1, 0, true, 0, null, null, null, null, null, null, false, 'steering-ds1-displayname', null, 1, null, null, true, 0, null, false, null, null, 3, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (800, 'steering-ds2', true, 40, null, 0, 0, '', 'hokeypokey', null, 10, 'http://steering-ds2.edge', 21, 100, 100, 3600, 0, 0, 'steering-ds2 long_desc', 'steering-ds2 long_desc_1', 'steering-ds2 long_desc_2', 0, 'http://steering-ds2.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.284567', 1, 0, true, 0, null, null, null, null, null, null, false, 'steering-ds2-displayname', null, 1, null, null, true, 0, null, false, null, null, 3, 'foo');
+INSERT INTO deliveryservice (id, xml_id, active, dscp, signing_algorithm, qstring_ignore, geo_limit, http_bypass_fqdn, dns_bypass_ip, dns_bypass_ip6, dns_bypass_ttl, org_server_fqdn, type, profile, cdn_id, ccr_dns_ttl, global_max_mbps, global_max_tps, long_desc, long_desc_1, long_desc_2, max_dns_answers, info_url, miss_lat, miss_long, check_path, last_updated, protocol, ssl_key_version, ipv6_routing_enabled, range_request_handling, edge_header_rewrite, origin_shield, mid_header_rewrite, regex_remap, cacheurl, remap_text, multi_site_origin, display_name, tr_response_headers, initial_dispersion, dns_bypass_cname, tr_request_headers, regional_geo_blocking, geo_provider, geo_limit_countries, logs_enabled, multi_site_origin_algorithm, geolimit_redirect_url, tenant_id, routing_name) VALUES (900, 'steering-ds3', true, 40, null, 0, 0, '', 'hokeypokey', null, 10, 'http://new-steering-ds.edge', 21, 100, 100, 3600, 0, 0, 'new-steering-ds long_desc', 'new-steering-ds long_desc_1', 'new-steering-ds long_desc_2', 0, 'http://new-steering-ds.edge/info_url.html', 41.881944, -87.627778, '/crossdomain.xml', '2018-01-19 21:19:32.287726', 1, 0, true, 0, null, null, null, null, null, null, false, 'new-steering-ds-displayname', null, 1, null, null, true, 0, null, false, null, null, 4, 'foo');
+`
+	err := execSQL(db, sqlStmt, "deliveryservice")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupRegexes ...
+func SetupRegexes(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (100, '.*\.omg-01\..*', 19, '2018-01-19 21:58:36.120746');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (1000, '.*\.target-ds1\..*', 19, '2018-01-19 21:58:36.125624');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (1100, '.*\.target-ds2\..*', 19, '2018-01-19 21:58:36.128372');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (1200, '.*\.target-ds3\..*', 19, '2018-01-19 21:58:36.130749');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (1300, '.*\.target-ds4\..*', 19, '2018-01-19 21:58:36.133992');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (1400, '.*\.target-ds5\..*', 19, '2018-01-19 21:58:36.136503');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (1500, '.*\.target-ds6\..*', 19, '2018-01-19 21:58:36.138890');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (1600, '.*\.target-ds7\..*', 19, '2018-01-19 21:58:36.140495');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (1700, '.*\.target-ds8\..*', 19, '2018-01-19 21:58:36.142473');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (1800, '.*\.target-ds9\..*', 19, '2018-01-19 21:58:36.144087');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (1900, '.*\.target-ds10\..*', 19, '2018-01-19 21:58:36.145505');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (200, '.*\.foo\..*', 19, '2018-01-19 21:58:36.146953');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (300, '.*/force-to-one/.*', 20, '2018-01-19 21:58:36.149052');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (400, '.*/force-to-one-also/.*', 20, '2018-01-19 21:58:36.150904');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (500, '.*/go-to-four/.*', 20, '2018-01-19 21:58:36.152416');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (600, '.*/use-three/.*', 20, '2018-01-19 21:58:36.153884');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (700, '.*\.new-steering-ds\..*', 19, '2018-01-19 21:58:36.155352');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (800, '.*\.steering-ds1\..*', 19, '2018-01-19 21:58:36.156867');
+INSERT INTO regex (id, pattern, type, last_updated) VALUES (900, '.*\.steering-ds2\..*', 19, '2018-01-19 21:58:36.158999');
+`
+	err := execSQL(db, sqlStmt, "regex")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupDeliveryServiceRegexes ...
+func SetupDeliveryServiceRegexes(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (200, 100, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (400, 100, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (400, 1000, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (500, 1100, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (600, 1200, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (700, 1300, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (800, 1400, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (900, 1500, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (1000, 1600, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (1100, 1700, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (1200, 1800, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (1300, 1900, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (100, 200, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (400, 200, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (700, 300, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (600, 400, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (300, 600, 0);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (100, 800, 1);
+INSERT INTO deliveryservice_regex (deliveryservice, regex, set_number) VALUES (200, 900, 0);
+`
+	err := execSQL(db, sqlStmt, "deliveryservice_regex")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupDeliveryServiceTmUsers ...
+func SetupDeliveryServiceTmUsers(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO deliveryservice_tmuser (deliveryservice, tm_user_id, last_updated) VALUES (100, (SELECT id FROM tm_user where username = 'admin') , '2018-01-19 21:19:32.372969');
+`
+	err := execSQL(db, sqlStmt, "deliveryservice_tmuser")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupDeliveryServiceServers ...
+func SetupDeliveryServiceServers(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO deliveryservice_server (deliveryservice, server, last_updated) VALUES (100, 300, '2018-01-19 21:19:32.396609');
+INSERT INTO deliveryservice_server (deliveryservice, server, last_updated) VALUES (100, 1300, '2018-01-19 21:19:32.408819');
+INSERT INTO deliveryservice_server (deliveryservice, server, last_updated) VALUES (100, 100, '2018-01-19 21:19:32.414612');
+INSERT INTO deliveryservice_server (deliveryservice, server, last_updated) VALUES (200, 800, '2018-01-19 21:19:32.420745');
+INSERT INTO deliveryservice_server (deliveryservice, server, last_updated) VALUES (200, 700, '2018-01-19 21:19:32.426505');
+INSERT INTO deliveryservice_server (deliveryservice, server, last_updated) VALUES (500, 1500, '2018-01-19 21:19:32.434097');
+INSERT INTO deliveryservice_server (deliveryservice, server, last_updated) VALUES (500, 1400, '2018-01-19 21:19:32.439622');
+INSERT INTO deliveryservice_server (deliveryservice, server, last_updated) VALUES (600, 1400, '2018-01-19 21:19:32.440831');
+INSERT INTO deliveryservice_server (deliveryservice, server, last_updated) VALUES (600, 1500, '2018-01-19 21:19:32.442121');
+INSERT INTO deliveryservice_server (deliveryservice, server, last_updated) VALUES (700, 900, '2018-01-19 21:19:32.443372');
+`
+	err := execSQL(db, sqlStmt, "deliveryservice_server")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupJobStatuses ...
+func SetupJobStatuses(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO job_status (id, name, description, last_updated) VALUES (1, 'PENDING', 'Job is queued, but has not been picked up by any agents yet', '2018-01-19 21:19:32.444857');
+`
+	err := execSQL(db, sqlStmt, "job_status")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupJobAgents ...
+func SetupJobAgents(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO job_agent (id, name, description, active, last_updated) VALUES (1, 'agent1', 'Test Agent1', 0, '2018-01-19 21:19:32.448076');
+`
+	err := execSQL(db, sqlStmt, "job_agent")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// SetupJobs ...
+func SetupJobs(db *sql.DB) error {
+
+	sqlStmt := `
+INSERT INTO job (id, agent, object_type, object_name, keyword, parameters, asset_url, asset_type, status, start_time, entered_time, job_user, last_updated, job_deliveryservice) VALUES (100, 1, null, null, 'PURGE', 'TTL:48h', 'http://cdn2.edge/job1/.*', 'file', 1, '2018-01-19 21:01:14.000000', '2018-01-19 21:01:14.000000', (SELECT id FROM tm_user where username = 'admin'), '2018-01-19 21:19:32.468643', 100);
+INSERT INTO job (id, agent, object_type, object_name, keyword, parameters, asset_url, asset_type, status, start_time, entered_time, job_user, last_updated, job_deliveryservice) VALUES (200, 1, null, null, 'PURGE', 'TTL:48h', 'http://cdn2.edge/job2/.*', 'file', 1, '2018-01-19 21:09:34.000000', '2018-01-19 21:09:34.000000', (SELECT id FROM tm_user where username = 'admin'), '2018-01-19 21:19:32.450915', 200);
+INSERT INTO job (id, agent, object_type, object_name, keyword, parameters, asset_url, asset_type, status, start_time, entered_time, job_user, last_updated, job_deliveryservice) VALUES (300, 1, null, null, 'PURGE', 'TTL:48h', 'http://cdn2.edge/job3/.*', 'file', 1, '2018-01-19 21:14:34.000000', '2018-01-19 21:14:34.000000', (SELECT id FROM tm_user where username = 'admin'), '2018-01-19 21:19:32.460870', 100);
+`
+	err := execSQL(db, sqlStmt, "job")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return nil
+}
+
+// Teardown - ensures that the data is cleaned up for a fresh run
+func Teardown(db *sql.DB) error {
+
+	sqlStmt := `
+	DELETE FROM to_extension;
+	DELETE FROM staticdnsentry;
+	DELETE FROM job;
+	DELETE FROM job_agent;
+	DELETE FROM job_status;
+	DELETE FROM log;
+	DELETE FROM asn;
+	DELETE FROM deliveryservice_tmuser;
+	DELETE FROM tm_user;
+	DELETE FROM role;
+	DELETE FROM deliveryservice_regex;
+	DELETE FROM regex;
+	DELETE FROM deliveryservice_server;
+	DELETE FROM deliveryservice;
+	DELETE FROM server;
+	DELETE FROM phys_location;
+	DELETE FROM region;
+	DELETE FROM division;
+	DELETE FROM profile;
+	DELETE FROM parameter;
+	DELETE FROM profile_parameter;
+	DELETE FROM cachegroup;
+	DELETE FROM type;
+	DELETE FROM status;
+	DELETE FROM snapshot;
+	DELETE FROM cdn;
+	DELETE FROM tenant;
+`
+	err := execSQL(db, sqlStmt, "Tearing down")
+	if err != nil {
+		return fmt.Errorf("exec failed %v", err)
+	}
+	return err
+}
+
+// execSQL ...
+func execSQL(db *sql.DB, sqlStmt string, dbTable string) error {
+
+	log.Debugln(dbTable + " data")
+	var err error
+
+	tx, err := db.Begin()
+	if err != nil {
+		return fmt.Errorf("transaction begin failed %v %v ", err, tx)
+	}
+
+	res, err := tx.Exec(sqlStmt)
+	if err != nil {
+		return fmt.Errorf("exec failed %v %v", err, res)
+	}
+
+	err = tx.Commit()
+	if err != nil {
+		return fmt.Errorf("commit failed %v %v", err, res)
+	}
+	return nil
+}
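
Each Setup* loader above follows the same pattern: a block of INSERT statements handed to execSQL, which runs them inside a single transaction and logs the table being seeded. A minimal sketch of how these loaders might be strung together is below; the setupFixtures name and the exact ordering are illustrative only, and the PR's real SetupTestData presumably also seeds tables that do not appear in this part of the file (cdn, type, status, tenant, tm_user, and so on), since the rows above reference them.

func setupFixtures(db *sql.DB) error {
	// Order matters: profiles reference CDNs, profile_parameter references
	// profiles and parameters, and delivery services reference profiles.
	steps := []func(*sql.DB) error{
		SetupProfiles,
		SetupProfileParameters,
		SetupDivisions,
		SetupRegions,
		SetupAsns,
		SetupDeliveryServices,
		SetupRegexes,
		SetupDeliveryServiceRegexes,
		SetupDeliveryServiceTmUsers,
		SetupDeliveryServiceServers,
		SetupJobStatuses,
		SetupJobAgents,
		SetupJobs,
	}
	for _, step := range steps {
		if err := step(db); err != nil {
			return err
		}
	}
	return nil
}
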
diff --git a/traffic_ops/testing/api/v13/traffic_ops_test.go b/traffic_ops/testing/api/v13/traffic_ops_test.go
index 68fa773a02..5d00d8240a 100644
--- a/traffic_ops/testing/api/v13/traffic_ops_test.go
+++ b/traffic_ops/testing/api/v13/traffic_ops_test.go
@@ -19,22 +19,18 @@ import (
 	"database/sql"
 	"flag"
 	"fmt"
-	"net"
 	"os"
 	"testing"
 	"time"
 
 	"github.com/apache/incubator-trafficcontrol/lib/go-log"
-	"github.com/apache/incubator-trafficcontrol/traffic_ops/client/v13"
 	"github.com/apache/incubator-trafficcontrol/traffic_ops/testing/api/config"
-	"github.com/apache/incubator-trafficcontrol/traffic_ops/testing/api/todb"
 	_ "github.com/lib/pq"
 )
 
 var (
-	TOSession *v13.Session
-	cfg       config.Config
-	testData  TrafficControl
+	Config   config.Config
+	testData TrafficControl
 )
 
 func TestMain(m *testing.M) {
@@ -43,11 +39,12 @@ func TestMain(m *testing.M) {
 	tcFixturesFileName := flag.String("fixtures", "tc-fixtures.json", "The test fixtures for the API test tool")
 	flag.Parse()
 
-	if cfg, err = config.LoadConfig(*configFileName); err != nil {
-		fmt.Printf("Error Loading Config %v %v\n", cfg, err)
+	if Config, err = config.LoadConfig(*configFileName); err != nil {
+		fmt.Printf("Error Loading Config %v %v\n", Config, err)
+		return
 	}
 
-	if err = log.InitCfg(cfg); err != nil {
+	if err = log.InitCfg(Config); err != nil {
 		fmt.Printf("Error initializing loggers: %v\n", err)
 		return
 	}
@@ -60,34 +57,35 @@ func TestMain(m *testing.M) {
 			   DB Server:            %s
 			   DB User:              %s
 			   DB Name:              %s
-			   DB Ssl:               %t`, *configFileName, *tcFixturesFileName, cfg.TrafficOps.URL, cfg.Default.Session.TimeoutInSecs, cfg.TrafficOpsDB.Hostname, cfg.TrafficOpsDB.User, cfg.TrafficOpsDB.Name, cfg.TrafficOpsDB.SSL)
+			   DB Ssl:               %t`, *configFileName, *tcFixturesFileName, Config.TrafficOps.URL, Config.Default.Session.TimeoutInSecs, Config.TrafficOpsDB.Hostname, Config.TrafficOpsDB.User, Config.TrafficOpsDB.Name, Config.TrafficOpsDB.SSL)
 
 	//Load the test data
 	LoadFixtures(*tcFixturesFileName)
 
 	var db *sql.DB
-	db, err = todb.OpenConnection(&cfg)
+	db, err = OpenConnection()
 	if err != nil {
-		fmt.Printf("\nError opening connection to %s - %s, %v\n", cfg.TrafficOps.URL, cfg.TrafficOps.User, err)
+		fmt.Printf("\nError opening connection to %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOpsDB.User, err)
 		os.Exit(1)
 	}
 	defer db.Close()
 
-	err = todb.Teardown(&cfg, db)
+	err = Teardown(db)
 	if err != nil {
-		fmt.Printf("\nError tearingdown data %s - %s, %v\n", cfg.TrafficOps.URL, cfg.TrafficOps.User, err)
+		fmt.Printf("\nError tearing down data %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOpsDB.User, err)
 		os.Exit(1)
 	}
 
-	err = todb.SetupTestData(&cfg, db)
+	err = SetupTestData(db)
 	if err != nil {
-		fmt.Printf("\nError setting up data %s - %s, %v\n", cfg.TrafficOps.URL, cfg.TrafficOps.User, err)
+		fmt.Printf("\nError setting up data %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOpsDB.User, err)
 		os.Exit(1)
 	}
 
-	TOSession, _, err = SetupSession(cfg, cfg.TrafficOps.URL, cfg.TrafficOps.User, cfg.TrafficOps.UserPassword)
+	toReqTimeout := time.Second * time.Duration(Config.Default.Session.TimeoutInSecs)
+	err = SetupSession(toReqTimeout, Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, Config.TrafficOps.UserPassword)
 	if err != nil {
-		fmt.Printf("\nError logging into TOURL: %s TOUser: %s/%s - %v\n", cfg.TrafficOps.URL, cfg.TrafficOps.User, cfg.TrafficOps.UserPassword, err)
+		fmt.Printf("\nError creating session to %s - %s, %v\n", Config.TrafficOps.URL, Config.TrafficOps.Users.Admin, err)
 		os.Exit(1)
 	}
 
@@ -96,16 +94,3 @@ func TestMain(m *testing.M) {
 	os.Exit(rc)
 
 }
-
-func SetupSession(cfg config.Config, toURL string, toUser string, toPass string) (*v13.Session, net.Addr, error) {
-	var err error
-	var session *v13.Session
-	var netAddr net.Addr
-	toReqTimeout := time.Second * time.Duration(cfg.Default.Session.TimeoutInSecs)
-	session, netAddr, err = v13.LoginWithAgent(toURL, toUser, toPass, true, "to-api-v13-client-tests", true, toReqTimeout)
-	if err != nil {
-		return nil, nil, err
-	}
-
-	return session, netAddr, err
-}
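
The removed SetupSession above returned a *v13.Session and a net.Addr, while the new call site only passes a timeout, URL, and credentials and checks a single error, so the replacement helper (defined elsewhere in the PR, not in this hunk) presumably stashes the logged-in session in a package-level variable. A rough, assumed sketch of that shape, reusing the v13.LoginWithAgent call from the deleted version:

var TOSession *v13.Session

func SetupSession(reqTimeout time.Duration, toURL string, toUser string, toPass string) error {
	var err error
	// Same login call the removed helper made; only the error is surfaced.
	TOSession, _, err = v13.LoginWithAgent(toURL, toUser, toPass, true, "to-api-v13-client-tests", true, reqTimeout)
	return err
}
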
diff --git a/traffic_ops/traffic_ops_golang/parameter/parameters.go b/traffic_ops/traffic_ops_golang/parameter/parameters.go
new file mode 100644
index 0000000000..eb4bbb6b82
--- /dev/null
+++ b/traffic_ops/traffic_ops_golang/parameter/parameters.go
@@ -0,0 +1,374 @@
+package parameter
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import (
+	"errors"
+	"fmt"
+	"strconv"
+
+	"github.com/apache/incubator-trafficcontrol/lib/go-log"
+	"github.com/apache/incubator-trafficcontrol/lib/go-tc"
+	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/api"
+	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/auth"
+	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/dbhelpers"
+	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/tovalidate"
+	validation "github.com/go-ozzo/ozzo-validation"
+
+	"github.com/jmoiron/sqlx"
+	"github.com/lib/pq"
+)
+
+//we need a type alias to define functions on
+type TOParameter tc.ParameterNullable
+
+//the refType is passed into the handlers where a copy of its type is used to decode the json.
+var refType = TOParameter(tc.ParameterNullable{})
+
+func GetRefType() *TOParameter {
+	return &refType
+}
+
+//Implementation of the Identifier, Validator interface functions
+func (parameter *TOParameter) GetID() (int, bool) {
+	if parameter.ID == nil {
+		return 0, false
+	}
+	return *parameter.ID, true
+}
+
+func (parameter *TOParameter) GetAuditName() string {
+	if parameter.Name != nil {
+		return *parameter.Name
+	}
+	if parameter.ID != nil {
+		return strconv.Itoa(*parameter.ID)
+	}
+	return "unknown"
+}
+
+func (parameter *TOParameter) GetType() string {
+	return "parameter"
+}
+
+func (parameter *TOParameter) SetID(i int) {
+	parameter.ID = &i
+}
+
+// Validate fulfills the api.Validator interface
+func (parameter TOParameter) Validate(db *sqlx.DB) []error {
+
+	// Required fields are validated here; the remaining business rules are
+	// enforced elsewhere: secure parameter values are masked for non-admin
+	// users in Read, and duplicate name+config_file+value combinations surface
+	// through the unique-constraint handling in Create and Update.
+	errs := validation.Errors{
+		"name":       validation.Validate(parameter.Name, validation.Required),
+		"configFile": validation.Validate(parameter.ConfigFile, validation.Required),
+		"value":      validation.Validate(parameter.Value, validation.Required),
+	}
+
+	return tovalidate.ToErrors(errs)
+}
+
+//The TOParameter implementation of the Creator interface
+//all implementations of Creator should use transactions and return the proper errorType
+//ParsePQUniqueConstraintError is used to determine if a parameter with conflicting values exists
+//if so, it will return an errorType of DataConflict and the type should be appended to the
+//generic error message returned
+//The insert sql returns the id and lastUpdated values of the newly inserted parameter and have
+//to be added to the struct
+func (pl *TOParameter) Create(db *sqlx.DB, user auth.CurrentUser) (error, tc.ApiErrorType) {
+	rollbackTransaction := true
+	tx, err := db.Beginx()
+	defer func() {
+		if tx == nil || !rollbackTransaction {
+			return
+		}
+		err := tx.Rollback()
+		if err != nil {
+			log.Errorln(errors.New("rolling back transaction: " + err.Error()))
+		}
+	}()
+
+	if err != nil {
+		log.Error.Printf("could not begin transaction: %v", err)
+		return tc.DBError, tc.SystemError
+	}
+	resultRows, err := tx.NamedQuery(insertQuery(), pl)
+	if err != nil {
+		if pqErr, ok := err.(*pq.Error); ok {
+			err, eType := dbhelpers.ParsePQUniqueConstraintError(pqErr)
+			if eType == tc.DataConflictError {
+				return errors.New("a parameter with " + err.Error()), eType
+			}
+			return err, eType
+		}
+		log.Errorf("received non pq error: %++v from create execution", err)
+		return tc.DBError, tc.SystemError
+	}
+	defer resultRows.Close()
+
+	var id int
+	var lastUpdated tc.TimeNoMod
+	rowsAffected := 0
+	for resultRows.Next() {
+		rowsAffected++
+		if err := resultRows.Scan(&id, &lastUpdated); err != nil {
+			log.Error.Printf("could not scan id from insert: %s\n", err)
+			return tc.DBError, tc.SystemError
+		}
+	}
+	if rowsAffected == 0 {
+		err = errors.New("no parameter was inserted, no id was returned")
+		log.Errorln(err)
+		return tc.DBError, tc.SystemError
+	}
+	if rowsAffected > 1 {
+		err = errors.New("too many ids returned from parameter insert")
+		log.Errorln(err)
+		return tc.DBError, tc.SystemError
+	}
+
+	pl.SetID(id)
+	pl.LastUpdated = &lastUpdated
+	err = tx.Commit()
+	if err != nil {
+		log.Errorln("Could not commit transaction: ", err)
+		return tc.DBError, tc.SystemError
+	}
+	rollbackTransaction = false
+	return nil, tc.NoError
+}
+
+func insertQuery() string {
+	query := `INSERT INTO parameter (
+name,
+config_file,
+value,
+secure) VALUES (
+:name,
+:config_file,
+:value,
+:secure) RETURNING id,last_updated`
+	return query
+}
+
+func (parameter *TOParameter) Read(db *sqlx.DB, parameters map[string]string, user auth.CurrentUser) ([]interface{}, []error, tc.ApiErrorType) {
+	var rows *sqlx.Rows
+
+	privLevel := user.PrivLevel
+
+	// Query Parameters to Database Query column mappings
+	// see the fields mapped in the SQL query
+	queryParamsToQueryCols := map[string]dbhelpers.WhereColumnInfo{
+		"config_file":  dbhelpers.WhereColumnInfo{"p.config_file", nil},
+		"id":           dbhelpers.WhereColumnInfo{"p.id", api.IsInt},
+		"last_updated": dbhelpers.WhereColumnInfo{"p.last_updated", nil},
+		"name":         dbhelpers.WhereColumnInfo{"p.name", nil},
+		"secure":       dbhelpers.WhereColumnInfo{"p.secure", api.IsBool},
+	}
+
+	where, orderBy, queryValues, errs := dbhelpers.BuildWhereAndOrderBy(parameters, queryParamsToQueryCols)
+	if len(errs) > 0 {
+		return nil, errs, tc.DataConflictError
+	}
+
+	query := selectQuery() + where + ParametersGroupBy() + orderBy
+	log.Debugln("Query is ", query)
+
+	rows, err := db.NamedQuery(query, queryValues)
+	if err != nil {
+		log.Errorf("Error querying Parameters: %v", err)
+		return nil, []error{tc.DBError}, tc.SystemError
+	}
+	defer rows.Close()
+
+	params := []interface{}{}
+	hiddenField := "********"
+	for rows.Next() {
+		var p tc.ParameterNullable
+		if err = rows.StructScan(&p); err != nil {
+			log.Errorf("error parsing Parameter rows: %v", err)
+			return nil, []error{tc.DBError}, tc.SystemError
+		}
+		var isSecure bool
+		if p.Secure != nil {
+			isSecure = *p.Secure
+		}
+
+		if isSecure && (privLevel < auth.PrivLevelAdmin) {
+			p.Value = &hiddenField
+		}
+		params = append(params, p)
+	}
+
+	return params, []error{}, tc.NoError
+
+}
+
+//The TOParameter implementation of the Updater interface
+//all implementations of Updater should use transactions and return the proper errorType
+//ParsePQUniqueConstraintError is used to determine if a parameter with conflicting values exists
+//if so, it will return an errorType of DataConflict and the type should be appended to the
+//generic error message returned
+func (pl *TOParameter) Update(db *sqlx.DB, user auth.CurrentUser) (error, tc.ApiErrorType) {
+	rollbackTransaction := true
+	tx, err := db.Beginx()
+	defer func() {
+		if tx == nil || !rollbackTransaction {
+			return
+		}
+		err := tx.Rollback()
+		if err != nil {
+			log.Errorln(errors.New("rolling back transaction: " + err.Error()))
+		}
+	}()
+
+	if err != nil {
+		log.Error.Printf("could not begin transaction: %v", err)
+		return tc.DBError, tc.SystemError
+	}
+	log.Debugf("about to run exec query: %s with parameter: %++v", updateQuery(), pl)
+	resultRows, err := tx.NamedQuery(updateQuery(), pl)
+	if err != nil {
+		if pqErr, ok := err.(*pq.Error); ok {
+			err, eType := dbhelpers.ParsePQUniqueConstraintError(pqErr)
+			if eType == tc.DataConflictError {
+				return errors.New("a parameter with " + err.Error()), eType
+			}
+			return err, eType
+		}
+		log.Errorf("received error: %++v from update execution", err)
+		return tc.DBError, tc.SystemError
+	}
+	defer resultRows.Close()
+
+	// get LastUpdated field -- updated by trigger in the db
+	var lastUpdated tc.TimeNoMod
+	rowsAffected := 0
+	for resultRows.Next() {
+		rowsAffected++
+		if err := resultRows.Scan(&lastUpdated); err != nil {
+			log.Error.Printf("could not scan lastUpdated from update: %s\n", err)
+			return tc.DBError, tc.SystemError
+		}
+	}
+	log.Debugf("lastUpdated: %++v", lastUpdated)
+	pl.LastUpdated = &lastUpdated
+	if rowsAffected != 1 {
+		if rowsAffected < 1 {
+			return errors.New("no parameter found with this id"), tc.DataMissingError
+		}
+		return fmt.Errorf("this update affected too many rows: %d", rowsAffected), tc.SystemError
+	}
+	err = tx.Commit()
+	if err != nil {
+		log.Errorln("Could not commit transaction: ", err)
+		return tc.DBError, tc.SystemError
+	}
+	rollbackTransaction = false
+	return nil, tc.NoError
+}
+
+//The Parameter implementation of the Deleter interface
+//all implementations of Deleter should use transactions and return the proper errorType
+func (pl *TOParameter) Delete(db *sqlx.DB, user auth.CurrentUser) (error, tc.ApiErrorType) {
+	rollbackTransaction := true
+	tx, err := db.Beginx()
+	defer func() {
+		if tx == nil || !rollbackTransaction {
+			return
+		}
+		err := tx.Rollback()
+		if err != nil {
+			log.Errorln(errors.New("rolling back transaction: " + err.Error()))
+		}
+	}()
+
+	if err != nil {
+		log.Error.Printf("could not begin transaction: %v", err)
+		return tc.DBError, tc.SystemError
+	}
+	log.Debugf("about to run exec query: %s with parameter: %++v", deleteQuery(), pl)
+	result, err := tx.NamedExec(deleteQuery(), pl)
+	if err != nil {
+		log.Errorf("received error: %++v from delete execution", err)
+		return tc.DBError, tc.SystemError
+	}
+	rowsAffected, err := result.RowsAffected()
+	if err != nil {
+		return tc.DBError, tc.SystemError
+	}
+	if rowsAffected < 1 {
+		return errors.New("no parameter with that id found"), tc.DataMissingError
+	}
+	if rowsAffected > 1 {
+		return fmt.Errorf("this delete affected too many rows: %d", rowsAffected), tc.SystemError
+	}
+
+	err = tx.Commit()
+	if err != nil {
+		log.Errorln("Could not commit transaction: ", err)
+		return tc.DBError, tc.SystemError
+	}
+	rollbackTransaction = false
+	return nil, tc.NoError
+}
+
+func selectQuery() string {
+
+	query := `SELECT
+p.config_file,
+p.id,
+p.last_updated,
+p.name,
+p.value,
+p.secure,
+COALESCE(array_to_json(array_agg(pr.name) FILTER (WHERE pr.name IS NOT NULL)), '[]') AS profiles
+FROM parameter p
+LEFT JOIN profile_parameter pp ON p.id = pp.parameter
+LEFT JOIN profile pr ON pp.profile = pr.id`
+	return query
+}
+
+func updateQuery() string {
+	query := `UPDATE
+parameter SET
+config_file=:config_file,
+id=:id,
+name=:name,
+value=:value,
+secure=:secure
+WHERE id=:id RETURNING last_updated`
+	return query
+}
+
+// ParametersGroupBy returns the GROUP BY clause needed because selectQuery aggregates profile names per parameter.
+func ParametersGroupBy() string {
+	groupBy := ` GROUP BY p.config_file, p.id, p.last_updated, p.name, p.value, p.secure`
+	return groupBy
+}
+
+func deleteQuery() string {
+	query := `DELETE FROM parameter
+WHERE id=:id`
+	return query
+}
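
For reference, a minimal sketch of how the new Read implementation is expected to compose the helpers above, assuming the dbhelpers.BuildWhereAndOrderBy helper, the column map, and the imports used by the removed getParameters handler shown later in this diff (the function name sketchSelectParameters is illustrative only):

func sketchSelectParameters(db *sqlx.DB, params map[string]string) (*sqlx.Rows, []error, tc.ApiErrorType) {
	// Query parameters to database column mappings, as in the removed handler.
	queryParamsToSQLCols := map[string]dbhelpers.WhereColumnInfo{
		"config_file": dbhelpers.WhereColumnInfo{"p.config_file", nil},
		"id":          dbhelpers.WhereColumnInfo{"p.id", api.IsInt},
		"name":        dbhelpers.WhereColumnInfo{"p.name", nil},
		"secure":      dbhelpers.WhereColumnInfo{"p.secure", api.IsBool},
	}
	where, orderBy, queryValues, errs := dbhelpers.BuildWhereAndOrderBy(params, queryParamsToSQLCols)
	if len(errs) > 0 {
		return nil, errs, tc.DataConflictError
	}
	// selectQuery aggregates profile names, so the GROUP BY must come before ORDER BY.
	query := selectQuery() + where + ParametersGroupBy() + orderBy
	rows, err := db.NamedQuery(query, queryValues)
	if err != nil {
		return nil, []error{fmt.Errorf("querying: %v", err)}, tc.SystemError
	}
	return rows, nil, tc.NoError
}
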
diff --git a/traffic_ops/traffic_ops_golang/parameters_test.go b/traffic_ops/traffic_ops_golang/parameter/parameters_test.go
similarity index 59%
rename from traffic_ops/traffic_ops_golang/parameters_test.go
rename to traffic_ops/traffic_ops_golang/parameter/parameters_test.go
index 1b9049cd93..4cbfc89732 100644
--- a/traffic_ops/traffic_ops_golang/parameters_test.go
+++ b/traffic_ops/traffic_ops_golang/parameter/parameters_test.go
@@ -1,4 +1,4 @@
-package main
+package parameter
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one
@@ -24,6 +24,7 @@ import (
 	"time"
 
 	"github.com/apache/incubator-trafficcontrol/lib/go-tc"
+	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/api"
 	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/auth"
 	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/test"
 	"github.com/jmoiron/sqlx"
@@ -33,23 +34,31 @@ import (
 	sqlmock "gopkg.in/DATA-DOG/go-sqlmock.v1"
 )
 
-func getTestParameters() []tc.Parameter {
-	parameters := []tc.Parameter{}
-	testParameter := tc.Parameter{
-		ConfigFile:  "global",
-		ID:          1,
-		LastUpdated: tc.TimeNoMod{Time: time.Now()},
-		Name:        "paramname1",
+func getTestParameters() []tc.ParameterNullable {
+	parameters := []tc.ParameterNullable{}
+	lastUpdated := tc.TimeNoMod{}
+	lastUpdated.Scan(time.Now())
+	configFile := "global"
+	secureFlag := false
+	ID := 1
+	param := "paramname1"
+	val := "val1"
+
+	testParameter := tc.ParameterNullable{
+		ConfigFile:  &configFile,
+		ID:          &ID,
+		LastUpdated: &lastUpdated,
+		Name:        &param,
 		Profiles:    json.RawMessage(`["foo","bar"]`),
-		Secure:      false,
-		Value:       "val1",
+		Secure:      &secureFlag,
+		Value:       &val,
 	}
 	parameters = append(parameters, testParameter)
 
 	testParameter2 := testParameter
-	testParameter2.Name = "paramname2"
-	testParameter2.Value = "val2"
-	testParameter2.ConfigFile = "some.config"
+	testParameter2.Name = &param
+	testParameter2.Value = &val
+	testParameter2.ConfigFile = &configFile
 	testParameter2.Profiles = json.RawMessage(`["foo","baz"]`)
 	parameters = append(parameters, testParameter2)
 
@@ -67,11 +76,9 @@ func TestGetParameters(t *testing.T) {
 	defer db.Close()
 
 	testParameters := getTestParameters()
-	cols := test.ColsFromStructByTag("db", tc.Parameter{})
+	cols := test.ColsFromStructByTag("db", tc.ParameterNullable{})
 	rows := sqlmock.NewRows(cols)
 
-	//TODO: drichardson - build helper to add these Rows from the struct values
-	//                    or by CSV if types get in the way
 	for _, ts := range testParameters {
 		rows = rows.AddRow(
 			ts.ConfigFile,
@@ -86,25 +93,34 @@ func TestGetParameters(t *testing.T) {
 	mock.ExpectQuery("SELECT").WillReturnRows(rows)
 	v := map[string]string{"dsId": "1"}
 
-	parameters, errs, errType := getParameters(v, db, auth.PrivLevelAdmin)
+	parameters, errs, _ := refType.Read(db, v, auth.CurrentUser{})
 	if len(errs) > 0 {
-		t.Errorf("getParameters expected: no errors, actual: %v with error type: %s", errs, errType.String())
+		t.Errorf("parameter.Read expected: no errors, actual: %v", errs)
 	}
 
 	if len(parameters) != 2 {
-		t.Errorf("getParameters expected: len(parameters) == 1, actual: %v", len(parameters))
+		t.Errorf("parameter.Read expected: len(parameters) == 2, actual: %v", len(parameters))
 	}
 
 }
 
-type SortableParameters []tc.Parameter
+func TestInterfaces(t *testing.T) {
+	var i interface{}
+	i = &TOParameter{}
 
-func (s SortableParameters) Len() int {
-	return len(s)
-}
-func (s SortableParameters) Swap(i, j int) {
-	s[i], s[j] = s[j], s[i]
-}
-func (s SortableParameters) Less(i, j int) bool {
-	return s[i].Name < s[j].Name
+	if _, ok := i.(api.Creator); !ok {
+		t.Errorf("Parameter must be Creator")
+	}
+	if _, ok := i.(api.Reader); !ok {
+		t.Errorf("Parameter must be Reader")
+	}
+	if _, ok := i.(api.Updater); !ok {
+		t.Errorf("Parameter must be Updater")
+	}
+	if _, ok := i.(api.Deleter); !ok {
+		t.Errorf("Parameter must be Deleter")
+	}
+	if _, ok := i.(api.Identifier); !ok {
+		t.Errorf("Parameter must be Identifier")
+	}
 }
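
The runtime assertions in TestInterfaces could also be written as compile-time checks; a small alternative sketch (not part of this PR) that fails the build instead of the test if TOParameter stops satisfying the generic CRUD interfaces:

// Compile-time interface assertions; &TOParameter{} works because the CRUD
// methods are defined on the pointer receiver.
var _ api.Creator = &TOParameter{}
var _ api.Reader = &TOParameter{}
var _ api.Updater = &TOParameter{}
var _ api.Deleter = &TOParameter{}
var _ api.Identifier = &TOParameter{}
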
diff --git a/traffic_ops/traffic_ops_golang/parameters.go b/traffic_ops/traffic_ops_golang/parameters.go
deleted file mode 100644
index bbba922906..0000000000
--- a/traffic_ops/traffic_ops_golang/parameters.go
+++ /dev/null
@@ -1,147 +0,0 @@
-package main
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import (
-	"encoding/json"
-	"fmt"
-	"net/http"
-
-	"github.com/apache/incubator-trafficcontrol/lib/go-log"
-	"github.com/apache/incubator-trafficcontrol/lib/go-tc"
-	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/api"
-	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/auth"
-	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/dbhelpers"
-
-	"github.com/jmoiron/sqlx"
-)
-
-func parametersHandler(db *sqlx.DB) http.HandlerFunc {
-	return func(w http.ResponseWriter, r *http.Request) {
-		handleErrs := tc.GetHandleErrorsFunc(w, r)
-
-		ctx := r.Context()
-		user, err := auth.GetCurrentUser(ctx)
-		if err != nil {
-			handleErrs(http.StatusInternalServerError, err)
-			return
-		}
-		privLevel := user.PrivLevel
-
-		params, err := api.GetCombinedParams(r)
-		if err != nil {
-			log.Errorf("unable to get parameters from request: %s", err)
-			handleErrs(http.StatusInternalServerError, err)
-		}
-
-		resp, errs, errType := getParametersResponse(params, db, privLevel)
-		if len(errs) > 0 {
-			tc.HandleErrorsWithType(errs, errType, handleErrs)
-			return
-		}
-
-		respBts, err := json.Marshal(resp)
-		if err != nil {
-			handleErrs(http.StatusInternalServerError, err)
-			return
-		}
-
-		w.Header().Set("Content-Type", "application/json")
-		fmt.Fprintf(w, "%s", respBts)
-	}
-}
-
-func getParametersResponse(params map[string]string, db *sqlx.DB, privLevel int) (*tc.ParametersResponse, []error, tc.ApiErrorType) {
-	parameters, errs, errType := getParameters(params, db, privLevel)
-	if len(errs) > 0 {
-		return nil, errs, errType
-	}
-
-	resp := tc.ParametersResponse{
-		Response: parameters,
-	}
-	return &resp, nil, tc.NoError
-}
-
-func getParameters(params map[string]string, db *sqlx.DB, privLevel int) ([]tc.Parameter, []error, tc.ApiErrorType) {
-
-	var rows *sqlx.Rows
-	var err error
-
-	// Query Parameters to Database Query column mappings
-	// see the fields mapped in the SQL query
-	queryParamsToSQLCols := map[string]dbhelpers.WhereColumnInfo{
-		"config_file":  dbhelpers.WhereColumnInfo{"p.config_file", nil},
-		"id":           dbhelpers.WhereColumnInfo{"p.id", api.IsInt},
-		"last_updated": dbhelpers.WhereColumnInfo{"p.last_updated", nil},
-		"name":         dbhelpers.WhereColumnInfo{"p.name", nil},
-		"secure":       dbhelpers.WhereColumnInfo{"p.secure", api.IsBool},
-	}
-
-	where, orderBy, queryValues, errs := dbhelpers.BuildWhereAndOrderBy(params, queryParamsToSQLCols)
-	if len(errs) > 0 {
-		return nil, errs, tc.DataConflictError
-	}
-
-	query := selectParametersQuery() + where + ParametersGroupBy() + orderBy
-	log.Debugln("Query is ", query)
-
-	rows, err = db.NamedQuery(query, queryValues)
-	if err != nil {
-		return nil, []error{fmt.Errorf("querying: %v", err)}, tc.SystemError
-	}
-	defer rows.Close()
-
-	parameters := []tc.Parameter{}
-	for rows.Next() {
-		var s tc.Parameter
-		if err = rows.StructScan(&s); err != nil {
-			return nil, []error{fmt.Errorf("getting parameters: %v", err)}, tc.SystemError
-		}
-		if s.Secure && privLevel < auth.PrivLevelAdmin {
-			// Secure params only visible to admin
-			continue
-		}
-		parameters = append(parameters, s)
-	}
-	return parameters, nil, tc.NoError
-}
-
-func selectParametersQuery() string {
-
-	query := `SELECT
-p.config_file,
-p.id,
-p.last_updated,
-p.name,
-p.value,
-p.secure,
-COALESCE(array_to_json(array_agg(pr.name) FILTER (WHERE pr.name IS NOT NULL)), '[]') AS profiles
-FROM parameter p
-LEFT JOIN profile_parameter pp ON p.id = pp.parameter
-LEFT JOIN profile pr ON pp.profile = pr.id`
-	return query
-}
-
-// ParametersGroupBy ...
-func ParametersGroupBy() string {
-	groupBy := ` GROUP BY p.config_file, p.id, p.last_updated, p.name, p.value, p.secure`
-	return groupBy
-}
diff --git a/traffic_ops/traffic_ops_golang/physlocation/phys_locations.go b/traffic_ops/traffic_ops_golang/physlocation/phys_locations.go
index 451fd92491..be45249960 100644
--- a/traffic_ops/traffic_ops_golang/physlocation/phys_locations.go
+++ b/traffic_ops/traffic_ops_golang/physlocation/phys_locations.go
@@ -186,6 +186,7 @@ func (pl *TOPhysLocation) Update(db *sqlx.DB, user auth.CurrentUser) (error, tc.
 	}
 	defer resultRows.Close()
 
+	// get LastUpdated field -- updated by trigger in the db
 	var lastUpdated tc.TimeNoMod
 	rowsAffected := 0
 	for resultRows.Next() {
@@ -196,7 +197,7 @@ func (pl *TOPhysLocation) Update(db *sqlx.DB, user auth.CurrentUser) (error, tc.
 		}
 	}
 	log.Debugf("lastUpdated: %++v", lastUpdated)
-	pl.LastUpdated = lastUpdated
+	pl.LastUpdated = &lastUpdated
 	if rowsAffected != 1 {
 		if rowsAffected < 1 {
 			return errors.New("no phys_location found with this id"), tc.DataMissingError
@@ -272,7 +273,7 @@ func (pl *TOPhysLocation) Create(db *sqlx.DB, user auth.CurrentUser) (error, tc.
 	}
 
 	pl.SetID(id)
-	pl.LastUpdated = lastUpdated
+	pl.LastUpdated = &lastUpdated
 	err = tx.Commit()
 	if err != nil {
 		log.Errorln("Could not commit transaction: ", err)
diff --git a/traffic_ops/traffic_ops_golang/routes.go b/traffic_ops/traffic_ops_golang/routes.go
index 7c7df779f3..b5ccaf00c0 100644
--- a/traffic_ops/traffic_ops_golang/routes.go
+++ b/traffic_ops/traffic_ops_golang/routes.go
@@ -35,6 +35,7 @@ import (
 	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/cdn"
 	dsrequest "github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/deliveryservice/request"
 	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/division"
+	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/parameter"
 	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/physlocation"
 	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/ping"
 	"github.com/apache/incubator-trafficcontrol/traffic_ops/traffic_ops_golang/region"
@@ -148,7 +149,11 @@ func Routes(d ServerData) ([]Route, http.Handler, error) {
 		{1.3, http.MethodDelete, `regions/{id}$`, api.DeleteHandler(region.GetRefType(), d.DB), auth.PrivLevelOperations, Authenticated, nil},
 
 		//Parameters
-		{1.3, http.MethodGet, `parameters/?(\.json)?$`, parametersHandler(d.DB), auth.PrivLevelReadOnly, Authenticated, nil},
+		{1.3, http.MethodGet, `parameters/?(\.json)?$`, api.ReadHandler(parameter.GetRefType(), d.DB), auth.PrivLevelReadOnly, Authenticated, nil},
+		{1.3, http.MethodGet, `parameters/{id}$`, api.ReadHandler(parameter.GetRefType(), d.DB), auth.PrivLevelReadOnly, Authenticated, nil},
+		{1.3, http.MethodPut, `parameters/{id}$`, api.UpdateHandler(parameter.GetRefType(), d.DB), auth.PrivLevelOperations, Authenticated, nil},
+		{1.3, http.MethodPost, `parameters/?$`, api.CreateHandler(parameter.GetRefType(), d.DB), auth.PrivLevelOperations, Authenticated, nil},
+		{1.3, http.MethodDelete, `parameters/{id}$`, api.DeleteHandler(parameter.GetRefType(), d.DB), auth.PrivLevelOperations, Authenticated, nil},
 
 		//Ping
 		{1.2, http.MethodGet, `ping$`, ping.PingHandler(), auth.PrivLevelReadOnly, Authenticated, nil},
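
Once these routes are registered, the parameter endpoints behave like the other 1.3 CRUD routes. A rough client-side sketch of calling the new GET route follows; the to.example.net host, the /api/1.3 path prefix, and the mojolicious session cookie name are assumptions for illustration, not taken from this diff:

package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
)

func main() {
	// Hypothetical Traffic Ops host and API prefix; adjust for a real deployment.
	req, err := http.NewRequest(http.MethodGet, "https://to.example.net/api/1.3/parameters?name=paramname1", nil)
	if err != nil {
		log.Fatal(err)
	}
	// Session cookie obtained from a prior login request (cookie name assumed).
	req.AddCookie(&http.Cookie{Name: "mojolicious", Value: "<session token>"})
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Status)
	fmt.Println(string(body))
}
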
diff --git a/traffic_ops/traffic_ops_golang/systeminfo/system_info.go b/traffic_ops/traffic_ops_golang/systeminfo/system_info.go
index 1554a56f98..1dcb8fee43 100644
--- a/traffic_ops/traffic_ops_golang/systeminfo/system_info.go
+++ b/traffic_ops/traffic_ops_golang/systeminfo/system_info.go
@@ -65,7 +65,7 @@ func getSystemInfoResponse(db *sqlx.DB, privLevel int) (*tc.SystemInfoResponse,
 	}
 
 	resp := tc.SystemInfoResponse{}
-	resp.Response.Parameters = info
+	resp.Response.ParametersNullable = info
 	return &resp, nil
 }
 
@@ -74,6 +74,7 @@ func getSystemInfo(db *sqlx.DB, privLevel int) (map[string]string, error) {
 	query := `SELECT
 p.name,
 p.secure,
+p.last_updated,
 p.value
 FROM parameter p
 WHERE p.config_file='global'`
@@ -87,15 +88,26 @@ WHERE p.config_file='global'`
 
 	info := make(map[string]string)
 	for rows.Next() {
-		p := tc.Parameter{}
+		p := tc.ParameterNullable{}
 		if err = rows.StructScan(&p); err != nil {
 			return nil, fmt.Errorf("getting system_info: %v", err)
 		}
-		if p.Secure && privLevel < auth.PrivLevelAdmin {
+
+		var isSecure bool
+		if p.Secure != nil {
+			isSecure = *p.Secure
+		}
+
+		name := p.Name
+		value := p.Value
+		if isSecure && privLevel < auth.PrivLevelAdmin {
 			// Secure params only visible to admin
 			continue
 		}
-		info[p.Name] = p.Value
+
+		if name != nil && value != nil {
+			info[*name] = *value
+		}
 	}
 	if err != nil {
 		return nil, err
diff --git a/traffic_ops/traffic_ops_golang/systeminfo/system_info_test.go b/traffic_ops/traffic_ops_golang/systeminfo/system_info_test.go
index e1628d4906..1fd316d7b8 100644
--- a/traffic_ops/traffic_ops_golang/systeminfo/system_info_test.go
+++ b/traffic_ops/traffic_ops_golang/systeminfo/system_info_test.go
@@ -33,28 +33,6 @@ import (
 	"github.com/jmoiron/sqlx"
 )
 
-var sysInfoParameters = []tc.Parameter{
-	tc.Parameter{
-		ConfigFile:  "global",
-		ID:          1,
-		LastUpdated: tc.TimeNoMod{Time: time.Now()},
-		Name:        "paramname1",
-		Profiles:    json.RawMessage(`["foo","bar"]`),
-		Secure:      false,
-		Value:       "val1",
-	},
-
-	tc.Parameter{
-		ConfigFile:  "global",
-		ID:          2,
-		LastUpdated: tc.TimeNoMod{Time: time.Now()},
-		Name:        "paramname2",
-		Profiles:    json.RawMessage(`["foo","bar"]`),
-		Secure:      false,
-		Value:       "val2",
-	},
-}
-
 func TestGetSystemInfo(t *testing.T) {
 	mockDB, mock, err := sqlmock.New()
 	if err != nil {
@@ -65,11 +43,43 @@ func TestGetSystemInfo(t *testing.T) {
 	db := sqlx.NewDb(mockDB, "sqlmock")
 	defer db.Close()
 
-	cols := test.ColsFromStructByTag("db", tc.Parameter{})
+	cols := test.ColsFromStructByTag("db", tc.ParameterNullable{})
 	rows := sqlmock.NewRows(cols)
 
-	//TODO: drichardson - build helper to add these Rows from the struct values
-	//                    or by CSV if types get in the way
+	lastUpdated := tc.TimeNoMod{Time: time.Now()}
+	configFile := "global"
+	secure := false
+
+	firstID := 1
+	firstName := "paramname1"
+	firstVal := "val1"
+
+	secondID := 2
+	secondName := "paramname2"
+	secondVal := "val2"
+
+	var sysInfoParameters = []tc.ParameterNullable{
+		tc.ParameterNullable{
+			ConfigFile:  &configFile,
+			ID:          &firstID,
+			LastUpdated: &lastUpdated,
+			Name:        &firstName,
+			Profiles:    json.RawMessage(`["foo","bar"]`),
+			Secure:      &secure,
+			Value:       &firstVal,
+		},
+
+		tc.ParameterNullable{
+			ConfigFile:  &configFile,
+			ID:          &secondID,
+			LastUpdated: &lastUpdated,
+			Name:        &secondName,
+			Profiles:    json.RawMessage(`["foo","bar"]`),
+			Secure:      &secure,
+			Value:       &secondVal,
+		},
+	}
+
 	for _, ts := range sysInfoParameters {
 		rows = rows.AddRow(
 			ts.ConfigFile,


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services