Posted to commits@openwhisk.apache.org by dr...@apache.org on 2018/04/10 15:13:03 UTC

[incubator-openwhisk-deploy-mesos] branch master updated: Add DC/OS universe for OpenWhisk and orchestration scripts (#2)

This is an automated email from the ASF dual-hosted git repository.

dragos pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-openwhisk-deploy-mesos.git


The following commit(s) were added to refs/heads/master by this push:
     new 341f6ef  Add DC/OS universe for OpenWhisk and orchestration scripts (#2)
341f6ef is described below

commit 341f6efc514be7484c92a583eabf71be3eaa8d48
Author: Duy Nguyen <du...@gmail.com>
AuthorDate: Tue Apr 10 10:12:59 2018 -0500

    Add DC/OS universe for OpenWhisk and orchestration scripts (#2)
    
    * Set up project skeleton
    * Add latest universe
    * Add script for orchestration
    * Update couchdb package to use openwhisk image
    * Update accepted resource roles for api-gateway
    * Update latest params for controller and invoker
    * Update deploy script
    * Remove kamon and runtime_manifest from config
    * Update universe readme
    * Update subprojects
    * Update project status in readme
---
 .gitignore                                         |   2 +
 .travis.yml                                        |  16 +
 CONTRIBUTING.md                                    |  66 ++
 README.md                                          |  20 +
 dcos-deploy-script/Makefile                        | 127 ++++
 dcos-deploy-script/README.md                       |  57 ++
 dcos-deploy-script/config/kafka.json               |  10 +
 dcos-universe/LICENSE                              | 202 ++++++
 dcos-universe/README.md                            | 496 ++++++++++++++
 dcos-universe/converter/Dockerfile                 |  17 +
 dcos-universe/converter/build.bash                 |  50 ++
 dcos-universe/converter/marathon.json              |  34 +
 dcos-universe/converter/service/__init__.py        |   0
 dcos-universe/converter/service/converter.py       | 226 ++++++
 dcos-universe/docker/local-universe/Dockerfile     |  10 +
 .../docker/local-universe/Dockerfile.base          |  29 +
 .../docker/local-universe/Dockerfile.static        |  30 +
 .../docker/local-universe/Dockerfile.static.base   |   5 +
 dcos-universe/docker/local-universe/Makefile       |  54 ++
 dcos-universe/docker/local-universe/README.md      | 250 +++++++
 .../dcos-local-universe-http.service               |  16 +
 .../dcos-local-universe-registry.service           |  16 +
 dcos-universe/docker/local-universe/default.conf   |  95 +++
 .../docker/local-universe/docker-compose.yml       |  15 +
 .../docker/local-universe/registry-config.yml      |  23 +
 dcos-universe/docker/server/Dockerfile             |  15 +
 dcos-universe/docker/server/build.bash             | 114 ++++
 dcos-universe/docker/server/marathon.json          |  36 +
 .../etc/nginx/conf.d/universe.marathon.mesos.conf  |  93 +++
 dcos-universe/docs/tutorial/Dockerfile             |  11 +
 dcos-universe/docs/tutorial/GetStarted.md          | 502 ++++++++++++++
 dcos-universe/docs/tutorial/helloworld.py          |  30 +
 .../docs/tutorial/time-server/0/config.json        |  28 +
 .../tutorial/time-server/0/marathon.json.mustache  |  35 +
 .../docs/tutorial/time-server/0/package.json       |   8 +
 .../docs/tutorial/time-server/0/resource.json      |   9 +
 dcos-universe/hooks/pre-commit                     |  10 +
 .../repo/meta/schema/build-definition-schema.json  | 447 ++++++++++++
 dcos-universe/repo/meta/schema/command-schema.json |  20 +
 dcos-universe/repo/meta/schema/config-schema.json  | 150 ++++
 .../repo/meta/schema/metadata-schema.json          | 426 ++++++++++++
 dcos-universe/repo/meta/schema/package-schema.json | 339 +++++++++
 .../repo/meta/schema/v2-resource-schema.json       |  55 ++
 dcos-universe/repo/meta/schema/v3-repo-schema.json | 507 ++++++++++++++
 .../repo/meta/schema/v3-resource-schema.json       | 172 +++++
 dcos-universe/repo/meta/schema/v4-repo-schema.json | 629 +++++++++++++++++
 .../repo/packages/A/apigateway/0/config.json       |  93 +++
 .../packages/A/apigateway/0/marathon.json.mustache |  60 ++
 .../repo/packages/A/apigateway/0/package.json      |  18 +
 .../repo/packages/A/apigateway/0/resource.json     |  14 +
 .../repo/packages/W/whisk-controller/0/config.json | 356 ++++++++++
 .../W/whisk-controller/0/marathon.json.mustache    | 117 ++++
 .../packages/W/whisk-controller/0/package.json     |  19 +
 .../packages/W/whisk-controller/0/resource.json    |   7 +
 .../repo/packages/W/whisk-couchdb/0/config.json    |  93 +++
 .../W/whisk-couchdb/0/marathon.json.mustache       |  59 ++
 .../repo/packages/W/whisk-couchdb/0/package.json   |  16 +
 .../repo/packages/W/whisk-couchdb/0/resource.json  |  14 +
 .../repo/packages/W/whisk-invoker/0/config.json    | 314 +++++++++
 .../W/whisk-invoker/0/marathon.json.mustache       | 149 ++++
 .../repo/packages/W/whisk-invoker/0/package.json   |  19 +
 .../repo/packages/W/whisk-invoker/0/resource.json  |   7 +
 dcos-universe/scripts/build.sh                     |  23 +
 dcos-universe/scripts/diff_version.sh              |  27 +
 dcos-universe/scripts/gen_universe.py              | 754 +++++++++++++++++++++
 dcos-universe/scripts/generate-config-reference.py |  84 +++
 dcos-universe/scripts/install-git-hooks.sh         |  18 +
 dcos-universe/scripts/json_dup_key_check.py        |  32 +
 dcos-universe/scripts/local-universe.py            | 475 +++++++++++++
 dcos-universe/scripts/old-local-universe.py        | 412 +++++++++++
 dcos-universe/scripts/publish-local-universe.sh    |  10 +
 .../scripts/requirements/requirements.txt          |   1 +
 dcos-universe/scripts/validate_packages.py         | 181 +++++
 tools/travis/scancode.sh                           |  11 +
 tools/travis/setupscan.sh                          |   8 +
 75 files changed, 8893 insertions(+)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..3ef6a01
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+*.iml
+*/target/
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..ec06487
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,16 @@
+sudo: required
+
+env:
+  global:
+    - DOCKER_COMPOSE_VERSION: 1.13
+
+services:
+  - docker
+
+before_install:
+  - ./tools/travis/setupscan.sh
+  - ${TOOL}/.travis/setup.sh
+
+script:
+  - ./tools/travis/scancode.sh
+  - ${TOOL}/.travis/build.sh
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..4dca934
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,66 @@
+<!--
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more contributor
+# license agreements.  See the NOTICE file distributed with this work for additional
+# information regarding copyright ownership.  The ASF licenses this file to you
+# under the Apache License, Version 2.0 (the # "License"); you may not use this
+# file except in compliance with the License.  You may obtain a copy of the License
+# at:
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software distributed
+# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
+# CONDITIONS OF ANY KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations under the License.
+#
+-->
+
+# Contributing to Apache OpenWhisk
+
+Anyone can contribute to the OpenWhisk project and we welcome your contributions.
+
+There are multiple ways to contribute: report bugs, improve the docs, and
+contribute code, but you must follow these prerequisites and guidelines:
+
+ - [Contributor License Agreement](#contributor-license-agreement)
+ - [Raising issues](#raising-issues)
+ - [Coding Standards](#coding-standards)
+
+### Contributor License Agreement
+
+All contributors must sign and submit an Apache CLA (Contributor License Agreement).
+
+Instructions on how to do this can be found here:
+[http://www.apache.org/licenses/#clas](http://www.apache.org/licenses/#clas)
+
+Once submitted, you will receive a confirmation email from the Apache Software Foundation (ASF) and be added to
+the following list: http://people.apache.org/unlistedclas.html.
+
+Project committers will use this list to verify pull requests (PRs) come from contributors that have signed a CLA.
+
+We look forward to your contributions!
+
+## Raising issues
+
+Please raise any bug reports on the respective project repository's GitHub issue tracker. Be sure to search the
+list to see if your issue has already been raised.
+
+A good bug report is one that makes it easy for us to understand what you were trying to do and what went wrong.
+Provide as much context as possible so we can try to recreate the issue.
+
+### Discussion
+
+Please use the project's developer email list to engage our community:
+[dev@openwhisk.incubator.apache.org](mailto:dev@openwhisk.incubator.apache.org)
+
+In addition, we provide a "dev" Slack team channel for conversations at:
+https://openwhisk-team.slack.com/messages/dev/
+
+### Coding standards
+
+Please ensure you follow the coding standards used throughout the existing
+code base. Some basic rules include:
+
+ - all files must have the Apache license in the header.
+ - all PRs must have passing builds for all operating systems.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..bb86dbb
--- /dev/null
+++ b/README.md
@@ -0,0 +1,20 @@
+# OpenWhisk Deployment for Mesos
+
+[![License](https://img.shields.io/badge/license-Apache--2.0-blue.svg)](http://www.apache.org/licenses/LICENSE-2.0)
+[![Build Status](https://travis-ci.org/apache/incubator-openwhisk-deploy-mesos.svg?branch=master)](https://travis-ci.org/apache/incubator-openwhisk-deploy-mesos)
+
+> Status: This module is under active development and is provided as-is for testing.
+
+This repository is part of [Apache OpenWhisk](http://openwhisk.incubator.apache.org/) and can be used to deploy OpenWhisk to a Mesos cluster.
+
+## Subprojects
+
+* [dcos-universe](dcos-universe/README.md) contains the source for building OpenWhisk packages in a DC/OS Universe.
+* [dcos-deploy-script](dcos-deploy-script/README.md) helps orchestrate the deployment of the above OpenWhisk packages.
+
+## Travis builds
+
+Each tool in this repository has to provide Travis build scripts inside a `.travis` folder.
+The folder should define two scripts:
+* `setup.sh` - invoked during the `before_install` phase
+* `build.sh` - invoked during the `script` phase
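+
+For illustration, a minimal `build.sh` for a hypothetical tool `mytool` might look like the following (the tool name and build command are placeholders):
+
+```bash
+#!/bin/bash
+# mytool/.travis/build.sh -- runs during the `script` phase
+set -euo pipefail
+cd "$(dirname "$0")/.."   # change into the tool's root directory, i.e. mytool/
+make test                 # run whatever verifies this tool
+```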
diff --git a/dcos-deploy-script/Makefile b/dcos-deploy-script/Makefile
new file mode 100644
index 0000000..fc270f7
--- /dev/null
+++ b/dcos-deploy-script/Makefile
@@ -0,0 +1,127 @@
+DCOS_ENDPOINT ?= <DC/OS URL>
+APIGATEWAY_DNS ?= <gateway endpoint>
+OPENWHISK_UNIVERSE ?= <Universe URL>
+WSK_NAMESPACE ?= guest
+WSK_AUTH_TOKEN ?= <enter token here>
+
+cli:
+	curl -s https://downloads.dcos.io/binaries/cli/darwin/x86-64/dcos-1.10/dcos -o dcos
+	chmod +x dcos
+	./dcos --version
+	./dcos cluster setup $(DCOS_ENDPOINT)
+
+repo:
+	./dcos package repo add --index=0 openwhisk-universe $(OPENWHISK_UNIVERSE)
+
+.PHONY: openwhisk-install
+openwhisk-install: exhibitor-install kafka-install invoker-install controller-install test-helloworld
+
+.PHONY: openwhisk-uninstall
+openwhisk-uninstall: invoker-uninstall controller-uninstall kafka-uninstall exhibitor-uninstall
+
+.PHONY: apigateway-install
+apigateway-install:
+	yes | ./dcos package install apigateway
+
+.PHONY: exhibitor-install
+exhibitor-install:
+	yes | ./dcos package install exhibitor
+	echo "$$(tput setaf 4)waiting until the exhibitor is up (serving) ...$$(tput sgr0)"
+	until (curl -s http://exhibitor-dcos.$(APIGATEWAY_DNS)/exhibitor/v1/cluster/status | jq . | grep "serving" | wc -l | grep 3); do printf '.'; sleep 5; done
+	echo "$$(tput setaf 2)exhibitor is up!$$(tput sgr0)"
+
+.PHONY: kafka-install
+kafka-install:
+	if (curl -s http://exhibitor-dcos.$(APIGATEWAY_DNS)/exhibitor/v1/cluster/status | jq . | grep "serving" | wc -l | grep 3); then \
+		echo "$$(tput setaf 4)exhibitor is running. installing kafka ...$$(tput sgr0)"; \
+		yes | ./dcos package install kafka --options=config/kafka.json; \
+		until ((./dcos kafka update status --name=kafka | grep -v COMPLETE) || echo "OK") | grep "OK"; do printf '.'; sleep 5; done; \
+		echo "$$(tput setaf 2)kafka is up!$$(tput sgr0)"; \
+	else \
+		echo "$$(tput setaf 1)exhibitor is not running. Cancelling kafka installation.$$(tput sgr0)"; \
+	fi
+
+.PHONY: couchdb-install
+couchdb-install:
+	echo "$$(tput setaf 4)installing whisk-couchdb ...$$(tput sgr0)"
+	yes | ./dcos package install whisk-couchdb
+	until (curl -s -I http://whisk-couchdb.$(APIGATEWAY_DNS) | grep "HTTP/1.1 200 OK"); do printf '.'; sleep 5; done
+	echo "$$(tput setaf 2)couchdb is up!$$(tput sgr0)"
+
+.PHONY: invoker-install
+invoker-install:
+	if ((./dcos kafka update status --name=kafka | grep -v COMPLETE) || echo "OK") | grep "OK"; then \
+		echo "$$(tput setaf 4)installing invoker ...$$(tput sgr0)"; \
+		yes | ./dcos package install whisk-invoker; \
+		until (curl -s http://whisk-invoker.$(APIGATEWAY_DNS)/ping | grep "pong"); do printf '.'; sleep 5; done; \
+		echo "$$(tput setaf 2)invoker is up!$$(tput sgr0)"; \
+	else \
+		echo "$$(tput setaf 1)kafka is not running. Cancelling invoker installation.$$(tput sgr0)"; \
+	fi
+
+.PHONY: controller-install
+controller-install:
+	if ((./dcos kafka update status --name=kafka | grep -v COMPLETE) || echo "OK") | grep "OK"; then \
+		echo "$$(tput setaf 4)installing controller ...$$(tput sgr0)"; \
+		yes | ./dcos package install whisk-controller; \
+		until (curl -s http://whisk-controller.$(APIGATEWAY_DNS)/ping | grep "pong"); do printf '.'; sleep 5; done; \
+		echo "$$(tput setaf 2)controller is up!$$(tput sgr0)"; \
+		until (curl -s http://whisk-controller.$(APIGATEWAY_DNS)/invokers | grep "up"); do printf '.'; sleep 5; done; \
+		echo "$$(tput setaf 2)at least one invoker is healthy!$$(tput sgr0)"; \
+	else \
+		echo "$$(tput setaf 1)kafka is not running. Cancelling controller installation.$$(tput sgr0)"; \
+	fi
+
+.PHONY: exhibitor-uninstall
+exhibitor-uninstall:
+	echo "$$(tput setaf 4)uninstalling exhibitor ...$$(tput sgr0)"
+	printf "exhibitor-dcos\n" | ./dcos package uninstall exhibitor --app-id=exhibitor-dcos
+	until (curl -s http://exhibitor-dcos.$(APIGATEWAY_DNS)/exhibitor/v1/cluster/status | grep -v "serving"); do printf '.'; sleep 5; done
+	echo "$$(tput setaf 2)exhibitor was successfully uninstalled!$$(tput sgr0)"
+
+.PHONY: kafka-uninstall
+kafka-uninstall:
+	echo "$$(tput setaf 4)uninstalling kafka ...$$(tput sgr0)"
+	yes | ./dcos package install kafka --cli --global
+	printf "kafka\n" | ./dcos package uninstall kafka --app-id=kafka
+	# need to sleep for some time (20 sec) before kafka enters the shutdown-in-progress phase
+	sleep 20
+	until (./dcos kafka update status --name=kafka | grep "Could not reach the service scheduler"); do printf '.'; sleep 5; done
+	echo "$$(tput setaf 2)kafka was successfully uninstalled!$$(tput sgr0)"
+
+# only for <= DC/OS 1.9
+kafka-cleanup:
+	docker run mesosphere/janitor /janitor.py -r kafka-role -p dcos-kafka -z dcos-service-kafka \
+		-m $(DCOS_ENDPOINT)/mesos/master/ \
+		-n $(DCOS_ENDPOINT)/service/marathon/v2/apps/ \
+		-e $(DCOS_ENDPOINT)/exhibitor/ \
+		-a $(shell ./dcos config show core.dcos_acs_token)
+
+.PHONY: controller-uninstall
+controller-uninstall:
+	echo "$$(tput setaf 4)uninstalling controller ...$$(tput sgr0)"
+	printf "whisk-controller\n" | ./dcos package uninstall whisk-controller
+	until (curl -s http://whisk-controller.$(APIGATEWAY_DNS)/ping | grep -v "pong"); do printf '.'; sleep 5; done
+	echo "$$(tput setaf 2)controller was successfully uninstalled!$$(tput sgr0)"
+
+.PHONY: invoker-uninstall
+invoker-uninstall:
+	echo "$$(tput setaf 4)uninstalling invoker ...$$(tput sgr0)"
+	printf "whisk-invoker\n" | ./dcos package uninstall whisk-invoker
+	until (curl -s http://whisk-invoker.$(APIGATEWAY_DNS)/ping | grep -v "pong"); do printf '.'; sleep 5; done
+	until (curl -s http://whisk-controller.$(APIGATEWAY_DNS)/invokers | grep -v "up"); do printf '.'; sleep 5; done
+	echo "$$(tput setaf 2)invoker was successfully uninstalled!$$(tput sgr0)"
+
+.PHONY: test-helloworld
+test-helloworld:
+	echo "testing hello-world action"
+	echo "$$(tput setaf 4)creating hello-world action ...$$(tput sgr0)"
+	curl -s -u "$(WSK_AUTH_TOKEN)" http://whisk-controller.$(APIGATEWAY_DNS)/api/v1/namespaces/$(WSK_NAMESPACE)/actions/hello-world -XPUT -d '{"namespace":"$(WSK_NAMESPACE)","name":"hello-world","exec":{"kind":"nodejs:6","code":"function main(){return {\"body\": \"Hello, World!\"};}"}}' -H "Content-Type: application/json"
+	echo "$$(tput setaf 4)invoking hello-world action ...$$(tput sgr0)"
+	if (curl -s -u "$(WSK_AUTH_TOKEN)" "http://whisk-controller.$(APIGATEWAY_DNS)/api/v1/namespaces/$(WSK_NAMESPACE)/actions/hello-world?blocking=true&result=true" -XPOST) | grep "Hello, World!"; then \
+		echo "$$(tput setaf 2)successfully invoked hello-world action$$(tput sgr0)"; \
+	else \
+		echo "$$(tput setaf 1)can not invoke hello-world action$$(tput sgr0)"; \
+	fi
+	echo "$$(tput setaf 1)deleting hello-world action ...$$(tput sgr0)"
+	curl -s -u "$(WSK_AUTH_TOKEN)" http://whisk-controller.$(APIGATEWAY_DNS)/api/v1/namespaces/$(WSK_NAMESPACE)/actions/hello-world -XDELETE
diff --git a/dcos-deploy-script/README.md b/dcos-deploy-script/README.md
new file mode 100644
index 0000000..6dfd77f
--- /dev/null
+++ b/dcos-deploy-script/README.md
@@ -0,0 +1,57 @@
+Bash script for deploying OpenWhisk packages on DC/OS
+=====================================================
+
+# Set up
+- Make sure the environment variables at the top of the `Makefile` are set correctly for your environment (they can also be overridden on the command line, as shown below).
+- Install the DC/OS CLI: `make cli` (uses v1.10 by default)
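+
+For example, the variables can be overridden per invocation without editing the `Makefile` (the values are placeholders):
+
+```bash
+make cli DCOS_ENDPOINT=https://dcos.example.com
+make repo OPENWHISK_UNIVERSE=https://universe.example.com/repo
+```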
+
+# Install custom universes
+
+In order to add OpenWhisk universe packages to the DC/OS cluster (if not yet done):
+
+```bash
+make repo
+```
+
+# Installation steps
+
+## First installation on a fresh cluster
+
+- `make apigateway-install`
+- make sure the Route53 record was set up correctly
+- `make couchdb-install`
+
+## Installation of OpenWhisk packages
+
+```bash
+make openwhisk-install
+```
+
+# Make commands deep dive
+## Install APIGateway
+
+```bash
+make apigateway-install
+```
+
+## Install OpenWhisk packages
+
+```bash
+make openwhisk-install
+```
+
+This installs the OpenWhisk stack, including:
+- Dedicated Zookeeper Exhibitor for OpenWhisk
+- Kafka service
+- OpenWhisk Controller
+- OpenWhisk Invoker
+
+There are also install targets for individual services, e.g. `kafka-install`.
+
+## Uninstall OpenWhisk packages
+
+```bash
+make openwhisk-uninstall
+```
+
+There are also uninstall targets for individual services, e.g. `kafka-uninstall`.
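+
+A quick way to sanity-check a finished deployment, mirroring what the `controller-install` and `test-helloworld` targets do (`APIGATEWAY_DNS` is the DNS name configured in the `Makefile`):
+
+```bash
+curl -s http://whisk-controller.${APIGATEWAY_DNS}/ping       # expects "pong"
+curl -s http://whisk-controller.${APIGATEWAY_DNS}/invokers   # expects at least one invoker to be "up"
+```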
diff --git a/dcos-deploy-script/config/kafka.json b/dcos-deploy-script/config/kafka.json
new file mode 100644
index 0000000..09a193f
--- /dev/null
+++ b/dcos-deploy-script/config/kafka.json
@@ -0,0 +1,10 @@
+{
+  "brokers": {
+    "port": 9092
+  },
+  "kafka": {
+    "kafka_zookeeper_uri": "exhibitor-dcos.marathon.mesos:31886",
+    "default_replication_factor": 2,
+    "delete_topic_enable": true
+  }
+}
diff --git a/dcos-universe/LICENSE b/dcos-universe/LICENSE
new file mode 100644
index 0000000..7302ad9
--- /dev/null
+++ b/dcos-universe/LICENSE
@@ -0,0 +1,202 @@
+
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright 2014 Mesosphere
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/dcos-universe/README.md b/dcos-universe/README.md
new file mode 100644
index 0000000..5929cfb
--- /dev/null
+++ b/dcos-universe/README.md
@@ -0,0 +1,496 @@
+# DC/OS Universe of OpenWhisk packages
+
+| Build | Status |
+|---|---|
+|CI   | [![Build Status](https://teamcity.mesosphere.io/guestAuth/app/rest/builds/buildType:(id:Oss_Universe_Ci)/statusIcon)](https://teamcity.mesosphere.io/viewType.html?buildTypeId=Oss_Universe_Ci&guest=1)|
+| Universe Server | [![Build Status](https://teamcity.mesosphere.io/guestAuth/app/rest/builds/buildType:(id:Oss_Universe_UniverseServer)/statusIcon)](https://teamcity.mesosphere.io/viewType.html?buildTypeId=Oss_Universe_UniverseServer&guest=1)|
+
+Mesosphere Universe registry of packages made available for DC/OS Clusters.
+
+#### Table of Contents
+* [Universe Purpose](#universe-purpose)
+  * [Library Dependencies](#library-dependencies)
+* [Publish a Package](#publish-a-package-1)
+  * [Creating a Package](#creating-a-package)
+    * [`package.json`](#packagejson)
+      * [`.minDcosReleaseVersion`](#mindcosreleaseversion)
+    * [`config.json`](#configjson)
+    * [`marathon.json.mustache`](#marathonjsonmustache)
+    * [`command.json`](#commandjson)
+    * [`resource.json`](#resourcejson)
+      * [Docker Images](#docker-images)
+      * [Images](#images)
+      * [CLI Resources](#cli-resources)
+  * [Submit your Package](#submit-your-package)
+* [Repository Consumption](#repository-consumption-1)
+  * [Universe Server](#universe-server)
+    * [Build Universe Server locally](#build-universe-server-locally)
+    * [Run Universe Server](#run-universe-server)
+  * [Consumption Protocol](#consumption-protocol)
+  * [Supported DC/OS Versions](#supported-dcos-versions)
+
+
+## Universe Purpose
+You can publish and store packages in the Universe repository, and those packages can then be consumed by DC/OS. This git repo facilitates the three necessary functions: publishing, storing, and consuming packages. If you are new to Universe and packages, this [Get Started Guide](docs/tutorial/GetStarted.md) is highly recommended.
+
+### Library dependencies
+* [jq](https://stedolan.github.io/jq/download/) is installed in your environment.
+* `python3` is installed in your environment.
+* Docker is installed in your environment.
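+
+A quick way to confirm these are available locally:
+
+```bash
+jq --version && python3 --version && docker --version
+```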
+
+### Publish a Package
+
+To publish a package to Universe, fork this repo and open a Pull Request. A set of automated builds will be run against
+the Pull Request to ensure the modifications made in the PR leave the Universe well formed.
+See [Creating a Package](#creating-a-package) for details.
+
+### Registry of Packages
+
+The registry of published packages is maintained as the contents of this repo in the `repo/packages` directory. As of
+repository version `3.0`, multiple packaging versions are allowed to co-exist in the same repository. Validation of
+packages is coordinated based on the packaging version defined in `package.json`.
+
+### Repository Consumption
+
+In order for published packages to be consumed and installed in a DC/OS Cluster the Universe Server needs to be built
+and run in a location accessible by the DC/OS Cluster. See [Universe Server](#universe-server) for details on
+building the Universe artifacts and Server.
+
+## Publish a Package
+
+### Creating a Package
+
+Each package has its own directory, with one subdirectory for each package revision. Each package revision directory
+contains the set of files necessary to create a consumable package that can be used by a DC/OS Cluster to install
+the package.
+```
+└── repo/packages/F/foo
+    ├── 0
+    │   ├── command.json
+    │   ├── config.json
+    │   ├── marathon.json.mustache
+    │   ├── resource.json
+    │   └── package.json
+    ├── 1
+    │   ├── command.json
+    │   ├── config.json
+    │   ├── marathon.json.mustache
+    │   ├── resource.json
+    │   └── package.json
+    └── ...
+```
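+
+A new revision is typically created by copying the latest revision directory and editing it in place; for a hypothetical package `foo` that could look like:
+
+```bash
+cd repo/packages/F/foo
+cp -r 1 2                 # next revision number
+$EDITOR 2/package.json    # bump "version" and adjust the other files as needed
+```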
+
+
+#### `package.json`
+
+|Packaging Version|   |
+|-----------------|---|
+|2.0|required|
+|3.0|required|
+|4.0|required|
+
+Every package in Universe must have a `package.json` file which specifies the high level metadata about the package.
+
+Currently, a package can specify one of three values for `.packagingVersion`:
+`2.0`, `3.0`, or `4.0`. The declared version dictates which other files are
+required for the complete package, as well as the schema(s) all the files must
+adhere to. Below is a snippet that represents a version `4.0` package.
+
+See [`repo/meta/schema/package-schema.json`](repo/meta/schema/package-schema.json) for the full json schema outlining
+what properties are available for each corresponding version of a package.
+
+```json
+{
+  "packagingVersion": "4.0",
+  "name": "foo",
+  "version": "1.2.3",
+  "tags": ["mesosphere", "framework"],
+  "maintainer": "help@bar.io",
+  "description": "Does baz.",
+  "scm": "https://github.com/bar/foo.git",
+  "website": "http://bar.io/foo",
+  "framework": true,
+  "upgradesFrom": ["1.2.2"],
+  "downgradesTo": ["1.2.2"],
+  "minDcosReleaseVersion": "1.10",
+  "postInstallNotes": "Have fun foo-ing and baz-ing!"
+}
+```
+
+For the first version of the package, add this line to the beginning of `preInstallNotes`: ```This DC/OS Service is currently in preview. There may be bugs, incomplete features, incorrect documentation, or other discrepancies. Preview packages should never be used in production!``` It will be removed once the package has been tested and used by the community.
+
+###### `.minDcosReleaseVersion`
+
+|Packaging Version|   |
+|-----------------|---|
+|2.0|not supported|
+|3.0|optional|
+|4.0|optional|
+
+Introduced in `packagingVersion` `3.0`, `.minDcosReleaseVersion` can be specified as a property of `package.json`.
+When `.minDcosReleaseVersion` is specified the package will only be made available to DC/OS clusters with a DC/OS
+Release Version greater than or equal to (`>=`) the value specified.
+
+For example, `"minDcosReleaseVersion" : "1.8"` will prevent the package from being installed on clusters older than DC/OS 1.8.
+
+###### `.upgradesFrom`
+
+|Packaging Version|   |
+|-----------------|---|
+|2.0|not supported|
+|3.0|not supported|
+|4.0|optional|
+
+Introduced in `packagingVersion` `4.0`, `.upgradesFrom` can be specified as a property of `package.json`.
+When `.upgradesFrom` is specified this indicates to users that the package is able to upgrade from any of
+the versions listed in the property. It is the responsibility of the package creator to make sure that this
+is indeed the case.
+
+###### `.downgradesTo`
+
+|Packaging Version|   |
+|-----------------|---|
+|2.0|not supported|
+|3.0|not supported|
+|4.0|optional|
+
+Introduced in `packagingVersion` `4.0`, `.downgradesTo` can be specified as a property of `package.json`.
+When `.downgradesTo` is specified this indicates to users that the package is able to downgrade to any of
+the versions listed in the property. It is the responsibility of the package creator to make sure that this
+is indeed the case.
+
+#### `config.json`
+
+|Packaging Version|   |
+|-----------------|---|
+|2.0|optional|
+|3.0|optional|
+|4.0|optional|
+
+This file describes the configuration properties supported by the package, represented as a
+[json-schema](http://spacetelescope.github.io/understanding-json-schema/). Each property can specify whether or not it
+is required, a default value, as well as some basic validation.
+
+Users can then [override specific values](https://docs.mesosphere.com/1.7/usage/services/config/) at
+installation time by passing an options file to the DC/OS CLI or by setting config values through the
+DC/OS UI (since DC/OS 1.7).
+
+```json
+{
+  "type": "object",
+  "properties": {
+    "foo": {
+      "type": "object",
+      "properties": {
+        "baz": {
+          "type": "integer",
+          "description": "How many times to do baz.",
+          "minimum": 0,
+          "maximum": 16,
+          "required": false,
+          "default": 4
+        }
+      },
+      "required": ["baz"]
+    }
+  },
+  "required": ["foo"]
+}
+```
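+
+For example, a user could override the `foo.baz` default at install time with an options file (the package name and value are illustrative):
+
+```bash
+echo '{ "foo": { "baz": 8 } }' > options.json
+dcos package install foo --options=options.json
+```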
+
+
+#### `marathon.json.mustache`
+
+|Packaging Version|   |
+|-----------------|---|
+|2.0|required|
+|3.0|optional|
+|4.0|optional|
+
+This file is a [mustache template](http://mustache.github.io/) that when rendered will create a
+[Marathon](http://github.com/mesosphere/marathon) app definition capable of running your service.
+
+Variables in the mustache template will be evaluated from a union object created by merging three objects in the
+following order:
+
+1. Defaults specified in `config.json`
+
+2. User supplied options from either the DC/OS CLI or the DC/OS UI
+
+3. The contents of `resource.json`
+
+```json
+{
+  "id": "foo",
+  "cpus": 1.0,
+  "mem": 1024,
+  "instances": 1,
+  "args": ["{{{foo.baz}}}"],
+  "container": {
+    "type": "DOCKER",
+    "docker": {
+      "image": "{{resource.assets.container.docker.foo23b1cfe8e04a}}",
+      "network": "BRIDGE",
+      "portMappings": [
+        {
+          "containerPort": 8080,
+          "hostPort": 0,
+          "servicePort": 0,
+          "protocol": "tcp"
+        }
+      ]
+    }
+  }
+}
+```
+
+See the
+[Marathon API Documentation](https://mesosphere.github.io/marathon/docs/rest-api.html)
+for more detailed instruction on app definitions.
+
+#### `command.json`
+
+|Packaging Version|   |
+|-----------------|---|
+|2.0|optional|
+|3.0|optional **[Deprecated]**|
+|4.0|not supported|
+
+As of `packagingVersion` `3.0`, `command.json` is deprecated in favor of the `.cli` property of `resource.json`.
+See [CLI Resources](#cli-resources) for details.
+
+Describes how to install the package's CLI via pip, the Python package manager. This document represents the
+format of a pip requirements file, where each element in the array is a line in the requirements file.
+
+```json
+{
+  "pip": [
+    "https://pypi.python.org/packages/source/f/foo/foo-1.2.3.tar.gz"
+  ]
+}
+```
+
+Packaging version `4.0` does not support `command.json`; the presence of `command.json` in the package
+directory will cause the Universe validation to fail.
+
+#### `resource.json`
+
+|Packaging Version|   |
+|-----------------|---|
+|2.0|optional|
+|3.0|optional|
+|4.0|optional|
+
+This file contains all of the externally hosted resources (e.g. Docker images, HTTP objects, and
+images) needed to install the application.
+
+See [`repo/meta/schema/v2-resource-schema.json`](repo/meta/schema/v2-resource-schema.json) and
+[`repo/meta/schema/v3-resource-schema.json`](repo/meta/schema/v3-resource-schema.json) for the full
+json schema outlining what properties are available for each corresponding version of a package.
+
+```json
+{
+  "images": {
+    "icon-small": "http://some.org/foo/small.png",
+    "icon-medium": "http://some.org/foo/medium.png",
+    "icon-large": "http://some.org/foo/large.png",
+    "screenshots": [
+      "http://some.org/foo/screen-1.png",
+      "http://some.org/foo/screen-2.png"
+    ]
+  },
+  "assets": {
+    "uris": {
+      "log4j-properties": "http://some.org/foo/log4j.properties"
+    },
+    "container": {
+      "docker": {
+        "23b1cfe8e04a": "some-org/foo:1.0.0"
+      }
+    }
+  }
+}
+```
+
+##### Docker Images
+
+For the Docker image, please use the image ID for the referenced image. You can find this by
+pulling the image locally and running `docker images some-org/foo:1.0.0`.
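+
+For example (image name and tag are illustrative):
+
+```bash
+docker pull some-org/foo:1.0.0
+docker images some-org/foo:1.0.0    # the IMAGE ID column holds the ID to reference
+```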
+
+##### Images
+
+While `images` is an optional field, it is highly recommended you include icons and screenshots
+in `resource.json` and update the path definitions accordingly. Specifications are as follows:
+
+* `icon-small`: 48px (w) x 48px (h)
+* `icon-medium`: 96px (w) x 96px (h)
+* `icon-large`: 256px (w) x 256px (h)
+* `screenshots[...]`: 1200px (w) x 675px (h)
+
+**NOTE:** To ensure your service icons look beautiful on retina-ready displays,
+please supply 2x versions of all icons. No changes are needed to
+`resource.json` - simply supply an additional icon file with the text `@2x` in
+the name before the file extension.
+For example, the icon `icon-cassandra-small.png` would have a retina-ready
+alternate image named `icon-cassandra-small@2x.png`.
+
+##### CLI Resources
+
+|Packaging Version|   |
+|-----------------|---|
+|2.0|not supported|
+|3.0|optional|
+|4.0|optional|
+
+The new `.cli` property allows for a package to configure native CLI subcommands for several platforms and
+architectures.
+
+```json
+{
+  "cli":{
+    "binaries":{
+      "darwin":{
+        "x86-64":{
+          "contentHash":[
+            { "algo": "sha256", "value": "..." }
+          ],
+          "kind": "executable",
+          "url":"https://some.org/foo/1.0.0/cli/darwin/dcos-foo"
+        }
+      },
+      "linux":{
+        "x86-64":{
+          "contentHash":[
+            { "algo":"sha256", "value":"..." }
+          ],
+          "kind":"executable",
+          "url":"https://some.org/foo/1.0.0/cli/linux/dcos-foo"
+        }
+      },
+      "windows":{
+        "x86-64":{
+          "contentHash":[
+            { "algo":"sha256", "value":"..." }
+          ],
+          "kind":"executable",
+          "url":"https://some.org/foo/1.0.0/cli/windows/dcos-foo"
+        }
+      }
+    }
+  }
+}
+```
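+
+The `contentHash` values above can be computed from the CLI binaries themselves, e.g. (file name is illustrative):
+
+```bash
+sha256sum dcos-foo     # on macOS: shasum -a 256 dcos-foo
+```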
+
+### Submit your Package
+
+Developers are invited to publish a package containing their DC/OS Service by submitting a Pull Request targeted at
+the `version-3.x` branch of this repo.
+
+Full Instructions:
+
+1. Fork this repo and clone the fork:
+
+  ```bash
+  git clone https://github.com/<user>/universe.git /path/to/universe
+  ```
+
+2. Run the verification and build script:
+
+  ```bash
+  scripts/build.sh
+  ```
+
+3. Verify all build steps completed successfully
+4. Submit a pull request against the `version-3.x` branch with your changes. Every pull request opened will have a set
+   of automated verifications run against it. These automated verifications are reported against the pull request using
+   the GitHub status API. All verifications must pass in order for a pull request to be eligible for merge.
+
+5. Respond to manual review feedback provided by the DC/OS Community.
+  * Each Pull Request to Universe will also be manually reviewed by a member of the DC/OS Community. To ensure your
+    package can be made available to users as quickly as possible, be sure to respond to the feedback provided.
+6. Add a getting started example of how to install and use the DC/OS package. To add the example, fork the [`examples`](https://github.com/dcos/examples) repo and send in a pull request. Re-use the format from the existing examples there.
+
+
+## Repository Consumption
+
+In order for Universe to be consumed by DC/OS, the build process needs to be run to create the Universe Server.
+
+### Universe Server
+
+Universe Server is a new component introduced alongside `packagingVersion` `3.0`. In order for Universe to be able to
+provide packages for many versions of DC/OS at the same time, it is necessary for a server to be responsible for serving
+the correct set of packages to a cluster based on the cluster's version.
+
+All Pull Requests opened for Universe and the `version-3.x` branch will have their Docker image built and published
+to the DockerHub image [`mesosphere/universe-server`](https://hub.docker.com/r/mesosphere/universe-server/).
+In the artifacts tab of the build results you can find `docker/server/marathon.json` which can be used to run the
+Universe Server for testing in your DC/OS cluster.  For each Pull Request, click the details link of the "Universe Server
+Docker image" status report to view the build results.
+
+#### Build Universe Server locally
+
+1. Validate and build the Universe artifacts
+  ```bash
+  scripts/build.sh
+  ```
+
+2. Build the Universe Server Docker image
+  ```bash
+  DOCKER_TAG="my-package" docker/server/build.bash
+  ```
+  This will create a Docker image `universe-server:my-package` and `docker/server/target/marathon.json` on your local machine.
+
+3. If you would like to publish the built Docker image, run
+  ```bash
+  DOCKER_TAG="my-package" docker/server/build.bash publish
+  ```
+
+#### Run Universe Server
+
+Using the `marathon.json` that is created when building Universe Server, we can run a Universe Server in our DC/OS
+Cluster, which can then be used to install packages.
+
+Run the following commands to configure DC/OS to use the custom Universe Server (DC/OS 1.8+):
+
+```bash
+dcos marathon app add marathon.json
+dcos package repo add --index=0 dev-universe http://universe.marathon.mesos:8085/repo
+```
+
+For DC/OS 1.7, a different URL must be used:
+
+```bash
+dcos marathon app add marathon.json
+dcos package repo add --index=0 dev-universe http://universe.marathon.mesos:8085/repo-1.7
+```
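+
+To confirm the repository was registered with the cluster:
+
+```bash
+dcos package repo list
+```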
+
+### Consumption Protocol
+
+A DC/OS Cluster can be configured to point to multiple Universe Servers; each Universe Server will be fetched via
+HTTPS or HTTP. When a DC/OS Cluster attempts to fetch the package set from a Universe Server, the Universe Server
+will provide ONLY those packages which can be run on the cluster.
+
+For example:
+A DC/OS 1.6.1 Cluster will only receive packages with a `minDcosReleaseVersion` less than or equal to (`<=`) `1.6.1`
+in the format the DC/OS Cluster expects.
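+
+The filtering is driven by the request headers DC/OS sends; a fetch from a Universe Server looks roughly like the following (the URL matches the dev setup above, and the exact `Accept` media type may differ per DC/OS version):
+
+```bash
+curl -s "http://universe.marathon.mesos:8085/repo" \
+  -H 'User-Agent: dcos/1.10' \
+  -H 'Accept: application/vnd.dcos.universe.repo+json;charset=utf-8;version=v4'
+```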
+
+```
+ +----------------------+   +-----------------------+
+ │public universe server│   │private universe server│
+ +----------------------+   +-----------------------+
+                http \         / http
+                      \       /
+                       \     /
+                       +-----+           +--------+
+                       │DC/OS│-----------│Marathon│
+                       +-----+    http   +--------+
+```
+
+## Supported DC/OS Versions
+Currently, Universe Server supports the following versions of DC/OS:
+
+| DC/OS Release Version | Support Level |
+|-----------------------|---------------|
+| 1.9                   | Full Support  |
+| 1.10                  | Full Support  |
+| 1.11                  | Full Support  |
diff --git a/dcos-universe/converter/Dockerfile b/dcos-universe/converter/Dockerfile
new file mode 100644
index 0000000..21bac8a
--- /dev/null
+++ b/dcos-universe/converter/Dockerfile
@@ -0,0 +1,17 @@
+# Use an official Python runtime as a base image
+FROM python:3
+
+# Set the working directory to the /converter directory
+WORKDIR /converter
+
+# Copy the required contents into the container at /converter
+ADD converter/service/converter.py          /converter
+ADD scripts/gen_universe.py                 /converter
+ADD repo/meta/schema                        /converter/repo/meta/schema
+ADD scripts/requirements/requirements.txt   /converter/requirements.txt
+
+# Install the required dependencies
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Run converter.py when the container launches. -u to 'unbuffer' the output
+CMD ["python3", "-u", "converter.py"]
diff --git a/dcos-universe/converter/build.bash b/dcos-universe/converter/build.bash
new file mode 100755
index 0000000..578a599
--- /dev/null
+++ b/dcos-universe/converter/build.bash
@@ -0,0 +1,50 @@
+#!/bin/bash
+set -o errexit -o nounset -o pipefail
+
+DOCKER_TAG=${DOCKER_TAG:-"dev"}
+DOCKER_IMAGE=${DOCKER_IMAGE:-"mesosphere/universe-converter"}
+DOCKER_IMAGE_AND_TAG="${DOCKER_IMAGE}:${DOCKER_TAG}"
+
+CONVERTER_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+function clean {
+
+  rm -rf ${CONVERTER_DIR}/target
+
+}
+
+function build {
+
+  # Check if the jq processor is installed correctly
+  if ! command -v jq > /dev/null 2>&1; then
+    echo "jq is required. Please install!"
+    exit 1
+  fi
+
+  mkdir -p ${CONVERTER_DIR}/target
+
+  msg "Building docker image ${DOCKER_IMAGE_AND_TAG}"
+  docker build -t "${DOCKER_IMAGE_AND_TAG}" -f converter/Dockerfile .
+  msg "Built docker image ${DOCKER_IMAGE_AND_TAG}"
+
+  cat ${CONVERTER_DIR}/marathon.json | jq ".container.docker.image |= \"${DOCKER_IMAGE_AND_TAG}\"" > ${CONVERTER_DIR}/target/marathon.json
+
+  msg "Output written to ${CONVERTER_DIR}/target/marathon.json"
+
+}
+
+function publish {
+
+  docker push "${DOCKER_IMAGE_AND_TAG}"
+
+}
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function println { printf '%s\n' "$(now) $*" ;}
+function msg { println "$*" >&2 ;}
+
+######################### Delegates to subcommands or runs main, as appropriate
+if [[ ${1:-} ]] && declare -F | cut -d' ' -f3 | fgrep -qx -- "${1:-}"
+then "$@"
+else build
+fi
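+
+# Example usage, assuming the script is run from the dcos-universe root directory
+# (DOCKER_TAG defaults to "dev"; the optional "publish" argument pushes the image built above):
+#   DOCKER_TAG="my-tag" converter/build.bash
+#   DOCKER_TAG="my-tag" converter/build.bash publish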
diff --git a/dcos-universe/converter/marathon.json b/dcos-universe/converter/marathon.json
new file mode 100644
index 0000000..312d69e
--- /dev/null
+++ b/dcos-universe/converter/marathon.json
@@ -0,0 +1,34 @@
+{
+  "id": "/transform",
+  "instances": 1,
+  "cpus": 0.25,
+  "mem": 128,
+  "requirePorts": true,
+  "container": {
+    "type": "DOCKER",
+    "docker": {
+      "image": "mesosphere/universe-converter:dev"
+    }
+  },
+  "env" : {
+    "MAX_REPO_SIZE" : "20",
+    "LOGLEVEL" : "INFO"
+  },
+  "networks" : [
+    {
+      "mode": "HOST"
+    }
+  ],
+  "portDefinitions": [
+    {
+      "name" : "universeconverter",
+      "port" : 8086,
+      "protocol" : "tcp"
+    }
+  ],
+  "labels": {
+    "HAPROXY_GROUP": "external",
+    "HAPROXY_0_VHOST": "universe-converter.mesosphere.com",
+    "HAPROXY_0_REDIRECT_TO_HTTPS": "true"
+  }
+}
diff --git a/dcos-universe/converter/service/__init__.py b/dcos-universe/converter/service/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/dcos-universe/converter/service/converter.py b/dcos-universe/converter/service/converter.py
new file mode 100644
index 0000000..9a94f67
--- /dev/null
+++ b/dcos-universe/converter/service/converter.py
@@ -0,0 +1,226 @@
+#!/usr/bin/env python3
+
+import gen_universe
+import json
+import logging
+import os
+import re
+
+from enum import Enum
+from http import HTTPStatus
+from http.server import HTTPServer, BaseHTTPRequestHandler
+from urllib.error import URLError, HTTPError
+from urllib.parse import parse_qsl, urlparse
+from urllib.request import Request, urlopen
+
+
+# Binds to all available interfaces
+HOST_NAME = ''
+# Gets the port number from $PORT0 environment variable
+PORT_NUMBER = int(os.environ['PORT_UNIVERSECONVERTER'])
+MAX_REPO_SIZE = int(os.environ.get('MAX_REPO_SIZE', '20'))
+
+# Constants
+MAX_TIMEOUT = 60
+MAX_BYTES = MAX_REPO_SIZE * 1024 * 1024
+
+header_user_agent = 'User-Agent'
+header_accept = 'Accept'
+header_content_type = 'Content-Type'
+header_content_length = 'Content-Length'
+param_charset = 'charset'
+default_charset = 'utf-8'
+
+json_key_packages = 'packages'
+param_url = 'url'
+url_path = '/transform'
+
+
+def run_server(server_class=HTTPServer):
+    """Runs a builtin python server using the given server_class.
+
+    :param server_class: server
+    :type server_class: HTTPServer
+    :return: None
+    """
+    server_address = (HOST_NAME, PORT_NUMBER)
+    httpd = server_class(server_address, Handler)
+    logger.warning('Server Starts on port - %s', PORT_NUMBER)
+    try:
+        httpd.serve_forever()
+    except KeyboardInterrupt:
+        httpd.server_close()
+        logger.warning('Server Stops on port - %s', PORT_NUMBER)
+
+
+class Handler(BaseHTTPRequestHandler):
+    def do_GET(s):
+        """
+        Respond to the GET request. The expected format of this request is:
+                http://<host>:<port>/transform?url=<url> with `User-Agent`
+                and `Accept` headers
+        """
+        errors = _validate_request(s)
+        if errors:
+            s.send_error(HTTPStatus.BAD_REQUEST, explain=errors)
+            return
+
+        query = dict(parse_qsl(urlparse(s.path).query))
+        if param_url not in query:
+            s.send_error(HTTPStatus.BAD_REQUEST,
+                         explain=ErrorResponse.PARAM_NOT_PRESENT.to_msg(param_url))
+            return
+
+        logging.debug(">>>>>>>>>")
+
+        user_agent = s.headers.get(header_user_agent)
+        accept = s.headers.get(header_accept)
+        decoded_url = query.get(param_url)
+
+        try:
+            json_response = handle(decoded_url, user_agent, accept)
+        except Exception as e:
+            s.send_error(HTTPStatus.BAD_REQUEST, explain=str(e))
+            return
+
+        s.send_response(HTTPStatus.OK)
+        content_header = gen_universe.format_universe_repo_content_type(
+            _get_repo_version(accept))
+        s.send_header(header_content_type, content_header)
+        s.send_header(header_content_length, len(json_response))
+        s.end_headers()
+        s.wfile.write(json_response.encode())
+
+
+def handle(decoded_url, user_agent, accept):
+    """Returns the requested json data. May raise an error instead, if it fails.
+
+    :param decoded_url: The url to be fetched from
+    :type decoded_url: str
+    :param user_agent: User-Agent header value
+    :type user_agent: str
+    :param accept: Accept header value
+    :return Requested json data
+    :rtype str (a valid json object)
+    """
+    logger.debug('Url : %s\n\tUser-Agent : %s\n\tAccept : %s',
+                 decoded_url, user_agent, accept)
+    repo_version = _get_repo_version(accept)
+    dcos_version = _get_dcos_version(user_agent)
+    logger.debug('Version %s\nDC/OS %s', repo_version, dcos_version)
+
+    req = Request(decoded_url)
+    req.add_header(header_user_agent, user_agent)
+    req.add_header(header_accept, accept)
+    try:
+        with urlopen(req, timeout=MAX_TIMEOUT) as res:
+            charset = res.info().get_param(param_charset) or default_charset
+
+            if header_content_length not in res.headers:
+                raise ValueError(ErrorResponse.ENDPOINT_HEADER_MISS.to_msg())
+
+            if int(res.headers.get(header_content_length)) > MAX_BYTES:
+                raise ValueError(ErrorResponse.MAX_SIZE.to_msg())
+
+            raw_data = res.read()
+        packages = json.loads(raw_data.decode(charset)).get(json_key_packages)
+    except (HTTPError, URLError) as error:
+        logger.info("Request protocol error %s", decoded_url)
+        logger.exception(error)
+        raise error
+
+    return render_json(packages, dcos_version, repo_version)
+
+
+def render_json(packages, dcos_version, repo_version):
+    """Returns the json
+
+    :param packages: package dictionary
+    :type packages: dict
+    :param dcos_version: version of dcos
+    :type dcos_version: str
+    :param repo_version: version of universe repo
+    :type repo_version: str
+    :return filtered json data based on parameters
+    :rtype str
+    """
+    processed_packages = gen_universe.filter_and_downgrade_packages_by_version(
+        packages,
+        dcos_version
+    )
+    packages_dict = {json_key_packages: processed_packages}
+    errors = gen_universe.validate_repo_with_schema(
+        packages_dict,
+        repo_version
+    )
+    if len(errors) != 0:
+        logger.error(errors)
+        raise ValueError(ErrorResponse.VALIDATION_ERROR.to_msg(errors))
+    return json.dumps(packages_dict)
+
+
+def _validate_request(s):
+    """
+
+    :param s: The in built base http request handler
+    :type s: BaseHTTPRequestHandler
+    :return Error message (if any)
+    :rtype String or None
+    """
+    if not urlparse(s.path).path == url_path:
+        return ErrorResponse.INVALID_PATH.to_msg(s.path)
+
+    if header_user_agent not in s.headers:
+        return ErrorResponse.HEADER_NOT_PRESENT.to_msg(header_user_agent)
+
+    if header_accept not in s.headers:
+        return ErrorResponse.HEADER_NOT_PRESENT.to_msg(header_accept)
+
+
+def _get_repo_version(accept_header):
+    """Returns the version of the universe repo parsed.
+
+    :param accept_header: String
+    :return repo version as a string or raises Error
+    :rtype str or raises an Error
+    """
+    result = re.findall(r'\bversion=v\d', accept_header)
+    if result is None or len(result) == 0:
+        raise ValueError(ErrorResponse.UNABLE_PARSE.to_msg(accept_header))
+    result.sort(reverse=True)
+    return str(result[0].split('=')[1])
+
+
+def _get_dcos_version(user_agent_header):
+    """Parses the version of dcos from the specified header.
+
+    :param user_agent_header: String
+    :return dcos version as a string or raises an Error
+    :rtype str or raises an Error
+    """
+    result = re.search(r'\bdcos/\b\d\.\d{1,2}', user_agent_header)
+    if result is None:
+        raise ValueError(ErrorResponse.UNABLE_PARSE.to_msg(user_agent_header))
+    return str(result.group().split('/')[1])
+
+
+class ErrorResponse(Enum):
+    INVALID_PATH = 'URL Path {} is invalid. Expected path /transform'
+    HEADER_NOT_PRESENT = 'Header {} is missing'
+    PARAM_NOT_PRESENT = 'Request parameter {} is missing'
+    UNABLE_PARSE = 'Unable to parse header {}'
+    VALIDATION_ERROR = 'Validation errors during processing {}'
+    MAX_SIZE = 'Endpoint response exceeds maximum content size'
+    ENDPOINT_HEADER_MISS = 'Endpoint doesn\'t return Content-Length header'
+
+    def to_msg(self, *args):
+        # Unpack the arguments so each one fills a `{}` placeholder in the message.
+        return self.value.format(*args)
+
+
+if __name__ == '__main__':
+    logger = logging.getLogger(__name__)
+    logging.basicConfig(
+        level=os.environ.get("LOGLEVEL", "INFO"),
+        format='%(asctime)s [%(levelname)s] %(message)s'
+    )
+    run_server()
diff --git a/dcos-universe/docker/local-universe/Dockerfile b/dcos-universe/docker/local-universe/Dockerfile
new file mode 100644
index 0000000..96111cf
--- /dev/null
+++ b/dcos-universe/docker/local-universe/Dockerfile
@@ -0,0 +1,10 @@
+FROM universe-base
+
+COPY http /usr/share/nginx/html/
+COPY registry /var/lib/registry/
+COPY universe/target/* /usr/share/nginx/html/
+
+RUN chmod 755 /usr/share/nginx/html/ \
+  && for f in $(find /usr/share/nginx/html/ -type f);do chmod 644 $f;done \
+  && for d in $(find /usr/share/nginx/html/ -type d);do chmod 755 $d;done \
+  && ls -alR /usr/share/nginx/html/
diff --git a/dcos-universe/docker/local-universe/Dockerfile.base b/dcos-universe/docker/local-universe/Dockerfile.base
new file mode 100644
index 0000000..dcc45c0
--- /dev/null
+++ b/dcos-universe/docker/local-universe/Dockerfile.base
@@ -0,0 +1,29 @@
+FROM registry:2.4.1
+
+RUN apt-key adv --keyserver hkp://pgp.mit.edu:80 --recv-keys 573BFD6B3D8FBC641079A6ABABF5BD827BD9BF62 \
+  && echo "deb http://nginx.org/packages/debian/ jessie nginx" >> /etc/apt/sources.list \
+  && apt-get update \
+  && apt-get install --no-install-recommends --no-install-suggests -y \
+            ca-certificates \
+            nginx \
+            nginx-module-xslt \
+            nginx-module-geoip \
+            nginx-module-image-filter \
+            nginx-module-perl \
+            nginx-module-njs \
+            gettext-base \
+  && rm -rf /var/lib/apt/lists/*
+
+# forward request and error logs to docker log collector
+RUN ln -sf /dev/stdout /var/log/nginx/access.log \
+  && ln -sf /dev/stderr /var/log/nginx/error.log
+
+COPY certs /certs
+COPY certs/domain.crt /usr/share/nginx/html/certs/domain.crt
+
+COPY registry-config.yml /etc/docker/registry/config.yml
+COPY default.conf /etc/nginx/conf.d/default.conf
+
+EXPOSE 80 443 5000
+
+ENTRYPOINT []
diff --git a/dcos-universe/docker/local-universe/Dockerfile.static b/dcos-universe/docker/local-universe/Dockerfile.static
new file mode 100644
index 0000000..cc6c14f
--- /dev/null
+++ b/dcos-universe/docker/local-universe/Dockerfile.static
@@ -0,0 +1,30 @@
+## The versions in this file are hardcoded for audit purposes, and should not be changed.
+FROM registry:2.4.1
+
+ENV NGINX_VERSION 1.12.0-1~jessie
+ENV NJS_VERSION 1.12.0.0.1.10-1~jessie
+ENV GETTEXT_VERSION 0.19.3-2
+
+RUN apt-key adv --keyserver hkp://p80.pool.sks-keyservers.net:80 --recv-keys 573BFD6B3D8FBC641079A6ABABF5BD827BD9BF62 \
+  && echo "deb http://nginx.org/packages/debian/ jessie nginx" >> /etc/apt/sources.list \
+  && apt-get update \
+  && apt-get install --no-install-recommends --no-install-suggests -y \
+            nginx=${NGINX_VERSION} \
+            nginx-module-xslt=${NGINX_VERSION} \
+            nginx-module-geoip=${NGINX_VERSION} \
+            nginx-module-image-filter=${NGINX_VERSION} \
+            nginx-module-perl=${NGINX_VERSION} \
+            nginx-module-njs=${NJS_VERSION} \
+            gettext-base=${GETTEXT_VERSION} \
+  && rm -rf /var/lib/apt/lists/*
+
+# forward request and error logs to docker log collector
+RUN ln -sf /dev/stdout /var/log/nginx/access.log \
+  && ln -sf /dev/stderr /var/log/nginx/error.log
+
+COPY registry-config.yml /etc/docker/registry/config.yml
+COPY default.conf /etc/nginx/conf.d/default.conf
+
+EXPOSE 80 443 5000
+
+ENTRYPOINT []
diff --git a/dcos-universe/docker/local-universe/Dockerfile.static.base b/dcos-universe/docker/local-universe/Dockerfile.static.base
new file mode 100644
index 0000000..71c5c4b
--- /dev/null
+++ b/dcos-universe/docker/local-universe/Dockerfile.static.base
@@ -0,0 +1,5 @@
+# This adds custom certificates to the static universe image.
+FROM universe-static:0.2-2
+
+COPY certs /certs
+COPY certs/domain.crt /usr/share/nginx/html/certs/domain.crt
diff --git a/dcos-universe/docker/local-universe/Makefile b/dcos-universe/docker/local-universe/Makefile
new file mode 100644
index 0000000..70cb6ad
--- /dev/null
+++ b/dcos-universe/docker/local-universe/Makefile
@@ -0,0 +1,54 @@
+REPO_BASE_DIR ?= $(shell pwd -P)/../..
+static_version ?= 0.2-2
+static_image ?= universe-static:$(static_version)
+
+.PHONY: certs base clean gen-universe
+
+certs:
+	mkdir certs && openssl req \
+		-newkey rsa:4096 -nodes -sha256 -keyout certs/domain.key \
+		-x509 -days 365 -out certs/domain.crt \
+		-subj "/CN=master.mesos"
+
+base: clean certs
+	docker build -t universe-base -f Dockerfile.base .
+
+clean:
+	rm -rf certs
+	rm -f local-universe.tar.gz
+
+static-build:
+	docker build -t $(static_image) -f Dockerfile.static .
+
+static-online:
+	docker pull mesosphere/$(static_image)
+	docker tag mesosphere/$(static_image) $(static_image)
+
+static-base:
+	docker build -t universe-base -f Dockerfile.static.base .
+
+ifdef DCOS_PACKAGE_INCLUDE
+gen-universe: clean
+	python3 $(REPO_BASE_DIR)/scripts/local-universe.py \
+		--repository $(REPO_BASE_DIR)/repo/packages/ \
+		--include "$(DCOS_PACKAGE_INCLUDE)" \
+		--dcos_version "$(DCOS_VERSION)"
+else
+gen-universe: clean
+	python3 $(REPO_BASE_DIR)/scripts/local-universe.py \
+		--repository $(REPO_BASE_DIR)/repo/packages/ \
+		--selected \
+		--dcos_version "$(DCOS_VERSION)"
+endif
+
+local-universe: gen-universe clean
+	docker save -o local-universe.tar mesosphere/universe:latest
+	gzip local-universe.tar
+
+old-local-universe: clean
+	python3 $(REPO_BASE_DIR)/scripts/old-local-universe.py \
+		--repository $(REPO_BASE_DIR)/repo/packages/ \
+		--selected \
+		--dcos_version "$(DCOS_VERSION)"
+	docker save -o local-universe.tar mesosphere/universe:latest
+	gzip local-universe.tar
diff --git a/dcos-universe/docker/local-universe/README.md b/dcos-universe/docker/local-universe/README.md
new file mode 100644
index 0000000..cca50fe
--- /dev/null
+++ b/dcos-universe/docker/local-universe/README.md
@@ -0,0 +1,250 @@
+
+## Using
+
+1. If you're using the latest stable release of DC/OS, then download the
+[local-universe](https://downloads.mesosphere.com/universe/public/local-universe.tar.gz) container
+to each of your masters. Otherwise, you'll need to build the container for your DC/OS version (see
+the "Building Your Own" section below) and copy it to each of your masters.
+
+1. Load the container into the local docker instance on each master:
+
+    ```bash
+    $ docker load < local-universe.tar.gz
+    ```
+
+1. Add the [`dcos-local-universe-http.service`](dcos-local-universe-http.service) definition to
+each of your masters at `/etc/systemd/system/dcos-local-universe-http.service` and then start it.
+
+    ```bash
+    $ cp dcos-local-universe-http.service /etc/systemd/system/dcos-local-universe-http.service
+    $ systemctl daemon-reload
+    $ systemctl start dcos-local-universe-http
+    ```
+
+1. Add the [`dcos-local-universe-registry.service`](dcos-local-universe-registry.service)
+definition to each of your masters at `/etc/systemd/system/dcos-local-universe-registry.service`
+and then start it.
+
+    ```bash
+    $ cp dcos-local-universe-registry.service /etc/systemd/system/dcos-local-universe-registry.service
+    $ systemctl daemon-reload
+    $ systemctl start dcos-local-universe-registry
+    ```
+
+1. Remove the built-in repositories from the host on which you have the DC/OS CLI installed
+(alternatively, these can be removed from the DC/OS UI under System > Repositories).
+
+    ```bash
+    $ dcos package repo remove Universe
+    $ dcos package repo remove Universe-1.7
+    ```
+
+1. Add the local repository by using the DCOS-CLI.
+
+    ```bash
+    $ dcos package repo add local-universe http://master.mesos:8082/repo
+    ```
+
+1. To pull from this new repository, you'll need to set up the Docker daemon on every agent with
+a valid SSL certificate. To do this, on every agent in your cluster, run the following:
+
+    ```bash
+    $ mkdir -p /etc/docker/certs.d/master.mesos:5000
+    $ curl -o /etc/docker/certs.d/master.mesos:5000/ca.crt http://master.mesos:8082/certs/domain.crt
+    $ systemctl restart docker
+    ```
+
+    Note that you're welcome to use the instructions for insecure registries instead of this step.
+    We don't recommend this.
+
+### FAQ
+
+- I can't install CLI subcommands.
+
+    Packages are being hosted at `master.mesos:8082`. If you cannot resolve (or connect) to that
+    from your DC/OS CLI install, you won't be able to install subcommands. If you're able to
+    connect to port 8082 on your masters, the easiest way around this is adding the IP for one of
+    the masters to `/etc/hosts`.  See also [Outside Resources](#outside-resources) below.
+
+- The images are broken!
+
+    We host everything from inside your cluster, including the images. They're getting served up by
+    `master.mesos:8082`. If you have connectivity to that IP, you can add it to `/etc/hosts` and
+    get the images working.   See also [Outside Resources](#outside-resources) below.
+
+- I don't see the package I was looking for!
+
+    By default, we only bundle the `selected` packages. If you'd like to include something else,
+    please see the "Building Your Own" section below.
+
+## Building Your Own
+
+1. Both nginx and the docker registry get bundled into the same container. This requires building
+the "universe-base" container before you actually compile the universe container.
+
+    ```bash
+    $ sudo make base
+    ```
+
+1. Once you've built the "universe-base" container, you'll be able to create a local-universe one.
+To keep size and time down, it is common to select only what you'd like to see. By default,
+`selected` applications are the only ones included. You can pass a list in if you'd like to see
+something more than that.
+
+    ```bash
+    $ sudo make DCOS_VERSION=<your DC/OS version> local-universe
+    ```
+
+    If you want to specify the set of package names and versions that get included in the image,
+    you can use the `DCOS_PACKAGE_INCLUDE` variable. For example, to include two versions of
+    Cassandra and one version of Marathon, you can execute the following command.
+
+    ```bash
+    $ sudo make DCOS_VERSION=<your DC/OS version> DCOS_PACKAGE_INCLUDE="cassandra:1.0.25-3.0.10,cassandra:1.0.24-3.0.10,marathon:1.4.2" local-universe
+    ```
+
+### Building Your Own, off a non-changing universe-static base image.
+
+Mesosphere provides a `mesosphere/universe-static` Docker image, which has all of the core
+requirements to run a local universe.  All that must be added are your own certificates and repo
+contents.
+
+1. Use the make command to download the static image
+
+    ```bash
+    $ sudo make static-online
+    ## Will pull and re-tag mesosphere/universe-static to universe-static
+    ```
+
+1. Create your certs (either use `make certs` or generate your own), and add them to the static
+image to create a `universe-base` image
+
+    ```bash
+    $ sudo make static-base
+    ## Will add certs to universe-static and create universe-base
+    ```
+
+1. Add content (see above)
+
+    ```bash
+    $ sudo make DCOS_VERSION=<your DC/OS version> local-universe
+    ```
+
+To generate your own static image, use and/or modify the make target `static-build` (e.g.,
+`make static-build` instead of `make static-online`)
+
+This leaves three total options:
+
+```bash
+make base
+make DCOS_VERSION=<your DC/OS version> local-universe
+```
+
+```bash
+make static-online
+make static-base
+make DCOS_VERSION=<your DC/OS version> local-universe
+```
+
+```bash
+make static-build
+make static-base
+make DCOS_VERSION=<your DC/OS version> local-universe
+```
+
+### Building an old version of Local Universe
+The latest version of Local Universe has changed its directory structure in a way that is not
+compatible with the old version. If you need to create an old version of Local Universe with the
+old directory structure, use the following command instead of the `local-universe` make target
+described previously.
+
+```bash
+$ sudo make DCOS_VERSION=<your DC/OS version> old-local-universe
+```
+
+### Outside Resources
+
+As a workaround for the image and CLI resource issues in [the FAQ above](#faq), you can place those
+assets outside of the cluster.
+
+1. Place your CLI and image resources on a web server accessible to CLI and web UI users.
+
+2. Edit the URLs in the `resource.json` file of your package of interest to point to each of those
+resources, in the `images` and `cli` sections.
+
+3. Edit the Makefile so the call to `local-universe.py` includes the arguments `--nonlocal_images`
+and `--nonlocal_cli` (see the sketch after this list).
+
+4. Proceed with [Building Your Own](#building-your-own) as above.
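+
+As a rough sketch only, this is what the default `gen-universe` target from the Makefile shown
+earlier might look like with the two extra arguments appended (the exact target you edit may
+differ in your setup):
+
+```
+gen-universe: clean
+	python3 $(REPO_BASE_DIR)/scripts/local-universe.py \
+		--repository $(REPO_BASE_DIR)/repo/packages/ \
+		--selected \
+		--dcos_version "$(DCOS_VERSION)" \
+		--nonlocal_images \
+		--nonlocal_cli
+```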
+
+## Running Local Universe as a Marathon Service
+
+Instead of deploying to each of your masters, you can easily run a local universe as a regular
+Marathon service.
+
+1. Edit Makefile so the call to `local-universe.py` includes the `--server_url` argument indicating
+your choice of internal service name and port. For example, if you want dev-universe, then add
+`--server_url http://dev-universe.marathon.mesos:8085`.
+
+2. Build a container as per [Building Your Own](#building-your-own) above.
+
+3. Deploy the container to your cluster as you would one of your regular apps.
+
+4. Launch a single instance of your universe service, specifying health checks. Here's an example
+Marathon app:
+
+```json
+{
+  "id": "/dev-universe",
+  "instances": 1,
+  "cpus": 0.25,
+  "mem": 128,
+  "requirePorts": true,
+  "container": {
+    "type": "DOCKER",
+    "docker": {
+      "network": "BRIDGE",
+      "image": "your_image_location:latest",
+      "forcePullImage": true,
+      "portMappings": [
+        {
+          "containerPort": 80,
+          "hostPort": 8085,
+          "protocol": "tcp"
+        }
+      ]
+    },
+    "volumes": []
+  },
+  "cmd": "nginx -g 'daemon off;'",
+  "fetch": [ ],
+  "healthChecks": [
+    {
+      "gracePeriodSeconds": 120,
+      "intervalSeconds": 30,
+      "maxConsecutiveFailures": 3,
+      "path": "/repo-empty-v3.json",
+      "portIndex": 0,
+      "protocol": "HTTP",
+      "timeoutSeconds": 5
+    }
+  ],
+  "constraints": [
+    [
+      "hostname",
+      "UNIQUE"
+    ]
+  ]
+}
+```
+
+## Making changes...
+
+###  ...to the `universe-static` image
+
+1. Update `Dockerfile.static` with the changes
+1. Bump the default value of `static_version` in `Makefile`
+1. Create a pull request with the above changes
+1. Once it is merged, run the [Publish universe-static image](https://teamcity.mesosphere.io/viewType.html?buildTypeId=Oss_Universe_2_PublishUniverseStaticImage&branch_Oss_Universe_2=%3Cdefault%3E&tab=buildTypeStatusDiv) build, providing the new version number when prompted
+1. Submit a pull request that updates the `FROM universe-static:...` line in
+`Dockerfile.static.base` to use the new version
diff --git a/dcos-universe/docker/local-universe/dcos-local-universe-http.service b/dcos-universe/docker/local-universe/dcos-local-universe-http.service
new file mode 100644
index 0000000..22cc569
--- /dev/null
+++ b/dcos-universe/docker/local-universe/dcos-local-universe-http.service
@@ -0,0 +1,16 @@
+[Unit]
+Description=DCOS: Serve the local universe (HTTP)
+After=docker.service
+
+[Service]
+Restart=always
+StartLimitInterval=0
+RestartSec=15
+TimeoutStartSec=120
+TimeoutStopSec=15
+ExecStartPre=-/usr/bin/docker kill %n
+ExecStartPre=-/usr/bin/docker rm %n
+ExecStart=/usr/bin/docker run --rm --name %n -p 8082:80 mesosphere/universe nginx -g "daemon off;"
+
+[Install]
+WantedBy=multi-user.target
diff --git a/dcos-universe/docker/local-universe/dcos-local-universe-registry.service b/dcos-universe/docker/local-universe/dcos-local-universe-registry.service
new file mode 100644
index 0000000..f251fff
--- /dev/null
+++ b/dcos-universe/docker/local-universe/dcos-local-universe-registry.service
@@ -0,0 +1,16 @@
+[Unit]
+Description=DCOS: Serve the local universe (Docker registry)
+After=docker.service
+
+[Service]
+Restart=always
+StartLimitInterval=0
+RestartSec=15
+TimeoutStartSec=120
+TimeoutStopSec=15
+ExecStartPre=-/usr/bin/docker kill %n
+ExecStartPre=-/usr/bin/docker rm %n
+ExecStart=/usr/bin/docker run --rm --name %n -p 5000:5000 -e REGISTRY_HTTP_TLS_CERTIFICATE=/certs/domain.crt -e REGISTRY_HTTP_TLS_KEY=/certs/domain.key mesosphere/universe registry serve /etc/docker/registry/config.yml
+
+[Install]
+WantedBy=multi-user.target
diff --git a/dcos-universe/docker/local-universe/default.conf b/dcos-universe/docker/local-universe/default.conf
new file mode 100644
index 0000000..455b9c9
--- /dev/null
+++ b/dcos-universe/docker/local-universe/default.conf
@@ -0,0 +1,95 @@
+server {
+  listen 80;
+  access_log /dev/stdout;
+  error_log /dev/stdout;
+
+  root   /usr/share/nginx/html;
+  index  index.html index.htm;
+
+  gzip_static on; # nginx will automatically search for a .gz version of a file and serve it if it exists
+
+  set $dcos_release_version 1.6.1;
+
+  set $serve_json false;
+
+  if ($http_accept ~ ".*application/vnd\.dcos\.universe\.repo\+json;charset=utf-8;version=v3.*") {
+    # The Accept header contains the v3 repo schema
+    set $serve_json true;
+    set $universe_version v3;
+  }
+
+  if ($http_accept ~ ".*application/vnd\.dcos\.universe\.repo\+json;charset=utf-8;version=v4.*") {
+    # The Accept header contains the v4 repo schema
+    set $serve_json true;
+    set $universe_version v4;
+  }
+
+  if ($http_user_agent ~ ".*dcos\/1\.8.*") {
+    set $dcos_release_version 1.8;
+  }
+
+  if ($http_user_agent ~ ".*dcos\/1\.9.*") {
+    set $dcos_release_version 1.9;
+  }
+
+  if ($http_user_agent ~ ".*dcos\/1\.10.*") {
+    set $dcos_release_version 1.10;
+  }
+
+  if ($http_user_agent ~ ".*dcos\/1\.11.*") {
+    set $dcos_release_version 1.11;
+  }
+
+  location = /repo-1.7 {
+    types {
+      "application/vnd.dcos.universe.repo+json;charset=utf-8;version=v3" json;
+      application/zip zip;
+    }
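+    # nginx "if" cannot evaluate two conditions at once, so flags are accumulated
+    # into a string here and the combined value is matched below.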
+    set $compound_if "";
+    if ($serve_json = true) {
+      set $compound_if "${compound_if}json";
+    }
+    if ($dcos_release_version != "1.7") {
+      set $compound_if "${compound_if}notoneseven";
+    }
+
+    if ($compound_if ~ "jsonnotoneseven") {
+      rewrite ^/repo-1.7$ /repo-empty-v3.json break;
+    }
+
+    rewrite ^/repo-1.7$ /repo-up-to-1.7.zip break;
+  }
+
+  location = /repo {
+    types {
+      application/zip zip;
+    }
+
+    if ($universe_version = v3) {
+      rewrite ^/repo$ /v3;
+    }
+
+    if ($universe_version = v4) {
+      rewrite ^/repo$ /v4;
+    }
+
+    rewrite ^/repo$ /repo-up-to-$dcos_release_version.zip break;
+  }
+
+  location = /v3 {
+    internal;
+    types {
+      "application/vnd.dcos.universe.repo+json;charset=utf-8;version=v3" json;
+    }
+    rewrite ^/v3$ /repo-up-to-$dcos_release_version.json break;
+  }
+
+  location = /v4 {
+    internal;
+    types {
+      "application/vnd.dcos.universe.repo+json;charset=utf-8;version=v4" json;
+    }
+    rewrite ^/v4$ /repo-up-to-$dcos_release_version.json break;
+  }
+
+}
diff --git a/dcos-universe/docker/local-universe/docker-compose.yml b/dcos-universe/docker/local-universe/docker-compose.yml
new file mode 100644
index 0000000..c707a65
--- /dev/null
+++ b/dcos-universe/docker/local-universe/docker-compose.yml
@@ -0,0 +1,15 @@
+nginx:
+  image: mesosphere/universe
+  container_name: universe-http
+  ports:
+    - "8082:80"
+  command: nginx -g "daemon off;"
+registry:
+  image: mesosphere/universe
+  container_name: universe-registry
+  environment:
+      - REGISTRY_HTTP_TLS_CERTIFICATE=/certs/domain.crt
+      - REGISTRY_HTTP_TLS_KEY=/certs/domain.key
+  ports:
+    - "5000:5000"
+  command: registry serve /etc/docker/registry/config.yml
diff --git a/dcos-universe/docker/local-universe/registry-config.yml b/dcos-universe/docker/local-universe/registry-config.yml
new file mode 100644
index 0000000..b12a7a0
--- /dev/null
+++ b/dcos-universe/docker/local-universe/registry-config.yml
@@ -0,0 +1,23 @@
+version: 0.1
+log:
+  fields:
+    service: registry
+storage:
+    cache:
+        blobdescriptor: inmemory
+    filesystem:
+        rootdirectory: /var/lib/registry
+http:
+    addr: :5000
+    headers:
+        X-Content-Type-Options: [nosniff]
+health:
+  storagedriver:
+    enabled: true
+    interval: 10s
+    threshold: 3
+  maintenance:
+    uploadpurging:
+      enabled: false
+    readonly:
+      enabled: true
diff --git a/dcos-universe/docker/server/Dockerfile b/dcos-universe/docker/server/Dockerfile
new file mode 100644
index 0000000..354ba2c
--- /dev/null
+++ b/dcos-universe/docker/server/Dockerfile
@@ -0,0 +1,15 @@
+FROM nginx:1.10.0-alpine
+
+MAINTAINER Mesosphere Support <su...@mesosphere.io>
+
+RUN rm -f /etc/nginx/conf.d/default.conf /etc/nginx/conf.d/example_ssl.conf
+
+COPY nginx/ /
+
+COPY target/ /etc/nginx/html
+
+RUN  chmod 755 /etc \
+  && chmod 755 /etc/nginx \
+  && for f in $(find /etc/nginx/html -type f);do chmod 644 $f;done \
+  && for d in $(find /etc/nginx/html -type d);do chmod 755 $d;done \
+  && ls -alR /etc/nginx/html
diff --git a/dcos-universe/docker/server/build.bash b/dcos-universe/docker/server/build.bash
new file mode 100755
index 0000000..2139f61
--- /dev/null
+++ b/dcos-universe/docker/server/build.bash
@@ -0,0 +1,114 @@
+#!/bin/bash
+set -o errexit -o nounset -o pipefail
+
+function globals {
+  export LC_ALL=en_US.UTF-8
+  export LANG="$LC_ALL"
+}; globals
+
+DOCKER_TAG=${DOCKER_TAG:-"dev"}
+DOCKER_IMAGE=${DOCKER_IMAGE:-"mesosphere/universe-server"}
+DOCKER_IMAGE_AND_TAG="${DOCKER_IMAGE}:${DOCKER_TAG}"
+
+DOCKER_SERVER_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+
+function clean {
+
+  rm -rf ${DOCKER_SERVER_DIR}/target
+
+}
+
+function prepare {
+
+  local universeBuildTarget="${DOCKER_SERVER_DIR}/../../target"
+
+  if [[ -f ${universeBuildTarget}/repo-empty-v3.json ]]; then
+    mkdir -p ${DOCKER_SERVER_DIR}/target
+    # copy over the build json repos
+    cp -r ${DOCKER_SERVER_DIR}/../../target/repo-*.json ${DOCKER_SERVER_DIR}/target
+    # copy over the build zip repos (only 1.6.1 and 1.7)
+    cp -r ${DOCKER_SERVER_DIR}/../../target/repo-*.zip ${DOCKER_SERVER_DIR}/target
+  else
+    err "Please run scripts/build.sh before trying to build universe server"
+  fi
+
+
+}
+
+function gzipJsonFiles {(
+
+  prepare
+
+  cd ${DOCKER_SERVER_DIR}/target
+
+  for f in $(ls -1 *.json); do
+    msg "GZipping $f"
+
+    # Alpine Linux does not support gzip -k
+    cp "${f}" "${f}.tmp"
+    gzip -f "${f}"
+    mv "${f}.tmp" "${f}"
+
+    sizeOrig=0
+    sizeGZip=0
+
+    if [[ `uname` == 'Darwin' ]]; then
+      sizeOrig=$(stat -f "%z" "${f}")
+      sizeGZip=$(stat -f "%z" "${f}.gz")
+    else
+      sizeOrig=$(stat -c "%s" "${f}")
+      sizeGZip=$(stat -c "%s" "${f}.gz")
+    fi
+
+    msg "GZipped $f [${sizeOrig} B -> ${sizeGZip} B]"
+
+    if [ ${sizeOrig} -le ${sizeGZip} ]; then
+      msg "GZipped file ${f}.gz is larger than its original file, discarding"
+      rm "${f}.gz"
+    fi
+  done
+
+)}
+
+
+
+function build {(
+
+  # Check if the jq processor is installed correctly
+  if ! command -v jq > /dev/null 2>&1; then
+    echo "jq is required. Please install!"
+    exit 1
+  fi
+
+  gzipJsonFiles
+
+  cd ${DOCKER_SERVER_DIR}
+
+  msg "Building docker image ${DOCKER_IMAGE_AND_TAG}"
+  docker build -t "${DOCKER_IMAGE_AND_TAG}" .
+  msg "Built docker image ${DOCKER_IMAGE_AND_TAG}"
+
+  cat marathon.json | jq ".container.docker.image |= \"${DOCKER_IMAGE_AND_TAG}\"" > target/marathon.json
+
+  msg "marathon.json output to ${DOCKER_SERVER_DIR}/target/marathon.json"
+
+)}
+
+function publish {
+
+  docker push "${DOCKER_IMAGE_AND_TAG}"
+
+}
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function err { local x=$? ; msg "$*" ; return $(( $x == 0 ? 1 : $x )) ;}
+function println { printf '%s\n' "$(now) $*" ;}
+function print { printf '%s ' "$(now) $*" ;}
+
+######################### Delegates to subcommands or runs main, as appropriate
+if [[ ${1:-} ]] && declare -F | cut -d' ' -f3 | fgrep -qx -- "${1:-}"
+then "$@"
+else build
+fi
diff --git a/dcos-universe/docker/server/marathon.json b/dcos-universe/docker/server/marathon.json
new file mode 100644
index 0000000..cb60593
--- /dev/null
+++ b/dcos-universe/docker/server/marathon.json
@@ -0,0 +1,36 @@
+{
+  "id": "/universe",
+  "instances": 1,
+  "cpus": 0.25,
+  "mem": 128,
+  "requirePorts": true,
+  "container": {
+    "type": "DOCKER",
+    "docker": {
+      "network": "BRIDGE",
+      "image": "mesosphere/universe-server:$tag",
+      "portMappings": [
+        {
+          "containerPort": 80,
+          "hostPort": 8085,
+          "protocol": "tcp"
+        }
+      ]
+    },
+    "volumes": []
+  },
+  "healthChecks": [
+    {
+      "gracePeriodSeconds": 120,
+      "intervalSeconds": 30,
+      "maxConsecutiveFailures": 3,
+      "path": "/repo-empty-v3.json",
+      "portIndex": 0,
+      "protocol": "HTTP",
+      "timeoutSeconds": 5
+    }
+  ],
+  "constraints": [
+    ["hostname", "UNIQUE"]
+  ]
+}
diff --git a/dcos-universe/docker/server/nginx/etc/nginx/conf.d/universe.marathon.mesos.conf b/dcos-universe/docker/server/nginx/etc/nginx/conf.d/universe.marathon.mesos.conf
new file mode 100644
index 0000000..2cf13eb
--- /dev/null
+++ b/dcos-universe/docker/server/nginx/etc/nginx/conf.d/universe.marathon.mesos.conf
@@ -0,0 +1,93 @@
+server {
+  listen 80;
+  server_name universe.marathon.mesos;
+  access_log /dev/stdout;
+  error_log /dev/stdout;
+
+  gzip_static on; # nginx will automatically search for a .gz version of a file and serve it if it exists
+
+  set $dcos_release_version 1.6.1;
+
+  set $serve_json false;
+
+  if ($http_accept ~ ".*application/vnd\.dcos\.universe\.repo\+json;charset=utf-8;version=v3.*") {
+    # The Accept header contains the v3 repo schema
+    set $serve_json true;
+    set $universe_version v3;
+  }
+
+  if ($http_accept ~ ".*application/vnd\.dcos\.universe\.repo\+json;charset=utf-8;version=v4.*") {
+    # The Accept header contains the v4 repo schema
+    set $serve_json true;
+    set $universe_version v4;
+  }
+
+  if ($http_user_agent ~ ".*dcos\/1\.8.*") {
+    set $dcos_release_version 1.8;
+  }
+
+  if ($http_user_agent ~ ".*dcos\/1\.9.*") {
+    set $dcos_release_version 1.9;
+  }
+
+  if ($http_user_agent ~ ".*dcos\/1\.10.*") {
+    set $dcos_release_version 1.10;
+  }
+
+  if ($http_user_agent ~ ".*dcos\/1\.11.*") {
+    set $dcos_release_version 1.11;
+  }
+
+  location = /repo-1.7 {
+    types {
+      "application/vnd.dcos.universe.repo+json;charset=utf-8;version=v3" json;
+      application/zip zip;
+    }
+    set $compound_if "";
+    if ($serve_json = true) {
+      set $compound_if "${compound_if}json";
+    }
+    if ($dcos_release_version != "1.7") {
+      set $compound_if "${compound_if}notoneseven";
+    }
+
+    if ($compound_if ~ "jsonnotoneseven") {
+      rewrite ^/repo-1.7$ /repo-empty-v3.json break;
+    }
+
+    rewrite ^/repo-1.7$ /repo-up-to-1.7.zip break;
+  }
+
+  location = /repo {
+    types {
+      application/zip zip;
+    }
+
+    if ($universe_version = v3) {
+      rewrite ^/repo$ /v3;
+    }
+
+    if ($universe_version = v4) {
+      rewrite ^/repo$ /v4;
+    }
+
+    rewrite ^/repo$ /repo-up-to-$dcos_release_version.zip break;
+  }
+
+  location = /v3 {
+    internal;
+    types {
+      "application/vnd.dcos.universe.repo+json;charset=utf-8;version=v3" json;
+    }
+    rewrite ^/v3$ /repo-up-to-$dcos_release_version.json break;
+  }
+
+  location = /v4 {
+    internal;
+    types {
+      "application/vnd.dcos.universe.repo+json;charset=utf-8;version=v4" json;
+    }
+    rewrite ^/v4$ /repo-up-to-$dcos_release_version.json break;
+  }
+
+}
diff --git a/dcos-universe/docs/tutorial/Dockerfile b/dcos-universe/docs/tutorial/Dockerfile
new file mode 100644
index 0000000..bfc2d29
--- /dev/null
+++ b/dcos-universe/docs/tutorial/Dockerfile
@@ -0,0 +1,11 @@
+# Use an official Python runtime as a base image
+FROM python:3
+
+# Set the working directory to the package directory
+WORKDIR /package
+
+# Copy the current directory contents into the container at /package
+ADD . /package
+
+# Run helloworld.py when the container launches. -u flag makes sure the output is not buffered
+CMD ["python3", "-u", "helloworld.py"]
diff --git a/dcos-universe/docs/tutorial/GetStarted.md b/dcos-universe/docs/tutorial/GetStarted.md
new file mode 100644
index 0000000..18d8807
--- /dev/null
+++ b/dcos-universe/docs/tutorial/GetStarted.md
@@ -0,0 +1,502 @@
+# Get Started Creating a DC/OS Package
+This tutorial provides a walkthrough of creating a Universe package. The audience is developers who want to modify or *publish* packages to the Universe. This tutorial will familiarize you with package concepts and the roles of Marathon and Universe in the package life cycle. It does not go into great depth on some of the conceptual or inner details.
+
+#### Table of Contents
+- [Required terminology](#required-terminology)
+	- [What is Universe?](#what-is-universe)
+	- [What is Marathon?](#what-is-marathon)
+	- [What is a package?](#what-is-a-package)
+- [Prerequisites](#prerequisites)
+	- [Library dependencies](#library-dependencies)
+	- [Access requirements](#access-requirements)
+	- [This repository](#this-repository)
+- [Create a package](#create-a-package)
+	- [Step 1 : Create a simple Python HTTP Server](#step-1--create-a-simple-python-http-server)
+		- [Change port mapping to be dynamic](#change-port-mapping-to-be-dynamic)
+	- [Step 2 : Creating a Docker container](#step-2--creating-a-docker-container)
+		- [Build the container](#build-the-container)
+		- [Test your container](#test-your-container)
+		- [Tag and publish your container](#tag-and-publish-your-container)
+	- [Step 3 : Creating a DC/OS Package](#step-3--creating-a-dcos-package)
+		- [config.json](#configjson)
+		- [resource.json](#resourcejson)
+		- [package.json](#packagejson)
+		- [marathon.json.mustache](#marathonjsonmustache)
+	- [Step 3.1 : DC/OS Integration](#step-31--dcos-integration)
+		- [Service Endpoints](#service-endpoints)
+		- [Health Checks](#health-checks)
+	- [Step 4 : Testing the package](#step-4--testing-the-package)
+		- [Validation using build script.](#validation-using-build-script)
+		- [Build the Universe server](#build-the-universe-server)
+		- [Run the local Universe server](#run-the-local-universe-server)
+		- [Add the Universe repo to DC/OS cluster:](#add-the-universe-repo-to-dcos-cluster)
+		- [Install the package](#install-the-package)
+		- [Test the package](#test-the-package)
+	- [Step 5 : Publish the package](#step-5--publish-the-package)
+
+## Required Terminology
+
+### What is Universe?
+The Universe is a DC/OS package repository that contains services like Spark, Cassandra, Jenkins, and many others. It allows users to install these services with a single click from the DC/OS UI or by a simple `dcos package install <package_name>` command from the DC/OS CLI. Many community members have already submitted their own packages to the Universe, and anyone interested is encouraged to get involved with package development! Submitting a package is a great way to contribute to the [...]
+
+
+### What is Marathon?
+You can use Marathon to deploy applications to DC/OS. [Marathon](https://mesosphere.github.io/marathon/) is a production-grade container orchestration platform for Mesosphere’s Datacenter Operating System (DC/OS) and [Apache Mesos](https://mesos.apache.org/). Marathon is a cluster-wide init and control system for running Linux services in cgroups and Docker containers. Marathon has a number of different depl [...]
+
+
+### What is a package?
+There are several ways to deploy your service onto a running DC/OS cluster.
+  * Use the DC/OS Marathon command in the CLI.
+  * Use the Marathon REST API directly.
+  * Deploy your service as a package.
+
+Deploying your service using the package approach makes your life easier and service management efficient. After you have a running DC/OS, you can browse packages in the GUI [dashboard](https://docs.mesosphere.com/latest/gui/#universe). A package consists of the four required configuration files (`config.json`, `package.json`, `resource.json`, and `marathon.json.mustache`) and all of the external files linked from them.
+
+A package implicitly relies on Marathon; its contents are used to generate a Marathon app definition. By the end of this guide, you will be able to build, publish, and browse your package in the cluster.
+
+
+## Prerequisites
+Before starting this guide, make sure you have the following prerequisites.
+
+### Library dependencies
+* [DC/OS CLI](https://dcos.io/docs/latest/cli/install/) installed and configured.
+* [jq](https://stedolan.github.io/jq/download/) is installed in your environment.
+* `python3` in your environment.
+* Docker is installed.
+
+### Access requirements
+* Access to a running [DC/OS](https://dcos.io/install/).
+* Mesos needs access to the Docker registry that has your Universe Server. In this guide, Docker Hub is used as the Docker registry.
+
+
+### This repository
+- This guide uses packaging version v4 and will be updated when a new version is released.
+- The packages are located in the `repo/packages` directory.
+- This tutorial is in the `docs/tutorial` directory.
+- You can refer to **schemas** in `repo/meta/schema` directory. This directory has
+  - `config-schema.json` that refers to the schema of the `config.json`
+  - `package-schema.json` that refers to the schema of the `package.json`
+  - `v3-resource-schema.json` that refers to the schema of the `resource.json` for the v3 and v4 packages
+  - `*-repo-schema.json` files are not meant to be used by package developers; they instead define the schema for the API between Universe and DC/OS.
+
+## Create a package
+In this step, you build a package that provides a Python server as a DC/OS service. The Python server receives an HTTP GET request and responds with the current time at the server.
+
+### Step 1 : Create a simple Python HTTP Server
+For the purposes of this guide, Python 3 and the `HTTPServer` module provided by its standard library are used. Create a file called `helloworld.py` in an empty directory called `time-server-service`.
+
+```python
+import time
+import http.server
+import os
+
+
+HOST_NAME = '0.0.0.0' # Host name of the HTTP server
+# Port the server listens on (made dynamic via the $PORT0 environment variable in a later step)
+PORT_NUMBER = 8000
+
+class MyHandler(http.server.BaseHTTPRequestHandler):
+    def do_GET(s):
+        """Respond to a GET request."""
+        s.send_response(200)
+        s.send_header("Content-type", "text/html")
+        s.end_headers()
+        s.wfile.write("<html><head><title>Time Server</title></head>")
+        s.wfile.write("<body><p>The current time is {}</p>".format(time.asctime()))
+        s.wfile.write("</body></html>")
+
+if __name__ == '__main__':
+    server_class = http.server.HTTPServer
+    httpd = server_class((HOST_NAME, PORT_NUMBER), MyHandler)
+    print(time.asctime(), "Server Starts - {}:{}".format(HOST_NAME, PORT_NUMBER))
+    try:
+        httpd.serve_forever()
+    except KeyboardInterrupt:
+        pass
+    httpd.server_close()
+    print(time.asctime(), "Server Stops - {}:{}".format(HOST_NAME, PORT_NUMBER))
+```
+
+The code snippet simply starts a Python server and serves the GET requests with HTML that says the current time. You should be able to run this code snippet with `python3 helloworld.py` and browse [localhost:8000](http://localhost:8000).
+
+#### Change port mapping to be dynamic
+
+If you want to get a port number dynamically from the available ports, Marathon provides a way to achieve this. You can access the available ports using the environment variables `$PORT0`, `$PORT1`, `$PORT2`, and so on. This is explained more fully in a later section; for now, just change your Python snippet to read the port from an environment variable as below:
+
+```python
+# Gets the port number from $PORT0 environment variable
+PORT_NUMBER = int(os.environ['PORT0'])
+```
+
+Once you do this, you can browse your server after executing `PORT0=8000 python3 helloworld.py`
+
+### Step 2 : Creating a Docker container
+Creating a Docker container is essential to distribute _this_ service. It runs completely isolated from the host environment by default, only accessing host files and ports if configured to do so. To continue reading, you need to be familiar with Docker; this [get-started](https://docs.docker.com/get-started/) guide is recommended. You should have logged in to your Docker account in your terminal using `docker login`.
+
+_Note: Providing a Docker image is optional and there are other ways to execute the binary (e.g., the `cassandra` package doesn't use a Docker image to install the binary)._
+
+Create a Docker file (named `Dockerfile`) in the `time-server-service` directory created earlier. The `Dockerfile` should look like this:
+
+```dockerfile
+# Use an official Python runtime as a base image
+FROM python:3
+
+# Set the working directory to the package directory
+WORKDIR /package
+
+# Copy the current directory contents into the container at /package
+ADD . /package
+
+# Run helloworld.py when the container launches. -u flag makes sure the output is not buffered
+CMD ["python3", "-u", "helloworld.py"]
+```
+Read through the comments to understand what each line in the `Dockerfile` does.
+
+
+#### Build the container
+
+*Throughout the rest of this guide, `docker-user-name` refers to the Docker user name under which you publish your Docker images to a registry such as Docker Hub. Replace `docker-user-name` with your own Docker user name in all commands and files.*
+
+Now that you have everything ready, you can build the container. Here’s what `ls` should show:
+```
+$ ls
+Dockerfile		helloworld.py
+```
+Now run the build command. This creates a Docker image which you are going to tag using `-t` so it has a friendly name.
+
+`docker build -t docker-user-name/time-server:part1 .`
+
+Where is your built image? It's in your machine's local Docker daemon:
+
+```
+$ docker images
+
+REPOSITORY                        TAG         IMAGE ID
+docker-user-name/time-server      part1       42somsoc147
+```
+
+
+#### Test your container
+When you execute `docker images`, you should be able to see your image in the displayed list. To make sure the image is working as expected, you can run the container by executing the command below:
+
+`docker run --env PORT0=8000 -p 80:8000 -t docker-user-name/time-server:part1`
+
+**Note**: You must set the value of `PORT0` explicitly because Marathon sets this value when it launches the container, which has not happened yet.
+
+The `-p` option maps the host port 80 to the container port 8000. The `-t` flag creates a pseudo-TTY, and since you unbuffered the Python standard I/O in your `Dockerfile`, you will be able to see the real-time logs of the server in the console. Once you have executed the above command, you should be able to browse [localhost](http://localhost:80). You can test the URL with `curl localhost:80` and your server should return the current time.
+
+
+#### Tag and publish your container
+Once you are satisfied with the functionality of your container, you can publish the Docker image to the Docker registry. If you built the image under a plain local name such as `time-server`, first tag it with your Docker Hub repository:
+
+`docker tag time-server docker-user-name/time-server:part1`
+
+This tags your local `time-server` image as the repository `time-server` under your Docker user name `docker-user-name`, with the optional tag `part1`. (If you built the image as `docker-user-name/time-server:part1` in the previous step, it already carries the right name and you can skip this command.)
+
+Once you tag your image, you have to publish it (analogous to `git push`) to the Docker registry so that Marathon will be able to discover it later using a URL. You can achieve this by executing the command:
+
+`docker push docker-user-name/time-server:part1`
+
+Now that you have the container ready, in the next section you will see how to create a package!
+
+### Step 3 : Creating a DC/OS Package
+In order to create a package, you need to have forked the [Universe repo](https://github.com/mesosphere/universe) and then cloned it so that it is available in your terminal. Once you do this, create a directory named `time-server` under the `repo/packages/T` directory (as your package name starts with the letter "t"). If a package with your chosen name already exists in that directory, you must pick a different name. You create all the required files in this  [...]
+
+Each package has its own directory, with one subdirectory for each package revision. Each package revision directory contains the set of files necessary to create a consumable package that can be used by a DC/OS Cluster to install the package. For example, your package will look like this:
+
+```
+└── repo/packages/T/time-server
+    ├── 0
+    │   ├── config.json
+    │   ├── resource.json
+    │   ├── marathon.json.mustache
+    │   └── package.json
+    └── ...
+```
+
+In this guide, since this is the first version of the time-server, you will create the above directory structure with only one revision (numbered 0) and create the required empty files. As new versions of your package are released, this number increments by one. Also, once a package revision has been committed to Universe, its files should never be modified; a new revision must be created for any change.
+
+***Tip: When reading the schema JSON files, look for the `required` JSON field to understand which fields are mandatory.***
+
+#### config.json
+As the name says, this file specifies how your package can be configured. This is how your `config.json` should look:
+
+```json
+{
+  "$schema": "http://json-schema.org/schema#",
+  "properties": {
+    "service": {
+      "type": "object",
+      "description": "DC/OS service configuration properties",
+      "properties": {
+        "name": {
+          "description": "Name of this service instance",
+          "type": "string",
+          "default": "time-server"
+        },
+        "cpus": {
+          "description": "CPU shares to allocate to each service instance.",
+          "type": "number",
+          "default": 0.1,
+          "minimum": 0.1
+        },
+        "mem": {
+          "description":  "Memory to allocate to each service instance.",
+          "type": "number",
+          "default": 256.0,
+          "minimum": 128.0
+        }
+      }
+    }
+  }
+}
+```
+
+In this example, there are three main properties to be configured. The `name` is the actual name of the service running in DC/OS. The `cpus` and `mem` are the amount of CPU and memory required for each service instance. You can read more about the various fields in this file [here](https://github.com/mesosphere/universe#configjson) or refer to [`repo/meta/schema/config-schema.json`](/repo/meta/schema/config-schema.json) for a full-fledged definition.
+
+(Note: If you need to add a config property after your package revision has been committed to Universe, you have to bump your package version and create a new package revision, so be sure to add all the config properties that you need up front.)
+
+#### resource.json
+This file contains all of the externally hosted resources (e.g. Docker images, HTTP objects and images) needed to install the application. It also contains the `cli` section that can be used to allow a package to configure native CLI subcommands for several platforms and architectures.
+
+Below is the resource file that you use for the `time-server` package. You can provide the previously published `docker-user-name/time-server:part1` image under the `docker` JSON field here. Note that providing a Docker image is optional and there are other ways to execute the binary. As mentioned earlier, the package `cassandra` doesn't use a Docker image to install the binary; instead, it tells Marathon to run a shell command. It has all the dependencies it needs because they are specifie [...]
+
+```json
+{
+  "assets": {
+    "container": {
+      "docker": {
+        "timeserverimage": "docker-user-name/time-server:part1"
+      }
+    }
+  }
+}
+```
+
+If needed, you can also list your package's icons and screenshots of your service here. You can read more about the various fields in this file [here](https://github.com/mesosphere/universe#resourcejson) or refer to [`repo/meta/schema/v3-resource-schema.json`](/repo/meta/schema/v3-resource-schema.json) for a full-fledged definition.
+
+
+#### package.json
+Every package in Universe must have a `package.json` file which specifies the high level metadata about the package.
+
+Below is a snippet that represents your time server `package.json` (a version `4.0` package). This JSON has only the mandatory fields configured. As this is your initial version, you can set the version field to `0.1.0`.
+
+```json
+{
+  "packagingVersion": "4.0",
+  "name": "time-server",
+  "version": "0.1.0",
+  "maintainer": "https://github.com/mesosphere/universe",
+  "description": "This is a simple Python HTTP server that displays a webpage that says the current time at the server location",
+  "tags": ["python", "http", "time-server"]
+}
+```
+
+Note that the version field specifies a human-readable version of the package and this is independent of the directory number inside the `time-server` directory.
+
+You can read more about the various fields in this file [here](https://github.com/mesosphere/universe#packagejson) or see [`repo/meta/schema/package-schema.json`](/repo/meta/schema/package-schema.json) for the full JSON schema outlining what properties are available for each corresponding version of a package.
+
+
+#### marathon.json.mustache
+This file is a [mustache template](http://mustache.github.io/) that when rendered will create a
+[Marathon](http://github.com/mesosphere/marathon) app definition capable of running your service. The first level of validation is that after Mustache substitution, the result must be a JSON document. Once the JSON document is produced, it must be a valid request body for Marathon's `POST /v2/apps` endpoint ([Marathon API Documentation](https://mesosphere.github.io/marathon/docs/rest-api.html)).
+
+This is what the Marathon file should look like:
+
+```
+{
+  "id": "{{service.name}}",
+  "cpus": {{service.cpus}},
+  "mem": {{service.mem}},
+  "instances": 1,
+  "container": {
+    "type": "DOCKER",
+    "docker": {
+      "image": "{{resource.assets.container.docker.timeserverimage}}",
+      "network": "HOST"
+    }
+  },
+  "portDefinitions": [
+    {
+      "port": 0,
+      "protocol": "tcp"
+    }
+  ]
+}
+```
+
+The service `name`, `cpus` and `mem` are populated from the default values in the `config.json` file. The image is populated from the `resource.json` file. Here, `HOST` mode of networking is used to dynamically get a port from the available pool. Read [about Marathon ports](https://mesosphere.github.io/marathon/docs/ports.html) to understand modes in detail.
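+
+For illustration only, rendering this template with the default values from `config.json` and the image from `resource.json` above would produce roughly the following Marathon app definition (the package manager performs the actual rendering at install time):
+
+```json
+{
+  "id": "time-server",
+  "cpus": 0.1,
+  "mem": 256.0,
+  "instances": 1,
+  "container": {
+    "type": "DOCKER",
+    "docker": {
+      "image": "docker-user-name/time-server:part1",
+      "network": "HOST"
+    }
+  },
+  "portDefinitions": [
+    {
+      "port": 0,
+      "protocol": "tcp"
+    }
+  ]
+}
+```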
+
+If you need further examples, you can refer to the [repo/packages/H/hello-world](/repo/packages/H/hello-world) package.
+
+### Step 3.1 : DC/OS Integration
+
+By default, a DC/OS service is deployed on a [private agent node](https://dcos.io/docs/1.9/overview/concepts/#private-agent-node). To allow a user to control configuration or monitor a service, use the admin router as a reverse proxy. Admin router can proxy calls on the master node to a service on a private node.
+
+The Admin Router currently supports only one reverse proxy destination. This step is optional. If you don't want to expose your service endpoint, you can skip to the next step.
+
+#### Service Endpoints
+
+The Admin Router allows Marathon tasks to define custom service UI and HTTP endpoints, which are made available as `/service/<service-name>`. Set the following Marathon task labels to enable this:
+
+```json
+"labels": {
+    "DCOS_SERVICE_NAME": "<service-name>",
+    "DCOS_SERVICE_PORT_INDEX": "0",
+    "DCOS_SERVICE_SCHEME": "http"
+  }
+```
+
+To enable the forwarding to work reliably across task failures, we recommend co-locating the endpoints with the task. This way, if the task is restarted on another host and with different ports, Admin Router will pick up the new labels and update the routing. **Note**: Due to caching, there can be an up to 30-second delay before the new routing is working.
+
+We recommend having only a single task setting these labels for a given service name. If multiple task instances have the same service name label, Admin Router will pick one of the task instances deterministically, but this might make debugging issues more difficult.
+
+Since the paths to resources for clients connecting to Admin Router will differ from those paths the service actually has, ensure the service is configured to run behind a proxy. This often means relative paths are preferred to absolute paths. In particular, resources expected to be used by a UI should be verified to work through a proxy.
+
+Tasks running in nested [Marathon app groups](https://mesosphere.github.io/marathon/docs/application-groups.html) will be available only using their service name (i.e., `/service/<service-name>`), not by the Marathon app group name (i.e., `/service/app-group/<service-name>`).
+
+
+#### Health Checks
+
+Service health check information can be surfaced in the DC/OS services UI tab by defining one or more `healthChecks` in the Service’s Marathon template. For example:
+
+```
+"healthChecks": [
+   {
+       "path": "/",
+       "portIndex": 0,
+       "protocol": "HTTP",
+       "gracePeriodSeconds": 5,
+       "intervalSeconds": 60,
+       "timeoutSeconds": 10,
+       "maxConsecutiveFailures": 3
+   }
+]
+```
+
+See the [health checks documentation](https://mesosphere.github.io/marathon/docs/health-checks.html) for more information.
+
+In this guide, the `time-server` is not a Mesos framework. If your service is a framework and you want the tasks to show up in the UI, then you need to set the label `DCOS_PACKAGE_FRAMEWORK_NAME` to the name of the framework.
+
+```json
+"labels": {
+  "DCOS_PACKAGE_FRAMEWORK_NAME": "time-server"
+}
+ ```
+
+ In order to expose the `time-server` service as an endpoint and add health checks to it, add the above-mentioned labels. Your new `marathon.json.mustache` should look like this:
+
+ ```
+ {
+   "id": "{{service.name}}",
+   "cpus": {{service.cpus}},
+   "mem": {{service.mem}},
+   "instances": 1,
+   "container": {
+     "type": "DOCKER",
+     "docker": {
+       "image": "{{resource.assets.container.docker.timeserverimage}}",
+       "network": "HOST"
+     }
+   },
+   "portDefinitions": [
+     {
+       "port": 0,
+       "protocol": "tcp"
+     }
+   ],
+   "labels": {
+     "DCOS_SERVICE_NAME": "{{service.name}}",
+     "DCOS_SERVICE_PORT_INDEX": "0",
+     "DCOS_SERVICE_SCHEME": "http"
+   },
+   "healthChecks": [
+     {
+       "path": "/",
+       "portIndex": 0,
+       "protocol": "HTTP",
+       "gracePeriodSeconds": 5,
+       "intervalSeconds": 60,
+       "timeoutSeconds": 10,
+       "maxConsecutiveFailures": 3
+     }
+   ]
+ }
+ ```
+
+
+### Step 4 : Testing the package
+
+Now that the package is built, you need to make sure everything works as expected before publishing to the community.
+
+
+#### Validation using build script
+
+You can execute the script `scripts/build.sh` to make sure all the JSON files conform to their respective schemas and to install any missing libraries. This script is also executed as a pre-commit hook.
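+
+As a minimal sketch, assuming you run it from the root of your Universe fork:
+
+```bash
+# Validates the package JSON files against the schemas in repo/meta/schema
+bash scripts/build.sh
+```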
+
+It may throw some errors if there are any unrecognized fields in the package files. Fix those errors and re-execute the command until the build is successful.
+
+Now, you can build the Universe server locally and run it in the DC/OS cluster to test and install your package.
+
+Universe Server is a new component introduced alongside `packagingVersion` `3.0`. In order for Universe to be able to provide packages for many versions of DC/OS at the same time, it is necessary for a server to be responsible for serving the correct set of packages to a cluster based on the cluster's version.
+
+
+#### Build the Universe server
+
+Build the Universe Server Docker image:
+```bash
+DOCKER_IMAGE="docker-user-name/universe-server" DOCKER_TAG="time-server" docker/server/build.bash
+```
+
+This will create a Docker image `docker-user-name/universe-server:time-server` and `docker/server/target/marathon.json` on your local machine.
+
+If you would like to publish the built Docker image to Docker Hub, run:
+```bash
+DOCKER_IMAGE="docker-user-name/universe-server" DOCKER_TAG="time-server" docker/server/build.bash publish
+```
+
+
+#### Run the local Universe server
+Using the `marathon.json` that is created when building Universe Server, you can run a Universe Server in the DC/OS cluster which can then be used to install packages.
+
+Run the following command inside the `docker/server/target` directory to configure DC/OS to use the custom Universe Server (DC/OS 1.8+):
+
+`dcos marathon app add marathon.json`
+
+
+#### Add the Universe repo to DC/OS cluster:
+Now that you have the local Universe Server up and running, add it to the cluster's package manager as a repository. You can do this from the GUI or the CLI. To do it from the CLI, execute:
+
+`dcos package repo add --index=0 dev-universe http://universe.marathon.mesos:8085/repo`
+
+
+#### Install the package
+- You can search for your package using something like:
+
+    `dcos package search time`
+- Once you have found your `time-server` package, you can install it onto your cluster using
+
+    `dcos package install time-server`
+- Install the package, and if everything works, you have successfully created, tested, and deployed a package! You can check whether your package is running with
+
+    `dcos package list`
+
+- You can browse your endpoint by going to the cluster dashboard and clicking Services > time-server. You will see the currently running task, and you can click on any running task to view the endpoint URL.
+
+
+#### Test the package
+
+If you have followed the earlier instructions on configuring a [service endpoint](#service-endpoints), you can test the URL with `curl`:
+
+    `curl https://<DC/OS-Cluster>/service/time-server`
+
+or just open the URL in your favorite browser; you should see the current time.
+
+
+Now continue to the next step to publish your package to the DC/OS community.
+
+
+### Step 5 : Publish the package
+To add a package to the Universe, create a Pull Request against the `version-3.x` branch of the `mesosphere/universe` repo. Once you have raised a PR, CI (Continuous Integration) runs automated tests to make sure everything works together. After the CI passes, Mesosphere reviews the PR, merges it once it is ready, and your package is ready to :rocket:.
+
+All Pull Requests opened against the `version-3.x` branch of Universe have their Universe Server Docker image built and published to the Docker Hub repository [`mesosphere/universe-server`](https://hub.docker.com/r/mesosphere/universe-server/). In the artifacts tab of the build results you can find `docker/server/marathon.json`, which can be used to run the Universe Server for testing in your DC/OS cluster. For each Pull Request, click the details link of the "Universe Server Docker image" s [...]
diff --git a/dcos-universe/docs/tutorial/helloworld.py b/dcos-universe/docs/tutorial/helloworld.py
new file mode 100755
index 0000000..8f6a4f7
--- /dev/null
+++ b/dcos-universe/docs/tutorial/helloworld.py
@@ -0,0 +1,30 @@
+import time
+import http.server
+import os
+
+
+HOST_NAME = '0.0.0.0' # Host name of the http server
+# Gets the port number from $PORT0 environment variable
+PORT_NUMBER = int(os.environ['PORT0'])
+
+
+class MyHandler(http.server.BaseHTTPRequestHandler):
+    def do_GET(s):
+        """Respond to a GET request."""
+        s.send_response(200)
+        s.send_header("Content-type", "text/html")
+        s.end_headers()
+        s.wfile.write("<html><head><title>Time Server</title></head>".encode())
+        s.wfile.write("<body><p>The current time is {}</p>".format(time.asctime()).encode())
+        s.wfile.write("</body></html>".encode())
+
+if __name__ == '__main__':
+    server_class = http.server.HTTPServer
+    httpd = server_class((HOST_NAME, PORT_NUMBER), MyHandler)
+    print(time.asctime(), "Server Starts - {}:{}".format(HOST_NAME, PORT_NUMBER))
+    try:
+        httpd.serve_forever()
+    except KeyboardInterrupt:
+        pass
+    httpd.server_close()
+    print(time.asctime(), "Server Stops - {}:{}".format(HOST_NAME, PORT_NUMBER))
diff --git a/dcos-universe/docs/tutorial/time-server/0/config.json b/dcos-universe/docs/tutorial/time-server/0/config.json
new file mode 100644
index 0000000..d3ed39f
--- /dev/null
+++ b/dcos-universe/docs/tutorial/time-server/0/config.json
@@ -0,0 +1,28 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "properties": {
+    "service": {
+      "type": "object",
+      "description": "DC/OS service configuration properties",
+      "properties": {
+        "name": {
+          "description": "Name of this service instance",
+          "type": "string",
+          "default": "time-server"
+        },
+        "cpus": {
+          "description": "CPU shares to allocate to each service instance.",
+          "type": "number",
+          "default": 0.1,
+          "minimum": 0.1
+        },
+        "mem": {
+          "description":  "Memory to allocate to each service instance.",
+          "type": "number",
+          "default": 256.0,
+          "minimum": 128.0
+        }
+      }
+    }
+  }
+}
diff --git a/dcos-universe/docs/tutorial/time-server/0/marathon.json.mustache b/dcos-universe/docs/tutorial/time-server/0/marathon.json.mustache
new file mode 100644
index 0000000..7c14021
--- /dev/null
+++ b/dcos-universe/docs/tutorial/time-server/0/marathon.json.mustache
@@ -0,0 +1,35 @@
+{
+  "id": "{{service.name}}",
+  "cpus": {{service.cpus}},
+  "mem": {{service.mem}},
+  "instances": 1,
+  "container": {
+    "type": "DOCKER",
+    "docker": {
+      "image": "{{resource.assets.container.docker.timeserverimage}}",
+      "network": "HOST"
+    }
+  },
+  "portDefinitions": [
+    {
+      "port": 0,
+      "protocol": "tcp"
+    }
+  ],
+  "labels": {
+    "DCOS_SERVICE_NAME": "{{service.name}}",
+    "DCOS_SERVICE_PORT_INDEX": "0",
+    "DCOS_SERVICE_SCHEME": "http"
+  },
+  "healthChecks": [
+     {
+         "path": "/",
+         "portIndex": 0,
+         "protocol": "HTTP",
+         "gracePeriodSeconds": 5,
+         "intervalSeconds": 60,
+         "timeoutSeconds": 10,
+         "maxConsecutiveFailures": 3
+     }
+  ]
+}
diff --git a/dcos-universe/docs/tutorial/time-server/0/package.json b/dcos-universe/docs/tutorial/time-server/0/package.json
new file mode 100644
index 0000000..2beafbe
--- /dev/null
+++ b/dcos-universe/docs/tutorial/time-server/0/package.json
@@ -0,0 +1,8 @@
+{
+  "packagingVersion": "4.0",
+  "name": "time-server",
+  "version": "1.0.0",
+  "maintainer": "https://github.com/mesosphere/universe",
+  "description": "This is a simple python http server that displays a webpage which says the current time at the server location.",
+  "tags": ["http", "time-server", "basichttp"]
+}
diff --git a/dcos-universe/docs/tutorial/time-server/0/resource.json b/dcos-universe/docs/tutorial/time-server/0/resource.json
new file mode 100644
index 0000000..b770165
--- /dev/null
+++ b/dcos-universe/docs/tutorial/time-server/0/resource.json
@@ -0,0 +1,9 @@
+{
+  "assets": {
+    "container": {
+      "docker": {
+        "timeserverimage": "docker-user-name/time-server:part1"
+      }
+    }
+  }
+}
diff --git a/dcos-universe/hooks/pre-commit b/dcos-universe/hooks/pre-commit
new file mode 100755
index 0000000..60e786d
--- /dev/null
+++ b/dcos-universe/hooks/pre-commit
@@ -0,0 +1,10 @@
+#!/bin/bash
+set -o errexit -o nounset -o pipefail
+
+echo "RUNNING PRE-COMMIT";
+
+GIT_HOOKS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )";
+UNIVERSE_DIR=$GIT_HOOKS_DIR/../..
+SCRIPTS_DIR=$UNIVERSE_DIR/scripts
+
+$SCRIPTS_DIR/build.sh
diff --git a/dcos-universe/repo/meta/schema/build-definition-schema.json b/dcos-universe/repo/meta/schema/build-definition-schema.json
new file mode 100644
index 0000000..9168714
--- /dev/null
+++ b/dcos-universe/repo/meta/schema/build-definition-schema.json
@@ -0,0 +1,447 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+
+  "definitions": {
+
+    "localReference": {
+      "type": "string",
+      "pattern": "^@"
+    },
+
+    "dcosReleaseVersion": {
+      "type": "string",
+      "pattern": "^(?:0|[1-9][0-9]*)(?:\\.(?:0|[1-9][0-9]*))*$",
+      "description": "A string representation of a DC/OS Release Version"
+    },
+
+    "url": {
+      "type": "string",
+      "pattern": "."
+    },
+
+    "base64String": {
+      "type": "string",
+      "pattern": "^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{4}|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)$"
+    },
+
+    "cliInfo": {
+      "additionalProperties": false,
+      "properties": {
+        "contentHash": {
+          "items": {
+            "$ref": "#/definitions/hash"
+          },
+          "minItems": 1,
+          "type": "array"
+        },
+        "kind": {
+          "enum": [
+            "executable",
+            "zip"
+          ],
+          "type": "string"
+        },
+        "url": {
+          "$ref": "#/definitions/url"
+        }
+      },
+      "required": [
+        "url",
+        "kind",
+        "contentHash"
+      ],
+      "type": "object"
+    },
+
+    "hash": {
+      "additionalProperties": false,
+      "properties": {
+        "algo": {
+          "enum": [
+            "sha256"
+          ],
+          "type": "string"
+        },
+        "value": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "algo",
+        "value"
+      ],
+      "type": "object"
+    },
+
+
+    "marathon": {
+      "type": "object",
+      "properties": {
+        "v2AppMustacheTemplate": {
+          "oneOf": [
+            {"$ref": "#/definitions/base64String"},
+            {"$ref": "#/definitions/localReference"}
+          ]
+        }
+      },
+      "required": [ "v2AppMustacheTemplate" ],
+      "additionalProperties": false
+    },
+
+    "v30resource": {
+      "additionalProperties": false,
+      "type": "object",
+      "properties": {
+        "assets": {
+          "type": "object",
+          "properties": {
+            "uris": {
+              "type": "object",
+              "additionalProperties": {
+                "type": "string"
+              }
+            },
+            "container": {
+              "type": "object",
+              "properties": {
+                "docker": {
+                  "type": "object",
+                  "additionalProperties": {
+                    "type": "string"
+                  }
+                }
+              },
+              "additionalProperties": false
+            }
+          },
+          "additionalProperties": false
+        },
+        "cli": {
+          "additionalProperties": false,
+          "properties": {
+            "binaries": {
+              "additionalProperties": false,
+              "minProperties": 1,
+              "properties": {
+                "darwin": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                },
+                "linux": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                },
+                "windows": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                }
+              },
+              "type": "object"
+            }
+          },
+          "required": [
+              "binaries"
+          ],
+          "type": "object"
+        },
+        "images": {
+          "type": "object",
+          "properties": {
+            "icon-small": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 48 by 48 pixels."
+            },
+            "icon-medium": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 128 by 128 pixels."
+            },
+            "icon-large": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 256 by 256 pixels."
+            },
+            "screenshots": {
+              "type": "array",
+              "items": {
+                "type": "string",
+                "description": "PNG screen URL, preferably 1024 by 1024 pixels."
+              }
+            }
+          },
+          "additionalProperties": false
+        }
+      }
+    },
+
+
+    "config": {
+      "$ref": "http://json-schema.org/draft-04/schema#"
+    },
+
+
+    "command": {
+      "additionalProperties": false,
+      "required": ["pip"],
+      "properties": {
+        "pip": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "title": "Embedded Requirements File",
+          "description": "[Deprecated v3.x] An array of strings representing of the requirements file to use for installing the subcommand for Pip. Each item is interpreted as a line in the requirements file."
+        }
+      }
+    },
+
+
+    "v30BuildDef": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["3.0"]
+        },
+        "name": {
+          "type": "string"
+        },
+        "version": {
+          "type": "string",
+          "pattern": "^[-a-zA-Z0-9.]+$"
+        },
+        "scm": {
+          "type": "string"
+        },
+        "maintainer": {
+          "type": "string"
+        },
+        "website": {
+          "type": "string"
+        },
+        "description": {
+          "type": "string"
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$"
+          }
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url",
+                "description": "The URL where the license can be accessed"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        },
+        "minDcosReleaseVersion": {
+          "$ref": "#/definitions/dcosReleaseVersion",
+          "description": "The minimum DC/OS Release Version the package can run on."
+        },
+        "marathon": {
+          "$ref": "#/definitions/marathon"
+        },
+        "resource": {
+          "oneOf": [
+            {"$ref": "#/definitions/v30resource"},
+            {"$ref": "#/definitions/localReference"}
+          ]
+        },
+        "config": {
+          "oneOf": [
+            {"$ref": "#/definitions/config"},
+            {"$ref": "#/definitions/localReference"}
+          ]
+        },
+        "command": {
+          "$ref": "#/definitions/command"
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    },
+
+    "v40BuildDef": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["4.0"]
+        },
+        "name": {
+          "type": "string"
+        },
+        "version": {
+          "type": "string",
+          "pattern": "^[-a-zA-Z0-9.]+$"
+        },
+        "scm": {
+          "type": "string"
+        },
+        "maintainer": {
+          "type": "string"
+        },
+        "website": {
+          "type": "string"
+        },
+        "description": {
+          "type": "string"
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "upgradesFrom": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "description": "List of versions that can upgrade to this package. If the property is a list containing the string '*', any version can upgrade to this package. If the property is not set or the empty list, no version can upgrade to this package."
+        },
+        "downgradesTo": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "description": "List of versions that this package can downgrade to. If the property is a list containing the string '*', this package can downgrade to any version. If the property is not set or the empty list, this package cannot downgrade."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$"
+          }
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url",
+                "description": "The URL where the license can be accessed"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        },
+        "minDcosReleaseVersion": {
+          "$ref": "#/definitions/dcosReleaseVersion",
+          "description": "The minimum DC/OS Release Version the package can run on."
+        },
+        "marathon": {
+          "$ref": "#/definitions/marathon"
+        },
+        "resource": {
+          "oneOf": [
+            {"$ref": "#/definitions/v30resource"},
+            {"$ref": "#/definitions/localReference"}
+          ]
+        },
+        "config": {
+          "oneOf": [
+            {"$ref": "#/definitions/config"},
+            {"$ref": "#/definitions/localReference"}
+          ]
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    }
+
+  },
+
+  "type": "object",
+  "oneOf": [
+    { "$ref": "#/definitions/v30BuildDef" },
+    { "$ref": "#/definitions/v40BuildDef" }
+  ]
+}
diff --git a/dcos-universe/repo/meta/schema/command-schema.json b/dcos-universe/repo/meta/schema/command-schema.json
new file mode 100644
index 0000000..dbb355b
--- /dev/null
+++ b/dcos-universe/repo/meta/schema/command-schema.json
@@ -0,0 +1,20 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "oneOf": [
+    {
+      "type": "object",
+      "properties": {
+        "pip": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "title": "Embedded Requirements File",
+          "description": "An array of strings representing of the requirements file to use for installing the subcommand for Pip. Each item is interpreted as a line in the requirements file."
+        }
+      },
+      "additionalProperties": false,
+      "required": ["pip"]
+    }
+  ]
+}
diff --git a/dcos-universe/repo/meta/schema/config-schema.json b/dcos-universe/repo/meta/schema/config-schema.json
new file mode 100644
index 0000000..85eb502
--- /dev/null
+++ b/dcos-universe/repo/meta/schema/config-schema.json
@@ -0,0 +1,150 @@
+{
+    "id": "http://json-schema.org/draft-04/schema#",
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "description": "Core schema meta-schema",
+    "definitions": {
+        "schemaArray": {
+            "type": "array",
+            "minItems": 1,
+            "items": { "$ref": "#" }
+        },
+        "positiveInteger": {
+            "type": "integer",
+            "minimum": 0
+        },
+        "positiveIntegerDefault0": {
+            "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ]
+        },
+        "simpleTypes": {
+            "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ]
+        },
+        "stringArray": {
+            "type": "array",
+            "items": { "type": "string" },
+            "minItems": 1,
+            "uniqueItems": true
+        }
+    },
+    "type": "object",
+    "properties": {
+        "id": {
+            "type": "string",
+            "format": "uri"
+        },
+        "$schema": {
+            "type": "string",
+            "format": "uri"
+        },
+        "title": {
+            "type": "string"
+        },
+        "description": {
+            "type": "string"
+        },
+        "default": {},
+        "multipleOf": {
+            "type": "number",
+            "minimum": 0,
+            "exclusiveMinimum": true
+        },
+        "maximum": {
+            "type": "number"
+        },
+        "exclusiveMaximum": {
+            "type": "boolean",
+            "default": false
+        },
+        "minimum": {
+            "type": "number"
+        },
+        "exclusiveMinimum": {
+            "type": "boolean",
+            "default": false
+        },
+        "maxLength": { "$ref": "#/definitions/positiveInteger" },
+        "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" },
+        "pattern": {
+            "type": "string",
+            "format": "regex"
+        },
+        "additionalItems": {
+            "anyOf": [
+                { "type": "boolean" },
+                { "$ref": "#" }
+            ],
+            "default": {}
+        },
+        "items": {
+            "anyOf": [
+                { "$ref": "#" },
+                { "$ref": "#/definitions/schemaArray" }
+            ],
+            "default": {}
+        },
+        "maxItems": { "$ref": "#/definitions/positiveInteger" },
+        "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" },
+        "uniqueItems": {
+            "type": "boolean",
+            "default": false
+        },
+        "maxProperties": { "$ref": "#/definitions/positiveInteger" },
+        "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" },
+        "required": { "$ref": "#/definitions/stringArray" },
+        "additionalProperties": {
+            "anyOf": [
+                { "type": "boolean" },
+                { "$ref": "#" }
+            ],
+            "default": {}
+        },
+        "definitions": {
+            "type": "object",
+            "additionalProperties": { "$ref": "#" },
+            "default": {}
+        },
+        "properties": {
+            "type": "object",
+            "additionalProperties": { "$ref": "#" },
+            "default": {}
+        },
+        "patternProperties": {
+            "type": "object",
+            "additionalProperties": { "$ref": "#" },
+            "default": {}
+        },
+        "dependencies": {
+            "type": "object",
+            "additionalProperties": {
+                "anyOf": [
+                    { "$ref": "#" },
+                    { "$ref": "#/definitions/stringArray" }
+                ]
+            }
+        },
+        "enum": {
+            "type": "array",
+            "minItems": 1,
+            "uniqueItems": true
+        },
+        "type": {
+            "anyOf": [
+                { "$ref": "#/definitions/simpleTypes" },
+                {
+                    "type": "array",
+                    "items": { "$ref": "#/definitions/simpleTypes" },
+                    "minItems": 1,
+                    "uniqueItems": true
+                }
+            ]
+        },
+        "allOf": { "$ref": "#/definitions/schemaArray" },
+        "anyOf": { "$ref": "#/definitions/schemaArray" },
+        "oneOf": { "$ref": "#/definitions/schemaArray" },
+        "not": { "$ref": "#" }
+    },
+    "dependencies": {
+        "exclusiveMaximum": [ "maximum" ],
+        "exclusiveMinimum": [ "minimum" ]
+    },
+    "default": {}
+}
diff --git a/dcos-universe/repo/meta/schema/metadata-schema.json b/dcos-universe/repo/meta/schema/metadata-schema.json
new file mode 100644
index 0000000..e97350d
--- /dev/null
+++ b/dcos-universe/repo/meta/schema/metadata-schema.json
@@ -0,0 +1,426 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+
+  "definitions": {
+
+    "dcosReleaseVersion": {
+      "type": "string",
+      "pattern": "^(?:0|[1-9][0-9]*)(?:\\.(?:0|[1-9][0-9]*))*$",
+      "description": "A string representation of a DC/OS Release Version"
+    },
+
+    "url": {
+      "type": "string",
+      "pattern": "."
+    },
+
+    "base64String": {
+      "type": "string",
+      "pattern": "^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{4}|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)$"
+    },
+
+    "cliInfo": {
+      "additionalProperties": false,
+      "properties": {
+        "contentHash": {
+          "items": {
+            "$ref": "#/definitions/hash"
+          },
+          "minItems": 1,
+          "type": "array"
+        },
+        "kind": {
+          "enum": [
+            "executable",
+            "zip"
+          ],
+          "type": "string"
+        },
+        "url": {
+          "$ref": "#/definitions/url"
+        }
+      },
+      "required": [
+        "url",
+        "kind",
+        "contentHash"
+      ],
+      "type": "object"
+    },
+
+    "hash": {
+      "additionalProperties": false,
+      "properties": {
+        "algo": {
+          "enum": [
+            "sha256"
+          ],
+          "type": "string"
+        },
+        "value": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "algo",
+        "value"
+      ],
+      "type": "object"
+    },
+
+
+    "marathon": {
+      "type": "object",
+      "properties": {
+        "v2AppMustacheTemplate": {
+          "$ref": "#/definitions/base64String"
+        }
+      },
+      "required": [ "v2AppMustacheTemplate" ],
+      "additionalProperties": false
+    },
+
+    "v30resource": {
+      "additionalProperties": false,
+      "type": "object",
+      "properties": {
+        "assets": {
+          "type": "object",
+          "properties": {
+            "uris": {
+              "type": "object",
+              "additionalProperties": {
+                "type": "string"
+              }
+            },
+            "container": {
+              "type": "object",
+              "properties": {
+                "docker": {
+                  "type": "object",
+                  "additionalProperties": {
+                    "type": "string"
+                  }
+                }
+              },
+              "additionalProperties": false
+            }
+          },
+          "additionalProperties": false
+        },
+        "cli": {
+          "additionalProperties": false,
+          "properties": {
+            "binaries": {
+              "additionalProperties": false,
+              "minProperties": 1,
+              "properties": {
+                "darwin": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                },
+                "linux": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                },
+                "windows": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                }
+              },
+              "type": "object"
+            }
+          },
+          "required": [
+              "binaries"
+          ],
+          "type": "object"
+        },
+        "images": {
+          "type": "object",
+          "properties": {
+            "icon-small": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 48 by 48 pixels."
+            },
+            "icon-medium": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 128 by 128 pixels."
+            },
+            "icon-large": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 256 by 256 pixels."
+            },
+            "screenshots": {
+              "type": "array",
+              "items": {
+                "type": "string",
+                "description": "PNG screen URL, preferably 1024 by 1024 pixels."
+              }
+            }
+          },
+          "additionalProperties": false
+        }
+      }
+    },
+
+
+    "config": {
+      "$ref": "http://json-schema.org/draft-04/schema#"
+    },
+
+
+    "command": {
+      "additionalProperties": false,
+      "required": ["pip"],
+      "properties": {
+        "pip": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "title": "Embedded Requirements File",
+          "description": "[Deprecated v3.x] An array of strings representing of the requirements file to use for installing the subcommand for Pip. Each item is interpreted as a line in the requirements file."
+        }
+      }
+    },
+
+    "v30Metadata": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["3.0"]
+        },
+        "name": {
+          "type": "string"
+        },
+        "version": {
+          "type": "string",
+          "pattern": "^[-a-zA-Z0-9.]+$"
+        },
+        "scm": {
+          "type": "string"
+        },
+        "maintainer": {
+          "type": "string"
+        },
+        "website": {
+          "type": "string"
+        },
+        "description": {
+          "type": "string"
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$"
+          }
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url",
+                "description": "The URL where the license can be accessed"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        },
+        "minDcosReleaseVersion": {
+          "$ref": "#/definitions/dcosReleaseVersion",
+          "description": "The minimum DC/OS Release Version the package can run on."
+        },
+        "marathon": {
+          "$ref": "#/definitions/marathon"
+        },
+        "resource": {
+          "$ref": "#/definitions/v30resource"
+        },
+        "config": {
+          "$ref": "#/definitions/config"
+        },
+        "command": {
+          "$ref": "#/definitions/command"
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    },
+
+    "v40Metadata": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["4.0"]
+        },
+        "name": {
+          "type": "string"
+        },
+        "version": {
+          "type": "string",
+          "pattern": "^[-a-zA-Z0-9.]+$"
+        },
+        "scm": {
+          "type": "string"
+        },
+        "maintainer": {
+          "type": "string"
+        },
+        "website": {
+          "type": "string"
+        },
+        "description": {
+          "type": "string"
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "upgradesFrom": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "description": "List of versions that can upgrade to this package. If the property is a list containing the string '*', any version can upgrade to this package. If the property is not set or the empty list, no version can upgrade to this package."
+        },
+        "downgradesTo": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "description": "List of versions that this package can downgrade to. If the property is a list containing the string '*', this package can downgrade to any version. If the property is not set or the empty list, this package cannot downgrade."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$"
+          }
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url",
+                "description": "The URL where the license can be accessed"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        },
+        "minDcosReleaseVersion": {
+          "$ref": "#/definitions/dcosReleaseVersion",
+          "description": "The minimum DC/OS Release Version the package can run on."
+        },
+        "marathon": {
+          "$ref": "#/definitions/marathon"
+        },
+        "resource": {
+          "$ref": "#/definitions/v30resource"
+        },
+        "config": {
+          "$ref": "#/definitions/config"
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    }
+
+  },
+
+  "type": "object",
+  "oneOf": [
+    { "$ref": "#/definitions/v30Metadata" },
+    { "$ref": "#/definitions/v40Metadata" }
+  ]
+}
diff --git a/dcos-universe/repo/meta/schema/package-schema.json b/dcos-universe/repo/meta/schema/package-schema.json
new file mode 100644
index 0000000..bb8bb2a
--- /dev/null
+++ b/dcos-universe/repo/meta/schema/package-schema.json
@@ -0,0 +1,339 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+
+  "definitions": {
+
+    "dcosReleaseVersion": {
+      "type": "string",
+      "pattern": "^(?:0|[1-9][0-9]*)(?:\\.(?:0|[1-9][0-9]*))*$",
+      "description": "A string representation of a DC/OS Release Version"
+    },
+
+    "url": {
+      "type": "string",
+      "allOf": [
+        { "format": "uri" },
+        { "pattern": "^https?://" }
+      ]
+    },
+
+
+    "v20Package": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["2.0"],
+          "description": "The version of the packaging format. Please do not use this version. It is only here for backwards compatibility. We recommend you use the latest version."
+        },
+        "name": {
+          "type": "string",
+          "pattern": "^[a-z][a-z0-9-]*[a-z0-9]$",
+          "description": "The name of the package."
+        },
+        "version": {
+          "type": "string",
+          "description": "The version of the package. We recommend that you use semantic versions. See http://semver.org/ for more details on semantic versions."
+        },
+        "scm": {
+          "type": "string",
+          "description": "Source control management. This is generally the URL of the project GitHub repository."
+        },
+        "maintainer": {
+          "type": "string",
+          "description": "The maintainer of the package. This is generally an email address."
+        },
+        "website": {
+          "type": "string",
+          "description": "URL to the website of a project. This is generally the URL of the project GitHub repository."
+        },
+        "description": {
+          "type": "string",
+          "description": "Detailed description of what the package does."
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$"
+          },
+          "description": "A list of keywords associated with the package."
+        },
+        "selected": {
+          "type": "boolean",
+          "description": "Flag indicating if the package is selected in search results",
+          "default": false
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url",
+                "description": "The URL where the license can be accessed"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    },
+
+    "v30Package": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["3.0"],
+          "description": "The version of the packaging format."
+        },
+        "name": {
+          "type": "string",
+          "pattern": "^[a-z][a-z0-9-]*[a-z0-9]$",
+          "description": "The name of the package."
+        },
+        "version": {
+          "type": "string",
+          "pattern": "^[-a-zA-Z0-9.]+$",
+          "description": "The version of the package. We recommend that you use semantic versions. See http://semver.org/ for details on semantic versions."
+        },
+        "scm": {
+          "type": "string",
+          "description": "Source control management. This is generally the URL of the project GitHub repository."
+        },
+        "maintainer": {
+          "type": "string",
+          "description": "The maintainer of the package. This is generally an email address."
+        },
+        "website": {
+          "type": "string",
+          "description": "URL to the website of a project. This is generally the URL of the project GitHub repository."
+        },
+        "description": {
+          "type": "string",
+          "description": "Detailed description of what the package does."
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$"
+          },
+          "description": "A list of keywords associated with the package."
+        },
+        "selected": {
+          "type": "boolean",
+          "description": "Flag indicating if the package is selected in search results",
+          "default": false
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url",
+                "description": "The URL where the license can be accessed"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        },
+        "minDcosReleaseVersion": {
+          "$ref": "#/definitions/dcosReleaseVersion",
+          "description": "The minimum DC/OS Release Version the package can run on."
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    },
+
+    "v40Package": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["4.0"],
+          "description": "The version of the packaging format."
+        },
+        "name": {
+          "type": "string",
+          "pattern": "^[a-z][a-z0-9-]*[a-z0-9]$",
+          "description": "The name of the package."
+        },
+        "version": {
+          "type": "string",
+          "pattern": "^[-a-zA-Z0-9.]+$",
+          "description": "The version of the package. We recommend that you use semantic versions. See http://semver.org/ for details on semantic versions."
+        },
+        "scm": {
+          "type": "string",
+          "description": "Source control management. This is generally the URL of the project GitHub repository."
+        },
+        "maintainer": {
+          "type": "string",
+          "description": "The maintainer of the package. This is generally an email address."
+        },
+        "website": {
+          "type": "string",
+          "description": "URL to the website of a project. This is generally the URL of the project GitHub repository."
+        },
+        "description": {
+          "type": "string",
+          "description": "Detailed description of what the package does."
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "upgradesFrom": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "description": "List of versions that can upgrade to this package. If the property is a list containing the string '*', any version can upgrade to this package. If the property is not set or the empty list, no version can upgrade to this package."
+        },
+        "downgradesTo": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "description": "List of versions that this package can downgrade to. If the property is a list containing the string '*', this package can downgrade to any version. If the property is not set or the empty list, this package cannot downgrade."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$",
+          "description": "A list of keywords associated with the package."
+          }
+        },
+        "selected": {
+          "type": "boolean",
+          "description": "Flag indicating if the package is selected in search results",
+          "default": false
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url",
+                "description": "The URL where the license can be accessed"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        },
+        "minDcosReleaseVersion": {
+          "$ref": "#/definitions/dcosReleaseVersion",
+          "description": "The minimum DC/OS Release Version the package can run on."
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    }
+
+  },
+
+  "type": "object",
+  "oneOf": [
+    { "$ref": "#/definitions/v20Package" },
+    { "$ref": "#/definitions/v30Package" },
+    { "$ref": "#/definitions/v40Package" }
+  ]
+
+}
diff --git a/dcos-universe/repo/meta/schema/v2-resource-schema.json b/dcos-universe/repo/meta/schema/v2-resource-schema.json
new file mode 100644
index 0000000..c6d6020
--- /dev/null
+++ b/dcos-universe/repo/meta/schema/v2-resource-schema.json
@@ -0,0 +1,55 @@
+{
+  "additionalProperties": false,
+  "properties": {
+    "images": {
+      "type": "object",
+      "properties": {
+        "screenshots": {
+          "items": {
+            "type": "string",
+            "description": "PNG screen URL, preferably 1024 by 1024 pixels."
+          },
+          "type": "array"
+        },
+        "icon-large": {
+          "type": "string",
+          "description": "PNG icon URL, preferably 256 by 256 pixels."
+        },
+        "icon-small": {
+          "type": "string",
+          "description": "PNG icon URL, preferably 48 by 48 pixels."
+        },
+        "icon-medium": {
+          "type": "string",
+          "description": "PNG icon URL, preferably 128 by 128 pixels."
+        }
+      },
+      "additionalProperties": false
+    },
+    "assets": {
+      "type": "object",
+      "properties": {
+        "container": {
+          "type": "object",
+          "properties": {
+            "docker": {
+              "type": "object",
+              "additionalProperties": {
+                "type": "string"
+              }
+            }
+          },
+          "additionalProperties": false
+        },
+        "uris": {
+          "type": "object",
+          "additionalProperties": {
+            "type": "string"
+          }
+        }
+      },
+      "additionalProperties": false
+    }
+  },
+  "$schema": "http://json-schema.org/draft-04/schema#"
+}
diff --git a/dcos-universe/repo/meta/schema/v3-repo-schema.json b/dcos-universe/repo/meta/schema/v3-repo-schema.json
new file mode 100644
index 0000000..aff778a
--- /dev/null
+++ b/dcos-universe/repo/meta/schema/v3-repo-schema.json
@@ -0,0 +1,507 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+
+  "definitions": {
+
+
+    "dcosReleaseVersion": {
+      "type": "string",
+      "pattern": "^(?:0|[1-9][0-9]*)(?:\\.(?:0|[1-9][0-9]*))*$",
+      "description": "A string representation of a DC/OS Release Version"
+    },
+
+    "url": {
+      "type": "string",
+      "allOf": [
+        { "format": "uri" },
+        { "pattern": "^https?://" }
+      ]
+    },
+
+    "base64String": {
+      "type": "string",
+      "pattern": "^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{4}|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)$"
+    },
+
+    "cliInfo": {
+      "additionalProperties": false,
+      "properties": {
+        "contentHash": {
+          "items": {
+            "$ref": "#/definitions/hash"
+          },
+          "minItems": 1,
+          "type": "array"
+        },
+        "kind": {
+          "enum": [
+            "executable",
+            "zip"
+          ],
+          "type": "string"
+        },
+        "url": {
+          "$ref": "#/definitions/url"
+        }
+      },
+      "required": [
+        "url",
+        "kind",
+        "contentHash"
+      ],
+      "type": "object"
+    },
+
+    "hash": {
+      "additionalProperties": false,
+      "properties": {
+        "algo": {
+          "enum": [
+            "sha256"
+          ],
+          "type": "string"
+        },
+        "value": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "algo",
+        "value"
+      ],
+      "type": "object"
+    },
+
+
+    "marathon": {
+      "type": "object",
+      "properties": {
+        "v2AppMustacheTemplate": {
+          "$ref": "#/definitions/base64String"
+        }
+      },
+      "required": [ "v2AppMustacheTemplate" ],
+      "additionalProperties": false
+    },
+
+
+
+    "v20resource": {
+      "additionalProperties": false,
+      "type": "object",
+      "properties": {
+        "assets": {
+          "type": "object",
+          "properties": {
+            "uris": {
+              "type": "object",
+              "additionalProperties": {
+                "type": "string"
+              }
+            },
+            "container": {
+              "type": "object",
+              "properties": {
+                "docker": {
+                  "type": "object",
+                  "additionalProperties": {
+                    "type": "string"
+                  }
+                }
+              },
+              "additionalProperties": false
+            }
+          },
+          "additionalProperties": false
+        },
+        "images": {
+          "type": "object",
+          "properties": {
+            "icon-small": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 48 by 48 pixels."
+            },
+            "icon-medium": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 128 by 128 pixels."
+            },
+            "icon-large": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 256 by 256 pixels."
+            },
+            "screenshots": {
+              "type": "array",
+              "items": {
+                "type": "string",
+                "description": "PNG screen URL, preferably 1024 by 1024 pixels."
+              }
+            }
+          },
+          "additionalProperties": false
+        }
+      }
+    },
+
+
+    "v30resource": {
+      "additionalProperties": false,
+      "type": "object",
+      "properties": {
+        "assets": {
+          "type": "object",
+          "properties": {
+            "uris": {
+              "type": "object",
+              "additionalProperties": {
+                "type": "string"
+              }
+            },
+            "container": {
+              "type": "object",
+              "properties": {
+                "docker": {
+                  "type": "object",
+                  "additionalProperties": {
+                    "type": "string"
+                  }
+                }
+              },
+              "additionalProperties": false
+            }
+          },
+          "additionalProperties": false
+        },
+        "cli": {
+          "additionalProperties": false,
+          "properties": {
+            "binaries": {
+              "additionalProperties": false,
+              "minProperties": 1,
+              "properties": {
+                "darwin": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                },
+                "linux": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                },
+                "windows": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                }
+              },
+              "type": "object"
+            }
+          },
+          "required": [
+              "binaries"
+          ],
+          "type": "object"
+        },
+        "images": {
+          "type": "object",
+          "properties": {
+            "icon-small": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 48 by 48 pixels."
+            },
+            "icon-medium": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 128 by 128 pixels."
+            },
+            "icon-large": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 256 by 256 pixels."
+            },
+            "screenshots": {
+              "type": "array",
+              "items": {
+                "type": "string",
+                "description": "PNG screen URL, preferably 1024 by 1024 pixels."
+              }
+            }
+          },
+          "additionalProperties": false
+        }
+      }
+    },
+
+
+    "config": {
+      "$ref": "http://json-schema.org/draft-04/schema#"
+    },
+
+
+    "command": {
+      "additionalProperties": false,
+      "required": ["pip"],
+      "properties": {
+        "pip": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "title": "Embedded Requirements File",
+          "description": "[Deprecated v3.x] An array of strings representing of the requirements file to use for installing the subcommand for Pip. Each item is interpreted as a line in the requirements file."
+        }
+      }
+    },
+
+
+
+    "v20Package": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["2.0"]
+        },
+        "name": {
+          "type": "string"
+        },
+        "version": {
+          "type": "string"
+        },
+        "releaseVersion": {
+          "type": "integer",
+          "description": "Corresponds to the revision index from the universe directory structure",
+          "minimum": 0
+        },
+        "scm": {
+          "type": "string"
+        },
+        "maintainer": {
+          "type": "string"
+        },
+        "website": {
+          "type": "string"
+        },
+        "description": {
+          "type": "string"
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$"
+          }
+        },
+        "selected": {
+          "type": "boolean",
+          "description": "Flag indicating if the package is selected in search results",
+          "default": false
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        },
+        "marathon": {
+          "$ref": "#/definitions/marathon"
+        },
+        "resource": {
+          "$ref": "#/definitions/v20resource"
+        },
+        "config": {
+          "$ref": "#/definitions/config"
+        },
+        "command": {
+          "$ref": "#/definitions/command"
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    },
+
+    "v30Package": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["3.0"]
+        },
+        "name": {
+          "type": "string"
+        },
+        "version": {
+          "type": "string",
+          "pattern": "^[-a-zA-Z0-9.]+$"
+        },
+        "releaseVersion": {
+          "type": "integer",
+          "description": "Corresponds to the revision index from the universe directory structure",
+          "minimum": 0
+        },
+        "scm": {
+          "type": "string"
+        },
+        "maintainer": {
+          "type": "string"
+        },
+        "website": {
+          "type": "string"
+        },
+        "description": {
+          "type": "string"
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$"
+          }
+        },
+        "selected": {
+          "type": "boolean",
+          "description": "Flag indicating if the package is selected in search results",
+          "default": false
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url",
+                "description": "The URL where the license can be accessed"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        },
+        "minDcosReleaseVersion": {
+          "$ref": "#/definitions/dcosReleaseVersion",
+          "description": "The minimum DC/OS Release Version the package can run on."
+        },
+        "marathon": {
+          "$ref": "#/definitions/marathon"
+        },
+        "resource": {
+          "$ref": "#/definitions/v30resource"
+        },
+        "config": {
+          "$ref": "#/definitions/config"
+        },
+        "command": {
+          "$ref": "#/definitions/command"
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "releaseVersion",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    }
+
+  },
+
+  "type": "object",
+  "properties": {
+    "packages": {
+      "type": "array",
+      "description": "The list of packages in the repo",
+      "items": {
+        "oneOf": [
+          { "$ref": "#/definitions/v20Package" },
+          { "$ref": "#/definitions/v30Package" }
+        ]
+      }
+    }
+  },
+  "required": [
+    "packages"
+  ],
+  "additionalProperties": false
+}
diff --git a/dcos-universe/repo/meta/schema/v3-resource-schema.json b/dcos-universe/repo/meta/schema/v3-resource-schema.json
new file mode 100644
index 0000000..f5c5d5d
--- /dev/null
+++ b/dcos-universe/repo/meta/schema/v3-resource-schema.json
@@ -0,0 +1,172 @@
+{
+  "additionalProperties": false,
+  "definitions": {
+    "cliInfo": {
+      "required": [
+        "url",
+        "kind",
+        "contentHash"
+      ],
+      "properties": {
+        "url": {
+          "type": "string",
+          "description": "URL for the binary CLI."
+        },
+        "contentHash": {
+          "items": {
+            "$ref": "#/definitions/hash"
+          },
+          "minItems": 1,
+          "type": "array",
+          "description": "List of hashes for the binary CLI that are described in the URL property."
+        },
+        "kind": {
+          "enum": [
+            "executable",
+            "zip"
+          ],
+          "type": "string",
+          "description": "The format of the CLI."
+        }
+      },
+      "additionalProperties": false,
+      "type": "object",
+      "description": "Schema for adding a supported CLI to your package."
+    },
+    "hash": {
+      "required": [
+        "algo",
+        "value"
+      ],
+      "properties": {
+        "algo": {
+          "enum": [
+            "sha256"
+          ],
+          "type": "string",
+          "description": "The algorithm used to compute the hash."
+        },
+        "value": {
+          "type": "string",
+          "description": "The value of the hash."
+        }
+      },
+      "additionalProperties": false,
+      "type": "object",
+      "description": "CLI validation."
+    }
+  },
+  "properties": {
+    "assets": {
+      "properties": {
+        "uris": {
+          "additionalProperties": {
+            "type": "string"
+          },
+          "type": "object",
+          "description": "This is a mapping of aliases to HTTP resources. The aliases enumerated here are available during resolution of the marathon.json.mustache file."
+        },
+        "container": {
+          "properties": {
+            "docker": {
+              "additionalProperties": {
+                "type": "string"
+              },
+              "type": "object"
+            }
+          },
+          "additionalProperties": false,
+          "type": "object",
+          "description": "This is a mapping of aliases to Docker images. The aliases enumerated here are available during resolution of marathon.json.mustache file."
+        }
+      },
+      "additionalProperties": false,
+      "type": "object",
+      "description": "Object that enumerates all of the required package assets."
+    },
+    "images": {
+      "properties": {
+        "icon-small": {
+          "description": "PNG icon URL, preferably 48 by 48 pixels.",
+          "type": "string"
+        },
+        "icon-large": {
+          "description": "PNG icon URL, preferably 256 by 256 pixels.",
+          "type": "string"
+        },
+        "screenshots": {
+          "items": {
+            "description": "PNG screen URL, preferably 1024 by 1024 pixels.",
+            "type": "string"
+          },
+          "type": "array"
+        },
+        "icon-medium": {
+          "description": "PNG icon URL, preferably 128 by 128 pixels.",
+          "type": "string"
+        }
+      },
+      "additionalProperties": false,
+      "type": "object",
+      "description": "Images used by DC/OS to display the package."
+    },
+    "cli": {
+      "required": [
+        "binaries"
+      ],
+      "properties": {
+        "binaries": {
+          "minProperties": 1,
+          "properties": {
+            "linux": {
+              "required": [
+                "x86-64"
+              ],
+              "properties": {
+                "x86-64": {
+                  "$ref": "#/definitions/cliInfo"
+                }
+              },
+              "additionalProperties": false,
+              "type": "object",
+              "description": "Schema for the Linux CLI."
+            },
+            "windows": {
+              "required": [
+                "x86-64"
+              ],
+              "properties": {
+                "x86-64": {
+                  "$ref": "#/definitions/cliInfo"
+                }
+              },
+              "additionalProperties": false,
+              "type": "object",
+              "description": "Schema for the Windows CLI."
+            },
+            "darwin": {
+              "required": [
+                "x86-64"
+              ],
+              "properties": {
+                "x86-64": {
+                  "$ref": "#/definitions/cliInfo"
+                }
+              },
+              "additionalProperties": false,
+              "type": "object",
+              "description": "Schema for the MacOS (darwin) CLI."
+            }
+          },
+          "additionalProperties": false,
+          "type": "object",
+          "description": "Enumerates all of the available binaries in this package."
+        }
+      },
+      "additionalProperties": false,
+      "type": "object",
+      "description": "Object that describes all of the required externally hosted assets for this package."
+    }
+  },
+  "$schema": "http://json-schema.org/draft-04/schema#"
+}
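
For reference, a minimal resource document accepted by this schema needs only the blocks it actually uses, since no root-level "required" is set. A placeholder example (values are illustrative and not tied to any package in this repo):

    {
      "assets": {
        "container": {
          "docker": {
            "example-docker": "example/image:1.0.0"
          }
        }
      },
      "images": {
        "icon-small": "https://example.com/icon-small.png",
        "icon-large": "https://example.com/icon-large.png"
      }
    }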
diff --git a/dcos-universe/repo/meta/schema/v4-repo-schema.json b/dcos-universe/repo/meta/schema/v4-repo-schema.json
new file mode 100644
index 0000000..2a26772
--- /dev/null
+++ b/dcos-universe/repo/meta/schema/v4-repo-schema.json
@@ -0,0 +1,629 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+
+  "definitions": {
+
+
+    "dcosReleaseVersion": {
+      "type": "string",
+      "pattern": "^(?:0|[1-9][0-9]*)(?:\\.(?:0|[1-9][0-9]*))*$",
+      "description": "A string representation of a DC/OS Release Version"
+    },
+
+    "url": {
+      "type": "string",
+      "allOf": [
+        { "format": "uri" },
+        { "pattern": "^https?://" }
+      ]
+    },
+
+    "base64String": {
+      "type": "string",
+      "pattern": "^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{4}|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)$"
+    },
+
+    "cliInfo": {
+      "additionalProperties": false,
+      "properties": {
+        "contentHash": {
+          "items": {
+            "$ref": "#/definitions/hash"
+          },
+          "minItems": 1,
+          "type": "array"
+        },
+        "kind": {
+          "enum": [
+            "executable",
+            "zip"
+          ],
+          "type": "string"
+        },
+        "url": {
+          "$ref": "#/definitions/url"
+        }
+      },
+      "required": [
+        "url",
+        "kind",
+        "contentHash"
+      ],
+      "type": "object"
+    },
+
+    "hash": {
+      "additionalProperties": false,
+      "properties": {
+        "algo": {
+          "enum": [
+            "sha256"
+          ],
+          "type": "string"
+        },
+        "value": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "algo",
+        "value"
+      ],
+      "type": "object"
+    },
+
+
+    "marathon": {
+      "type": "object",
+      "properties": {
+        "v2AppMustacheTemplate": {
+          "$ref": "#/definitions/base64String"
+        }
+      },
+      "required": [ "v2AppMustacheTemplate" ],
+      "additionalProperties": false
+    },
+
+
+
+    "v20resource": {
+      "additionalProperties": false,
+      "type": "object",
+      "properties": {
+        "assets": {
+          "type": "object",
+          "properties": {
+            "uris": {
+              "type": "object",
+              "additionalProperties": {
+                "type": "string"
+              }
+            },
+            "container": {
+              "type": "object",
+              "properties": {
+                "docker": {
+                  "type": "object",
+                  "additionalProperties": {
+                    "type": "string"
+                  }
+                }
+              },
+              "additionalProperties": false
+            }
+          },
+          "additionalProperties": false
+        },
+        "images": {
+          "type": "object",
+          "properties": {
+            "icon-small": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 48 by 48 pixels."
+            },
+            "icon-medium": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 128 by 128 pixels."
+            },
+            "icon-large": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 256 by 256 pixels."
+            },
+            "screenshots": {
+              "type": "array",
+              "items": {
+                "type": "string",
+                "description": "PNG screen URL, preferably 1024 by 1024 pixels."
+              }
+            }
+          },
+          "additionalProperties": false
+        }
+      }
+    },
+
+
+    "v30resource": {
+      "additionalProperties": false,
+      "type": "object",
+      "properties": {
+        "assets": {
+          "type": "object",
+          "properties": {
+            "uris": {
+              "type": "object",
+              "additionalProperties": {
+                "type": "string"
+              }
+            },
+            "container": {
+              "type": "object",
+              "properties": {
+                "docker": {
+                  "type": "object",
+                  "additionalProperties": {
+                    "type": "string"
+                  }
+                }
+              },
+              "additionalProperties": false
+            }
+          },
+          "additionalProperties": false
+        },
+        "cli": {
+          "additionalProperties": false,
+          "properties": {
+            "binaries": {
+              "additionalProperties": false,
+              "minProperties": 1,
+              "properties": {
+                "darwin": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                },
+                "linux": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                },
+                "windows": {
+                  "additionalProperties": false,
+                  "properties": {
+                    "x86-64": {
+                      "$ref": "#/definitions/cliInfo"
+                    }
+                  },
+                  "required": [
+                    "x86-64"
+                  ],
+                  "type": "object"
+                }
+              },
+              "type": "object"
+            }
+          },
+          "required": [
+              "binaries"
+          ],
+          "type": "object"
+        },
+        "images": {
+          "type": "object",
+          "properties": {
+            "icon-small": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 48 by 48 pixels."
+            },
+            "icon-medium": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 128 by 128 pixels."
+            },
+            "icon-large": {
+              "type": "string",
+              "description": "PNG icon URL, preferably 256 by 256 pixels."
+            },
+            "screenshots": {
+              "type": "array",
+              "items": {
+                "type": "string",
+                "description": "PNG screen URL, preferably 1024 by 1024 pixels."
+              }
+            }
+          },
+          "additionalProperties": false
+        }
+      }
+    },
+
+
+    "config": {
+      "$ref": "http://json-schema.org/draft-04/schema#"
+    },
+
+
+    "command": {
+      "additionalProperties": false,
+      "required": ["pip"],
+      "properties": {
+        "pip": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "title": "Embedded Requirements File",
+          "description": "[Deprecated v3.x] An array of strings representing of the requirements file to use for installing the subcommand for Pip. Each item is interpreted as a line in the requirements file."
+        }
+      }
+    },
+
+
+
+    "v20Package": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["2.0"]
+        },
+        "name": {
+          "type": "string"
+        },
+        "version": {
+          "type": "string"
+        },
+        "releaseVersion": {
+          "type": "integer",
+          "description": "Corresponds to the revision index from the universe directory structure",
+          "minimum": 0
+        },
+        "scm": {
+          "type": "string"
+        },
+        "maintainer": {
+          "type": "string"
+        },
+        "website": {
+          "type": "string"
+        },
+        "description": {
+          "type": "string"
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$"
+          }
+        },
+        "selected": {
+          "type": "boolean",
+          "description": "Flag indicating if the package is selected in search results",
+          "default": false
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        },
+        "marathon": {
+          "$ref": "#/definitions/marathon"
+        },
+        "resource": {
+          "$ref": "#/definitions/v20resource"
+        },
+        "config": {
+          "$ref": "#/definitions/config"
+        },
+        "command": {
+          "$ref": "#/definitions/command"
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    },
+
+    "v30Package": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["3.0"]
+        },
+        "name": {
+          "type": "string"
+        },
+        "version": {
+          "type": "string",
+          "pattern": "^[-a-zA-Z0-9.]+$"
+        },
+        "releaseVersion": {
+          "type": "integer",
+          "description": "Corresponds to the revision index from the universe directory structure",
+          "minimum": 0
+        },
+        "scm": {
+          "type": "string"
+        },
+        "maintainer": {
+          "type": "string"
+        },
+        "website": {
+          "type": "string"
+        },
+        "description": {
+          "type": "string"
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$"
+          }
+        },
+        "selected": {
+          "type": "boolean",
+          "description": "Flag indicating if the package is selected in search results",
+          "default": false
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url",
+                "description": "The URL where the license can be accessed"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        },
+        "minDcosReleaseVersion": {
+          "$ref": "#/definitions/dcosReleaseVersion",
+          "description": "The minimum DC/OS Release Version the package can run on."
+        },
+        "marathon": {
+          "$ref": "#/definitions/marathon"
+        },
+        "resource": {
+          "$ref": "#/definitions/v30resource"
+        },
+        "config": {
+          "$ref": "#/definitions/config"
+        },
+        "command": {
+          "$ref": "#/definitions/command"
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "releaseVersion",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    },
+
+    "v40Package": {
+      "properties": {
+        "packagingVersion": {
+          "type": "string",
+          "enum": ["4.0"]
+        },
+        "name": {
+          "type": "string"
+        },
+        "version": {
+          "type": "string",
+          "pattern": "^[-a-zA-Z0-9.]+$"
+        },
+        "releaseVersion": {
+          "type": "integer",
+          "description": "Corresponds to the revision index from the universe directory structure",
+          "minimum": 0
+        },
+        "scm": {
+          "type": "string"
+        },
+        "maintainer": {
+          "type": "string"
+        },
+        "website": {
+          "type": "string"
+        },
+        "description": {
+          "type": "string"
+        },
+        "framework": {
+          "type": "boolean",
+          "default": false,
+          "description": "True if this package installs a new Mesos framework."
+        },
+        "upgradesFrom": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "description": "List of versions that can upgrade to this package. If the property is a list containing the string '*', any version can upgrade to this package. If the property is not set or the empty list, no version can upgrade to this package."
+        },
+        "downgradesTo": {
+          "type": "array",
+          "items": {
+            "type": "string"
+          },
+          "description": "List of versions that this package can downgrade to. If the property is a list containing the string '*', this package can downgrade to any version. If the property is not set or the empty list, this package cannot downgrade."
+        },
+        "preInstallNotes": {
+          "type": "string",
+          "description": "Pre installation notes that would be useful to the user of this package."
+        },
+        "postInstallNotes": {
+          "type": "string",
+          "description": "Post installation notes that would be useful to the user of this package."
+        },
+        "postUninstallNotes": {
+          "type": "string",
+          "description": "Post uninstallation notes that would be useful to the user of this package."
+        },
+        "tags": {
+          "type": "array",
+          "items": {
+            "type": "string",
+            "pattern": "^[^\\s]+$"
+          }
+        },
+        "selected": {
+          "type": "boolean",
+          "description": "Flag indicating if the package is selected in search results",
+          "default": false
+        },
+        "licenses": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "properties": {
+              "name": {
+                "type": "string",
+                "description": "The name of the license. For example one of [Apache License Version 2.0 | MIT License | BSD License | Proprietary]"
+              },
+              "url": {
+                "$ref": "#/definitions/url",
+                "description": "The URL where the license can be accessed"
+              }
+            },
+            "additionalProperties": false,
+            "required": [
+              "name",
+              "url"
+            ]
+          }
+        },
+        "minDcosReleaseVersion": {
+          "$ref": "#/definitions/dcosReleaseVersion",
+          "description": "The minimum DC/OS Release Version the package can run on."
+        },
+        "marathon": {
+          "$ref": "#/definitions/marathon"
+        },
+        "resource": {
+          "$ref": "#/definitions/v30resource"
+        },
+        "config": {
+          "$ref": "#/definitions/config"
+        }
+      },
+      "required": [
+        "packagingVersion",
+        "name",
+        "version",
+        "releaseVersion",
+        "maintainer",
+        "description",
+        "tags"
+      ],
+      "additionalProperties": false
+    }
+
+
+  },
+
+  "type": "object",
+  "properties": {
+    "packages": {
+      "type": "array",
+      "description": "The list of packages in the repo",
+      "items": {
+        "oneOf": [
+          { "$ref": "#/definitions/v20Package" },
+          { "$ref": "#/definitions/v30Package" },
+          { "$ref": "#/definitions/v40Package" }
+        ]
+      }
+    }
+  },
+  "required": [
+    "packages"
+  ],
+  "additionalProperties": false
+}
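
As a quick sanity check on the shape this repo schema expects, a minimal index containing a single packagingVersion 4.0 entry carries just the required fields; all values below are placeholders:

    {
      "packages": [
        {
          "packagingVersion": "4.0",
          "name": "example-service",
          "version": "0.1.0",
          "releaseVersion": 0,
          "maintainer": "someone@example.com",
          "description": "Placeholder package entry",
          "tags": ["example"]
        }
      ]
    }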
diff --git a/dcos-universe/repo/packages/A/apigateway/0/config.json b/dcos-universe/repo/packages/A/apigateway/0/config.json
new file mode 100644
index 0000000..e933d7d
--- /dev/null
+++ b/dcos-universe/repo/packages/A/apigateway/0/config.json
@@ -0,0 +1,93 @@
+{
+  "properties": {
+    "service": {
+      "description": "Service configuration for API Gateway",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "Display name for the service on the DC/OS dashboard",
+          "type": "string",
+          "default": "apigateway"
+        },
+        "cpus": {
+          "description": "CPU allocation for the API Gateway instance",
+          "type": "number",
+          "default": 0.5,
+          "minimum": 0.1
+        },
+        "mem": {
+          "description": "Memory (MB) allocation for the API Gateway instance",
+          "type": "number",
+          "default": 256.0,
+          "minimum": 128.0
+        },
+        "instances": {
+          "description": "Number of API Gateway instances to deploy (one per public agent)",
+          "type": "integer",
+          "default": 1
+        }
+      },
+      "required":[
+        "name",
+        "cpus",
+        "mem",
+        "instances"
+      ]
+    },
+    "environment": {
+      "description": "Environment configuration for API Gateway",
+      "properties": {
+        "marathonHost": {
+          "description": "Specify Marathon endpoint URL to include app name endpoints. This is used for service discovery.",
+          "type": "string",
+          "default": "http://marathon.mesos:8080"
+        }
+      }
+    },
+    "advanced": {
+      "description": "Advanced configuration for API Gateway service",
+      "type": "object",
+      "properties": {
+        "logLevel": {
+          "description": "Specify API Gateway log level (info or debug)",
+          "type": "string",
+          "default": "info"
+        },
+        "remoteConfigAWSKey": {
+          "description": "AWS key if using remote config with S3 source",
+          "type": "string"
+        },
+        "remoteConfigAWSSecret": {
+          "description": "AWS secret if using remote config with S3 source",
+          "type": "string"
+        },
+        "remoteConfig": {
+          "description": "Remote config sync source for all config files (s3://<path>, file://<path> or /<path>)",
+          "type": "string"
+        },
+        "remoteConfigGenerated": {
+          "description": "Remote config sync source for generated files only (s3://<path>, file://<path> or /<path>)",
+          "type": "string"
+        },
+        "volumeContainerPath": {
+          "description": "Path in container to mount for temp files",
+          "type": "string",
+          "default": "/var/tmp/apigateway"
+        },
+        "volumeHostPath": {
+          "description": "Path on host to mount for temp files",
+          "type": "string",
+          "default": "/tmp/apigateway"
+        },
+        "acceptedResourceRoles": {
+          "description": "Specify on which node to run (i.e. *, slave_public)",
+          "type": "string",
+          "default": "slave_public"
+        }
+      },
+      "required":[
+        "logLevel"
+      ]
+    }
+  }
+}
diff --git a/dcos-universe/repo/packages/A/apigateway/0/marathon.json.mustache b/dcos-universe/repo/packages/A/apigateway/0/marathon.json.mustache
new file mode 100644
index 0000000..babb675
--- /dev/null
+++ b/dcos-universe/repo/packages/A/apigateway/0/marathon.json.mustache
@@ -0,0 +1,60 @@
+{
+  "id": "/{{service.name}}",
+  "cpus": {{service.cpus}},
+  "mem": {{service.mem}},
+  "instances": {{service.instances}},
+  "constraints": [
+    ["hostname","UNIQUE"]
+  ],
+  "acceptedResourceRoles": [
+    "{{advanced.acceptedResourceRoles}}"
+  ],
+  "env": {
+    "MARATHON_HOST": "{{environment.marathonHost}}",
+    {{#advanced.remoteConfigAWSKey}}
+    "AWS_ACCESS_KEY_ID": "{{advanced.remoteConfigAWSKey}}",
+    {{/advanced.remoteConfigAWSKey}}
+    {{#advanced.remoteConfigAWSSecret}}
+    "AWS_SECRET_ACCESS_KEY": "{{advanced.remoteConfigAWSSecret}}",
+    {{/advanced.remoteConfigAWSSecret}}
+    {{#advanced.remoteConfig}}
+    "REMOTE_CONFIG": "{{advanced.remoteConfig}}",
+    {{/advanced.remoteConfig}}
+    {{#advanced.remoteConfigGenerated}}
+    "REMOTE_CONFIG_GENERATED": "{{advanced.remoteConfigGenerated}}",
+    {{/advanced.remoteConfigGenerated}}
+    "LOG_LEVEL": "{{advanced.logLevel}}"
+  },
+  "container": {
+    "type": "DOCKER",
+    "docker": {
+      "image": "{{resource.assets.container.docker.apigateway-docker}}",
+      "network": "HOST"
+    },
+    "volumes": [
+      {
+        "containerPath": "{{advanced.volumeContainerPath}}",
+        "hostPath": "{{advanced.volumeHostPath}}",
+        "mode": "RW"
+      }
+    ]
+  },
+  "healthChecks": [
+    {
+      "command": {
+        "value": "curl -f -X GET http://$HOST:80/health-check"
+      },
+      "gracePeriodSeconds": 300,
+      "intervalSeconds": 60,
+      "maxConsecutiveFailures": 3,
+      "protocol": "COMMAND",
+      "timeoutSeconds": 20,
+      "delaySeconds": 15
+    }
+  ],
+  "labels": {
+    "DCOS_SERVICE_NAME": "{{service.name}}",
+    "DCOS_SERVICE_SCHEME": "http",
+    "DCOS_SERVICE_PORT_INDEX": "0"
+  }
+}
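
Rendered with the defaults from this package's config.json and the apigateway-docker alias from its resource.json, the template above produces roughly the following app definition (fragment shown; the AWS and remote-config sections drop out because those options have no defaults):

    {
      "id": "/apigateway",
      "cpus": 0.5,
      "mem": 256.0,
      "instances": 1,
      "acceptedResourceRoles": ["slave_public"],
      "env": {
        "MARATHON_HOST": "http://marathon.mesos:8080",
        "LOG_LEVEL": "info"
      },
      "container": {
        "type": "DOCKER",
        "docker": {
          "image": "adobeapiplatform/apigateway:1.1.0",
          "network": "HOST"
        }
      }
    }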
diff --git a/dcos-universe/repo/packages/A/apigateway/0/package.json b/dcos-universe/repo/packages/A/apigateway/0/package.json
new file mode 100644
index 0000000..7f72369
--- /dev/null
+++ b/dcos-universe/repo/packages/A/apigateway/0/package.json
@@ -0,0 +1,18 @@
+{
+  "packagingVersion": "4.0",
+  "description": "Adobe I/O API gateway",
+  "framework": false,
+  "maintainer": "dunguyen@adobe.com",
+  "name": "apigateway",
+  "postInstallNotes": "API Gateway has been installed. You might have to stop marathon-lb app if installed due to conflicts on the ports.",
+  "postUninstallNotes": "API Gateway has been uninstalled",
+  "scm": "https://github.com/adobe-apiplatform/apigateway",
+  "tags": [
+    "api",
+    "gateway",
+    "server"
+  ],
+  "version": "1.1.0",
+  "website": "https://github.com/adobe-apiplatform/apigateway",
+  "selected": true
+}
diff --git a/dcos-universe/repo/packages/A/apigateway/0/resource.json b/dcos-universe/repo/packages/A/apigateway/0/resource.json
new file mode 100644
index 0000000..d57b8c6
--- /dev/null
+++ b/dcos-universe/repo/packages/A/apigateway/0/resource.json
@@ -0,0 +1,14 @@
+{
+  "images": {
+    "icon-small": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-small.png?raw=true",
+    "icon-medium": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-medium.png?raw=true",
+    "icon-large": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-large.png?raw=true"
+  },
+  "assets": {
+    "container": {
+      "docker": {
+        "apigateway-docker": "adobeapiplatform/apigateway:1.1.0"
+      }
+    }
+  }
+}
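
The defaults in config.json can be overridden at install time with an options file (for example: dcos package install apigateway --options=options.json). A small override that raises the instance count, enables debug logging, and accepts any resource role might look like this; the values are illustrative and the file is merged with the defaults above:

    {
      "service": {
        "instances": 2
      },
      "advanced": {
        "logLevel": "debug",
        "acceptedResourceRoles": "*"
      }
    }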
diff --git a/dcos-universe/repo/packages/W/whisk-controller/0/config.json b/dcos-universe/repo/packages/W/whisk-controller/0/config.json
new file mode 100644
index 0000000..ea75784
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-controller/0/config.json
@@ -0,0 +1,356 @@
+{
+  "type": "object",
+  "properties": {
+    "service": {
+      "description": "OpenWhisk controller Configuration Properties",
+      "type": "object",
+      "properties": {
+        "port": {
+          "description": "Host port for OpenWhisk controller (default = 8888).",
+          "type": "integer",
+          "default": 8888,
+          "minimum": 0
+        },
+        "cpus": {
+          "description": "CPU shares to allocate to each OpenWhisk controller instance.",
+          "type": "number",
+          "minimum": 0.1,
+          "default": 1
+        },
+        "mem": {
+          "description": "Memory (MB) to allocate to each OpenWhisk controller instance.",
+          "type": "number",
+          "minimum": 512.0,
+          "default": 2048.0
+        },
+        "instances": {
+          "description": "Number of OpenWhisk controller instances to run.",
+          "type": "integer",
+          "minimum": 0,
+          "default": 1
+        },
+        "name": {
+          "description": "The name of the OpenWhisk Controller service instance.",
+          "type": "string",
+          "default": "whisk-controller"
+        },
+        "image": {
+          "description": "The docker image of the OpenWhisk Controller service instance.",
+          "type": "string",
+          "default": "openwhisk/controller"
+        },
+        "java_opts": {
+          "description": "The jvm args.",
+          "type": "string",
+          "default": "-Xmx768m"
+        },
+        "metrics_kamon": {
+          "description": "Enable kamon metrics output?",
+          "type": "boolean",
+          "default": false
+
+        },
+        "metrics_log":{
+          "description": "Enable log metrics output?",
+          "type": "boolean",
+          "default": true
+        },
+        "invoker_busy_threshold":{
+          "description": "Number of activations an invoker can receive concurrently.",
+          "type": "integer",
+          "default": 16
+        },
+        "blackbox_fraction":{
+          "description": "The portion of invokers reserved for use with blackbox containers.",
+          "type": "number",
+          "default": 0.1
+        },
+        "version_name":{
+          "description": "OpenWhisk version name.",
+          "type": "string",
+          "default": "openwhisk"
+        },
+        "version_date":{
+          "description": "OpenWhisk version date.",
+          "type": "string",
+          "default": "2018-03-06"
+        },
+        "version_buildno":{
+          "description": "OpenWhisk version build number.",
+          "type": "string",
+          "default": "latest"
+        },
+        "docker_label":{
+          "description": "Label to add to docker container.",
+          "type": "string",
+          "default": "controller"
+        },
+        "docker_log_driver":{
+          "description": "Docker log driver to use for the container.",
+          "type": "string",
+          "default": "json-file"
+        },
+        "additional_vars": {
+          "description": "Additional env vars.",
+          "items": {
+            "type": "object",
+            "properties": {
+              "key": {
+                "type": "string"
+              },
+              "value": {
+                "type": "string"
+              }
+            },
+            "required": [
+              "key",
+              "value"
+            ]
+          },
+          "default": []
+        }
+      },
+      "required": [
+        "port",
+        "cpus",
+        "name",
+        "instances",
+        "mem",
+        "image",
+        "version_date"
+      ]
+    },
+    "akka":{
+      "type": "object",
+      "properties": {
+        "actor_provider": {
+          "description": "Akka actor provider (local or cluster).",
+          "type": "string",
+          "default": "local"
+        },
+        "cluster_hostname": {
+          "description": "Akka cluster hostname (used when actor_provider is cluster).",
+          "type": "string",
+          "default": "local"
+        },
+        "cluster_port": {
+          "description": "Akka cluster port (used when actor_provider is cluster).",
+          "type": "string",
+          "default": "$PORT_2551"
+        },
+        "cluster_bind_port": {
+          "description": "Akka cluster bind port (used when actor_provider is cluster).",
+          "type": "integer",
+          "default": 2551
+        }
+      }
+    },
+    "couchdb": {
+      "description": "Linked CouchDB properties",
+      "type": "object",
+      "properties": {
+        "provider": {
+          "description": "CouchDB provider.",
+          "type": "string",
+          "default": "CouchDB"
+        },
+        "protocol": {
+          "description": "CouchDB protocol.",
+          "type": "string",
+          "default": "http"
+        },
+        "host": {
+          "description": "Host location of CouchDB cluster.",
+          "type": "string",
+          "default": "whisk-couchdb.marathon.mesos"
+        },
+        "port": {
+          "description": "Host HTTP port of CouchDB cluster (default = 5984).",
+          "type": "integer",
+          "default": 5984,
+          "minimum": 0
+        },
+        "username": {
+          "description": "CouchDB username.",
+          "type": "string",
+          "default": "whisk_admin"
+        },
+        "password": {
+          "description": "CouchDB password.",
+          "type": "string",
+          "default": "some_passw0rd"
+        },
+        "actions_db": {
+          "description": "Actions database.",
+          "type": "string",
+          "default": "local_whisks"
+        },
+        "actions_db_ddoc": {
+          "description": "Actions design doc.",
+          "type": "string",
+          "default": "whisks.v2.1.0"
+        },
+        "activations_db": {
+          "description": "Activations database.",
+          "type": "string",
+          "default": "local_activations"
+        },
+        "activations_db_ddoc": {
+          "description": "Activations design doc.",
+          "type": "string",
+          "default": "whisks.v2.1.0"
+        },
+        "activations_filter_ddoc": {
+          "description": "Activation filters design doc.",
+          "type": "string",
+          "default": "whisks-filters.v2.1.0"
+        },
+        "auths_db": {
+          "description": "Auth database.",
+          "type": "string",
+          "default": "local_subjects"
+        }
+      },
+      "required": [
+        "provider",
+        "protocol",
+        "host",
+        "port",
+        "username",
+        "password"
+      ]
+    },
+    "kafka": {
+      "description": "Linked Kafka properties",
+      "type": "object",
+      "properties": {
+        "hosts": {
+          "description": "Host location of Kafka cluster.",
+          "type": "string",
+          "default": "kafka-0-broker.kafka.mesos:9092"
+        },
+        "replication_factor": {
+          "description": "Kafka replication factor specified when creating topics.",
+          "type": "integer",
+          "default": 2,
+          "minimum": 1
+        },
+        "cache_invalidation_retention_bytes":{
+          "description": "Cache invalidation topic: Kafka retention bytes specified when creating topics.",
+          "type": "integer",
+          "default": 1073741824
+        },
+        "cache_invalidation_retention_ms":{
+          "description": "Cache invalidation topic: Kafka retention ms specified when creating topics.",
+          "type": "integer",
+          "default": 172800000
+        },
+        "cache_invalidation_segment_bytes":{
+          "description": "Cache invalidation topic: Kafka segment bytes specified when creating topics.",
+          "type": "integer",
+          "default": 536870912
+        },
+        "completed_retention_bytes":{
+          "description": "Completed topic: Kafka retention bytes specified when creating topics.",
+          "type": "integer",
+          "default": 1073741824
+        },
+        "completed_retention_ms":{
+          "description": "Completed topic: Kafka retention ms specified when creating topics.",
+          "type": "integer",
+          "default": 172800000
+        },
+        "completed_segment_bytes":{
+          "description": "Completed topic: Kafka segment bytes specified when creating topics.",
+          "type": "integer",
+          "default": 536870912
+        },
+        "health_retention_bytes":{
+          "description": "Health topic: Kafka retention bytes specified when creating topics.",
+          "type": "integer",
+          "default": 1073741824
+        },
+        "health_retention_ms":{
+          "description": "Health topic: Kafka retention ms specified when creating topics.",
+          "type": "integer",
+          "default": 172800000
+        },
+        "health_segment_bytes":{
+          "description": "Health topic: Kafka segment bytes specified when creating topics.",
+          "type": "integer",
+          "default": 536870912
+        }
+      },
+      "required": [
+        "hosts"
+      ]
+    },
+    "apigateway": {
+      "description": "Linked API Gateway properties",
+      "type": "object",
+      "properties": {
+        "host": {
+          "description": "Host location of API Gateway cluster.",
+          "type": "string",
+          "default": "apigateway.marathon.mesos"
+        },
+        "port": {
+          "description": "Host HTTP port of API Gateway cluster (default = 80).",
+          "type": "integer",
+          "default": 80,
+          "minimum": 0
+        }
+      },
+      "required": [
+        "host",
+        "port"
+      ]
+    },
+    "limits": {
+      "description": "System limits of OpenWhisk",
+      "type": "object",
+      "properties": {
+        "actions_invokes_per_minute": {
+          "description": "Number of activations per minute per namespace (either executing or queued for execution).",
+          "type": "integer",
+          "default": 60
+        },
+        "actions_invokes_concurrent": {
+          "description": "The maximum concurrent invocations allowed per namespace.",
+          "type": "integer",
+          "default": 30
+        },
+        "triggers_fires_per_minute": {
+          "description": "The allowed namespace trigger firings per minute.",
+          "type": "integer",
+          "default": 60
+        },
+        "actions_invokes_concurrent_system": {
+          "description": "The maximum concurrent invocations the system will allow across all namespaces.",
+          "type": "integer",
+          "default": 5000
+        },
+        "actions_sequence_max_length": {
+          "description": "The maximum number of actions in a sequence.",
+          "type": "integer",
+          "default": 50
+        },
+        "actions_memory_min": {
+          "description": "The min memory allowance to run an action.",
+          "type": "string",
+          "default": "128 m"
+        },
+        "actions_memory_max": {
+          "description": "The max memory allowance to run an action.",
+          "type": "string",
+          "default": "512 m"
+        },
+        "actions_memory_std": {
+          "description": "The default memory allowance to run an action.",
+          "type": "string",
+          "default": "256 m"
+        }
+      }
+    }
+  }
+}
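
Only values that differ from the defaults above need to be supplied at install time; a typical whisk-controller options file overrides the CouchDB credentials and the Kafka broker list so they match the linked services (placeholder values):

    {
      "couchdb": {
        "password": "choose_a_real_passw0rd"
      },
      "kafka": {
        "hosts": "kafka-0-broker.kafka.mesos:9092,kafka-1-broker.kafka.mesos:9092"
      }
    }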
diff --git a/dcos-universe/repo/packages/W/whisk-controller/0/marathon.json.mustache b/dcos-universe/repo/packages/W/whisk-controller/0/marathon.json.mustache
new file mode 100644
index 0000000..5d8b70e
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-controller/0/marathon.json.mustache
@@ -0,0 +1,117 @@
+{
+  "id": "/{{service.name}}",
+  "cpus": {{service.cpus}},
+  "mem": {{service.mem}},
+  "instances": {{service.instances}},
+  "env": {
+
+    "JAVA_OPTS": "{{service.java_opts}}",
+
+    "WHISK_VERSION_NAME": "{{service.version_name}}",
+    "WHISK_VERSION_DATE": "{{service.version_date}}",
+    "WHISK_VERSION_BUILDNO": "{{service.version_buildno}}",
+    "WHISK_LOGS_DIR": "/logs",
+
+    "DB_PROVIDER": "{{couchdb.provider}}",
+    "DB_PROTOCOL": "{{couchdb.protocol}}",
+    "DB_PORT": "{{couchdb.port}}",
+    "DB_HOST": "{{couchdb.host}}",
+    "DB_USERNAME": "{{couchdb.username}}",
+    "DB_PASSWORD": "{{couchdb.password}}",
+    "DB_WHISK_ACTIONS": "{{couchdb.actions_db}}",
+    "CONFIG_whisk_db_actionsDdoc": "{{couchdb.actions_db_ddoc}}",
+    "DB_WHISK_ACTIVATIONS": "{{couchdb.activations_db}}",
+    "CONFIG_whisk_db_activationsDdoc": "{{couchdb.activations_db_ddoc}}",
+    "CONFIG_whisk_db_activationsFilterDdoc": "{{couchdb.activations_filter_ddoc}}",
+    "DB_WHISK_AUTHS": "{{couchdb.auths_db}}",
+
+    "LIMITS_ACTIONS_INVOKES_PERMINUTE": "{{limits.actions_invokes_per_minute}}",
+    "LIMITS_ACTIONS_INVOKES_CONCURRENT": "{{limits.actions_invokes_concurrent}}",
+    "LIMITS_TRIGGERS_FIRES_PERMINUTE": "{{limits.triggers_fires_per_minute}}",
+    "LIMITS_ACTIONS_INVOKES_CONCURRENTINSYSTEM": "{{limits.actions_invokes_concurrent_system}}",
+    "LIMITS_ACTIONS_SEQUENCE_MAXLENGTH": "{{limits.actions_sequence_max_length}}",
+
+    "COMPONENT_NAME": "controller",
+    "PORT": "{{service.port}}",
+
+    "KAFKA_HOSTS": "{{kafka.hosts}}",
+    "CONFIG_whisk_kafka_replicationFactor": "{{kafka.replication_factor}}",
+    "CONFIG_whisk_kafka_topics_cacheInvalidation_retentionBytes": "{{kafka.cache_invalidation_retention_bytes}}",
+    "CONFIG_whisk_kafka_topics_cacheInvalidation_retentionMs": "{{kafka.cache_invalidation_retention_ms}}",
+    "CONFIG_whisk_kakfa_topics_cacheInvalidation_segmentBytes": "{{kafka.cache_invalidation_segment_bytes}}",
+    "CONFIG_whisk_kafka_topics_completed_retentionBytes": "{{kafka.completed_retention_bytes}}",
+    "CONFIG_whisk_kafka_topics_completed_retentionMs": "{{kafka.completed_retention_ms}}",
+    "CONFIG_whisk_kakfa_topics_completed_segmentBytes": "{{kafka.completed_segment_bytes}}",
+    "CONFIG_whisk_kafka_topics_health_retentionBytes": "{{kafka.health_retention_bytes}}",
+    "CONFIG_whisk_kafka_topics_health_retentionMs": "{{kafka.health_retention_ms}}",
+    "CONFIG_whisk_kakfa_topics_health_segmentBytes": "{{kafka.health_segment_bytes}}",
+
+    "RUNTIMES_MANIFEST": "{\"runtimes\":{\"nodejs\":[{\"kind\":\"nodejs\",\"image\":{\"name\":\"nodejsaction\"},\"deprecated\":true},{\"kind\":\"nodejs:6\",\"default\":true,\"image\":{\"name\":\"nodejs6action\"}}],\"python\":[{\"kind\":\"python\",\"image\":{\"name\":\"python2action\"}},{\"kind\":\"python:2\",\"default\":true,\"image\":{\"name\":\"python2action\"}},{\"kind\":\"python:3\",\"image\":{\"name\":\"python3action\"}}],\"swift\":[{\"kind\":\"swift\",\"image\":{\"name\":\"swiftact [...]
+
+    "LOADBALANCER_HOST": "{{apigateway.host}}",
+
+    "CONTROLLER_INSTANCES": "1",
+    "CONTROLLER_LOCALBOOKKEEPING": "true",
+    "CONTROLLER_HA": "false",
+
+    "AKKA_CLUSTER_SEED_NODES": "[]",
+
+    "METRICS_KAMON": "{{service.metrics_kamon}}",
+    "METRICS_LOG": "{{service.metrics_log}}",
+
+    "CONFIG_whisk_loadbalancer_invokerBusyThreshold": "{{service.invoker_busy_threshold}}",
+    "CONFIG_whisk_loadbalancer_blackboxFraction": "{{service.blackbox_fraction}}",
+    "CONFIG_akka_actor_provider":"{{akka.actor_provider}}",
+    "CONFIG_akka_remote_netty_tcp_hostname":"{{akka.cluster_hostname}}",
+    "CONFIG_akka_remote_netty_tcp_port":"{{akka.cluster_port}}",
+    "CONFIG_akka_remote_netty_tcp_bindPort":"{{akka.cluster_bind_port}}",
+
+    {{#service.additional_vars}}
+        "{{key}}":"{{value}}",
+    {{/service.additional_vars}}
+
+    "CONFIG_whisk_memory_min": "{{limits.actions_memory_min}}",
+    "CONFIG_whisk_memory_max": "{{limits.actions_memory_max}}",
+    "CONFIG_whisk_memory_std": "{{limits.actions_memory_std}}"
+  },
+  "container": {
+    "type": "DOCKER",
+    "docker": {
+      "image": "{{service.image}}",
+      "network": "BRIDGE",
+      "parameters": [
+        {
+          "key":"label",
+          "value":"{{service.docker_label}}"
+        },
+        {
+          "key":"log-driver",
+          "value":"{{service.docker_log_driver}}"
+        }
+      ],
+      "portMappings": [
+         {
+           "containerPort": 8888,
+           "hostPort": {{service.port}},
+           "servicePort": 0,
+           "protocol": "tcp"
+         }
+       ],
+      "forcePullImage": true
+    }
+  },
+  "cmd": "/bin/sh -c \"exec /init.sh 0 >> /dev/stdout\"",
+  "healthChecks": [
+    {
+      "path": "/ping",
+      "protocol": "HTTP",
+      "gracePeriodSeconds": 30,
+      "intervalSeconds": 15,
+      "timeoutSeconds": 2,
+      "maxConsecutiveFailures": 3
+    }
+  ],
+  "labels": {
+    "DCOS_SERVICE_NAME": "{{service.name}}"
+  }
+}
diff --git a/dcos-universe/repo/packages/W/whisk-controller/0/package.json b/dcos-universe/repo/packages/W/whisk-controller/0/package.json
new file mode 100644
index 0000000..f0a9bb5
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-controller/0/package.json
@@ -0,0 +1,19 @@
+{
+  "description": "OpenWhisk Controller service running on DC/OS",
+  "framework": true,
+  "maintainer": "dunguyen@adobe.com",
+  "minDcosReleaseVersion": "1.9",
+  "name": "whisk-controller",
+  "packagingVersion": "4.0",
+  "postInstallNotes": "DC/OS OpenWhisk controller has been successfully installed!",
+  "postUninstallNotes": "DC/OS OpenWhisk controller service has been uninstalled.",
+  "preInstallNotes": "OpenWhisk Controller requires Kafka, CouchDB, APIGateway and at least one running Invoker in the same DC/OS cluster.",
+  "selected": true,
+  "tags": [
+    "openwhisk",
+    "controller",
+    "serverless",
+    "lambda"
+  ],
+  "version": "0.1"
+}
diff --git a/dcos-universe/repo/packages/W/whisk-controller/0/resource.json b/dcos-universe/repo/packages/W/whisk-controller/0/resource.json
new file mode 100644
index 0000000..e44eca9
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-controller/0/resource.json
@@ -0,0 +1,7 @@
+{
+  "images": {
+    "icon-small": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-small.png?raw=true",
+    "icon-medium": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-medium.png?raw=true",
+    "icon-large": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-large.png?raw=true"
+  }
+}
diff --git a/dcos-universe/repo/packages/W/whisk-couchdb/0/config.json b/dcos-universe/repo/packages/W/whisk-couchdb/0/config.json
new file mode 100644
index 0000000..4e3deab
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-couchdb/0/config.json
@@ -0,0 +1,93 @@
+{
+  "type": "object",
+  "properties": {
+    "service": {
+      "description": "OpenWhisk CouchDB Configuration Properties",
+      "type": "object",
+      "additionalProperties": false,
+      "properties": {
+        "couchdb-user": {
+          "description": "CouchDB username.",
+          "type": "string",
+          "default": "whisk_admin"
+        },
+        "couchdb-password": {
+          "description": "CouchDB password.",
+          "type": "string",
+          "default": "some_passw0rd"
+        },
+        "couchdb-port": {
+          "description": "Host port for CouchDB (default = 5984)",
+          "type": "integer",
+          "default": 5984,
+          "minimum": 0
+        },
+        "cpus": {
+          "description": "CPU shares to allocate to each CouchDB instance.",
+          "type": "number",
+          "minimum": 0.1,
+          "default": 0.5
+        },
+        "mem": {
+          "description": "Memory (MB) to allocate to each CouchDB instance.",
+          "type": "number",
+          "minimum": 512.0,
+          "default": 1024.0
+        },
+        "volume-size": {
+          "description": "Size of data volume (MiB) to allocate to each CouchDB instance.",
+          "type": "number",
+          "minimum": 1024,
+          "default": 10240
+        },
+        "instances": {
+          "description": "Number of CouchDB instances to run.",
+          "type": "integer",
+          "minimum": 0,
+          "default": 1
+        },
+        "name": {
+          "description": "The name of the CouchDB service instance.",
+          "type": "string",
+          "default": "whisk-couchdb"
+        },
+        "placement":{
+          "description": "Constraints.",
+          "type": "string",
+          "default": "[]"
+        },
+        "additional_vars": {
+          "description": "Additional env vars.",
+          "items": {
+            "type": "object",
+            "properties": {
+              "key": {
+                "type": "string"
+              },
+              "value": {
+                "type": "string"
+              }
+            },
+            "required": [
+              "key",
+              "value"
+            ]
+          },
+          "default": []
+        }
+      },
+      "required": [
+        "couchdb-user",
+        "couchdb-password",
+        "cpus",
+        "name",
+        "instances",
+        "mem",
+        "volume-size"
+      ]
+    }
+  },
+  "required": [
+    "service"
+  ]
+}
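
Because config.json is plain JSON Schema, user options can be sanity-checked before installing the package. A rough sketch, assuming the jsonschema package (which the build scripts in this repo already use) and the file path from the diff header; the option values are only examples:

    import json
    import jsonschema

    with open("dcos-universe/repo/packages/W/whisk-couchdb/0/config.json",
              encoding="utf-8") as f:
        schema = json.load(f)

    # Example options covering every required field; validate() raises on any
    # schema violation, e.g. mem below the 512 MB minimum.
    options = {"service": {
        "couchdb-user": "whisk_admin", "couchdb-password": "s3cret",
        "cpus": 1.0, "mem": 2048.0, "volume-size": 20480,
        "instances": 1, "name": "whisk-couchdb"}}

    jsonschema.Draft4Validator(schema).validate(options)
    print("options are valid")
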
diff --git a/dcos-universe/repo/packages/W/whisk-couchdb/0/marathon.json.mustache b/dcos-universe/repo/packages/W/whisk-couchdb/0/marathon.json.mustache
new file mode 100644
index 0000000..07cc13b
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-couchdb/0/marathon.json.mustache
@@ -0,0 +1,59 @@
+{
+  "id": "/{{service.name}}",
+  "cpus": {{service.cpus}},
+  "mem": {{service.mem}},
+  "instances": {{service.instances}},
+  "constraints": {{{service.placement}}},
+  "env": {
+    {{#service.additional_vars}}
+        "{{key}}":"{{value}}",
+    {{/service.additional_vars}}
+    "COUCHDB_USER": "{{service.couchdb-user}}",
+    "COUCHDB_PASSWORD": "{{service.couchdb-password}}",
+    "DB_PREFIX": "local_",
+    "DB_HOST": "127.0.0.1",
+    "DB_PORT": "5984"
+  },
+  "container": {
+    "type": "DOCKER",
+    "docker": {
+      "image": "{{resource.assets.container.docker.whisk-couchdb-docker}}",
+      "network": "BRIDGE",
+      "portMappings": [
+        {
+          "containerPort": {{service.couchdb-port}},
+          "hostPort": {{service.couchdb-port}},
+          "servicePort": 0,
+          "protocol": "tcp"
+        }
+      ]
+    },
+    "volumes": [
+      {
+        "containerPath": "/usr/local/var/lib/couchdb",
+        "hostPath": "couchdb",
+        "mode": "RW"
+      },
+      {
+        "containerPath": "couchdb",
+        "mode": "RW",
+        "persistent": {
+          "size": {{service.volume-size}}
+        }
+      }
+    ]
+  },
+  "healthChecks": [
+    {
+      "path": "/",
+      "protocol": "HTTP",
+      "gracePeriodSeconds": 30,
+      "intervalSeconds": 60,
+      "timeoutSeconds": 20,
+      "maxConsecutiveFailures": 3
+    }
+  ],
+  "labels": {
+    "DCOS_SERVICE_NAME": "{{service.name}}"
+  }
+}
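
Note the triple braces in "constraints": {{{service.placement}}} above: the placement value is a raw JSON string, and the normal double-brace form would HTML-escape its quotes. A quick illustration, assuming the chevron Mustache implementation (any spec-compliant renderer behaves the same way):

    import chevron

    placement = '[["hostname", "UNIQUE"]]'
    # Double braces HTML-escape the quotes and corrupt the JSON; triple braces
    # interpolate the string verbatim.
    print(chevron.render('"constraints": {{placement}}', {"placement": placement}))
    print(chevron.render('"constraints": {{{placement}}}', {"placement": placement}))
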
diff --git a/dcos-universe/repo/packages/W/whisk-couchdb/0/package.json b/dcos-universe/repo/packages/W/whisk-couchdb/0/package.json
new file mode 100644
index 0000000..e55f23c
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-couchdb/0/package.json
@@ -0,0 +1,16 @@
+{
+  "description": "OpenWhisk CouchDB service running on DC/OS",
+  "framework": true,
+  "maintainer": "dunguyen@adobe.com",
+  "minDcosReleaseVersion": "1.9",
+  "name": "whisk-couchdb",
+  "packagingVersion": "4.0",
+  "postInstallNotes": "DC/OS OpenWhisk CouchDB has been successfully installed!",
+  "postUninstallNotes": "DC/OS OpenWhisk CouchDB service has been uninstalled.",
+  "selected": true,
+  "tags": [
+    "database",
+    "nosql"
+  ],
+  "version": "1.6"
+}
diff --git a/dcos-universe/repo/packages/W/whisk-couchdb/0/resource.json b/dcos-universe/repo/packages/W/whisk-couchdb/0/resource.json
new file mode 100644
index 0000000..d49db68
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-couchdb/0/resource.json
@@ -0,0 +1,14 @@
+{
+  "images": {
+    "icon-small": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-small.png?raw=true",
+    "icon-medium": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-medium.png?raw=true",
+    "icon-large": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-large.png?raw=true"
+  },
+  "assets": {
+    "container": {
+      "docker": {
+        "whisk-couchdb-docker": "openwhisk/kube-couchdb"
+      }
+    }
+  }
+}
diff --git a/dcos-universe/repo/packages/W/whisk-invoker/0/config.json b/dcos-universe/repo/packages/W/whisk-invoker/0/config.json
new file mode 100644
index 0000000..e8f8e9c
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-invoker/0/config.json
@@ -0,0 +1,314 @@
+{
+  "type": "object",
+  "properties": {
+    "service": {
+      "description": "OpenWhisk Invoker Configuration Properties",
+      "type": "object",
+      "properties": {
+        "port": {
+          "description": "Host port for OpenWhisk invoker (default = 8085)",
+          "type": "integer",
+          "default": 8085,
+          "minimum": 0
+        },
+        "cpus": {
+          "description": "CPU shares to allocate to each OpenWhisk invoker instance.",
+          "type": "number",
+          "minimum": 0.1,
+          "default": 0.5
+        },
+        "mem": {
+          "description": "Memory (MB) to allocate to each OpenWhisk invoker instance.",
+          "type": "number",
+          "minimum": 512.0,
+          "default": 1024.0
+        },
+        "instances": {
+          "description": "Number of OpenWhisk invoker instances to run.",
+          "type": "integer",
+          "minimum": 0,
+          "default": 2
+        },
+        "name": {
+          "description": "The name of the OpenWhisk Invoker service instance.",
+          "type": "string",
+          "default": "whisk-invoker"
+        },
+        "image": {
+          "description": "The docker image of the OpenWhisk Invoker service instance.",
+          "type": "string",
+          "default": "openwhisk/invoker"
+        },
+        "java_opts": {
+          "description": "The jvm args.",
+          "type": "string",
+          "default": "-Xmx768m"
+        },
+        "invoker_numcore": {
+          "description": "The number of cores available on each invoker instance.",
+          "type": "integer",
+          "default": 4
+        },
+        "invoker_coreshare":{
+          "description": "The factor by which cores will be shared in each invoker instance.",
+          "type": "integer",
+          "default": 2
+        },
+        "metrics_kamon": {
+          "description": "Enable kamon metrics output?",
+          "type": "boolean",
+          "default": false
+
+        },
+        "metrics_log":{
+          "description": "Enable log metrics output?",
+          "type": "boolean",
+          "default": true
+        },
+        "version_name":{
+          "description": "OpenWhisk version name.",
+          "type": "string",
+          "default": "openwhisk"
+        },
+        "version_date":{
+          "description": "OpenWhisk version date.",
+          "type": "string",
+          "default": "2018-03-06"
+        },
+        "version_buildno":{
+          "description": "OpenWhisk version build number.",
+          "type": "string",
+          "default": "latest"
+        },
+        "docker_label":{
+          "description": "Label to add to docker container.",
+          "type": "string",
+          "default": "invoker"
+        },
+        "docker_log_driver":{
+          "description": "Docker log driver to use for the container.",
+          "type": "string",
+          "default": "json-file"
+        },
+        "docker_auth_uri":{
+          "description": "HTTP URI for downloading docker.tar.gz for private docker.",
+          "type": "string"
+        },
+        "additional_vars": {
+          "description": "Additional env vars.",
+          "items": {
+            "type": "object",
+            "properties": {
+              "key": {
+                "type": "string"
+              },
+              "value": {
+                "type": "string"
+              }
+            },
+            "required": [
+              "key",
+              "value"
+            ]
+          },
+          "default": []
+        }
+      },
+      "required": [
+        "port",
+        "cpus",
+        "name",
+        "instances",
+        "mem",
+        "image",
+        "version_date"
+      ]
+    },
+    "action_container": {
+      "description": "Action container configs",
+      "type": "object",
+      "properties": {
+        "docker_registry": {
+          "description": "The value to be set as DOCKER_REGISTRY in environment variables.",
+          "type": "string",
+          "default": " "
+        },
+        "docker_image_prefix": {
+          "description": "The value to be set as DOCKER_IMAGE_PREFIX in environment variables.",
+          "type": "string",
+          "default": "openwhisk"
+        },
+        "docker_image_tag": {
+          "description": "The value to be set as DOCKER_IMAGE_TAG in environment variables.",
+          "type": "string",
+          "default": "latest"
+        },
+        "docker_network_name": {
+          "description": "The docker network name to start the action containers on. Defaults to 'bridge'.",
+          "type": "string",
+          "default": "bridge"
+        },
+        "memory_min": {
+          "description": "The min memory allowance to run an action.",
+          "type": "string",
+          "default": "128 m"
+        },
+        "memory_max": {
+          "description": "The max memory allowance to run an action.",
+          "type": "string",
+          "default": "512 m"
+        },
+        "memory_std": {
+          "description": "The standard memory to run an action.",
+          "type": "string",
+          "default": "256 m"
+        }
+      },
+      "required": [
+        "docker_registry",
+        "docker_image_prefix",
+        "docker_image_tag"
+      ]
+    },
+    "couchdb": {
+      "description": "Linked CouchDB properties",
+      "type": "object",
+      "properties": {
+        "provider": {
+          "description": "CouchDB provider.",
+          "type": "string",
+          "default": "CouchDB"
+        },
+        "protocol": {
+          "description": "CouchDB protocol.",
+          "type": "string",
+          "default": "http"
+        },
+        "host": {
+          "description": "Host location of CouchDB cluster.",
+          "type": "string",
+          "default": "whisk-couchdb.marathon.mesos"
+        },
+        "port": {
+          "description": "Host HTTP port of CouchDB cluster (default = 5984).",
+          "type": "integer",
+          "default": 5984,
+          "minimum": 0
+        },
+        "username": {
+          "description": "CouchDB username.",
+          "type": "string",
+          "default": "whisk_admin"
+        },
+        "password": {
+          "description": "CouchDB password.",
+          "type": "string",
+          "default": "some_passw0rd"
+        },
+        "actions_db": {
+          "description": "Actions database.",
+          "type": "string",
+          "default": "local_whisks"
+        },
+        "actions_db_ddoc": {
+          "description": "Actions design doc.",
+          "type": "string",
+          "default": "whisks.v2.1.0"
+        },
+        "activations_db": {
+          "description": "Activations database.",
+          "type": "string",
+          "default": "local_activations"
+        },
+        "activations_db_ddoc": {
+          "description": "Activations design doc.",
+          "type": "string",
+          "default": "whisks.v2.1.0"
+        },
+        "activations_filter_ddoc": {
+          "description": "Activation filters design doc.",
+          "type": "string",
+          "default": "whisks-filters.v2.1.0"
+        }
+      },
+      "required": [
+        "provider",
+        "protocol",
+        "host",
+        "port",
+        "username",
+        "password"
+      ]
+    },
+    "kafka": {
+      "description": "Linked Kafka properties",
+      "type": "object",
+      "properties": {
+        "hosts": {
+          "description": "Connection string of Kafka cluster.",
+          "type": "string",
+          "default": "kafka-0-broker.kafka.mesos:9092"
+        },
+        "replication_factor": {
+          "description": "Kafka replication factor specified when creating topics.",
+          "type": "integer",
+          "default": 2,
+          "minimum": 1
+        },
+        "retention_bytes":{
+          "description": "Kafka retention bytes specified when creating topics.",
+          "type": "integer",
+          "default": 1073741824
+        },
+        "retention_ms":{
+          "description": "Kafka retention ms specified when creating topics.",
+          "type": "integer",
+          "default": 172800000
+        },
+        "segment_bytes":{
+          "description": "Kafka segment bytes specified when creating topics.",
+          "type": "integer",
+          "default": 536870912
+        }
+      },
+      "required": [
+        "hosts"
+      ]
+    },
+    "zookeeper": {
+      "description": "Linked ZooKeeper properties",
+      "type": "object",
+      "properties": {
+        "hosts": {
+          "description": "Connection string for ZooKeeper.",
+          "type": "string",
+          "default": "master.mesos"
+        }
+      },
+      "required": [
+        "hosts"
+      ]
+    },
+    "apigateway": {
+      "description": "Linked API Gateway properties",
+      "type": "object",
+      "properties": {
+        "host": {
+          "description": "Host location of API Gateway cluster.",
+          "type": "string",
+          "default": "apigateway.marathon.mesos"
+        },
+        "port": {
+          "description": "Host HTTP port of API Gateway cluster.",
+          "type": "integer",
+          "default": 80,
+          "minimum": 0
+        }
+      },
+      "required": [
+        "host",
+        "port"
+      ]
+    }
+  }
+}
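
As far as I understand the invoker's container pool sizing in this OpenWhisk era, invoker_numcore and invoker_coreshare above multiply out to the number of action containers one invoker runs concurrently; that reading is an assumption, not something stated in this repo. A back-of-the-envelope sketch:

    # Assumed sizing rule: slots = numcore * coreshare, so the defaults above
    # (4 cores, core-share factor 2) would give 8 concurrent action containers.
    def action_container_slots(numcore: int, coreshare: int) -> int:
        return numcore * coreshare

    print(action_container_slots(4, 2))  # 8
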
diff --git a/dcos-universe/repo/packages/W/whisk-invoker/0/marathon.json.mustache b/dcos-universe/repo/packages/W/whisk-invoker/0/marathon.json.mustache
new file mode 100644
index 0000000..594e47a
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-invoker/0/marathon.json.mustache
@@ -0,0 +1,149 @@
+{
+  "id": "/{{service.name}}",
+  "cpus": {{service.cpus}},
+  "mem": {{service.mem}},
+  "instances": {{service.instances}},
+  "constraints": [
+    ["hostname","UNIQUE"]
+  ],
+  "env": {
+
+    "JAVA_OPTS": "{{service.java_opts}}",
+
+    "WHISK_VERSION_NAME": "{{service.version_name}}",
+    "WHISK_VERSION_DATE": "{{service.version_date}}",
+    "WHISK_VERSION_BUILDNO": "{{service.version_buildno}}",
+    "WHISK_LOGS_DIR": "/logs",
+
+    "COMPONENT_NAME": "invoker",
+    "PORT": "{{service.port}}",
+
+    "KAFKA_HOSTS": "{{kafka.hosts}}",
+    "CONFIG_whisk_kafka_replicationFactor": "{{kafka.replication_factor}}",
+    "CONFIG_whisk_kafka_topics_invoker_retentionBytes": "{{kafka.retention_bytes}}",
+    "CONFIG_whisk_kafka_topics_invoker_retentionMs": "{{kafka.retention_ms}}",
+    "CONFIG_whisk_kakfa_topics_invoker_segmentBytes": "{{kafka.segment_bytes}}",
+
+    "DB_PROVIDER": "{{couchdb.provider}}",
+    "DB_PROTOCOL": "{{couchdb.protocol}}",
+    "DB_PORT": "{{couchdb.port}}",
+    "DB_HOST": "{{couchdb.host}}",
+    "DB_USERNAME": "{{couchdb.username}}",
+    "DB_PASSWORD": "{{couchdb.password}}",
+    "DB_WHISK_ACTIONS": "{{couchdb.actions_db}}",
+    "CONFIG_whisk_db_actionsDdoc": "{{couchdb.actions_db_ddoc}}",
+    "DB_WHISK_ACTIVATIONS": "{{couchdb.activations_db}}",
+    "CONFIG_whisk_db_activationsDdoc": "{{couchdb.activations_db_ddoc}}",
+    "CONFIG_whisk_db_activationsFilterDdoc": "{{couchdb.activations_filter_ddoc}}",
+
+    "EDGE_HOST": "{{apigateway.host}}",
+    "WHISK_API_HOST_NAME": "{{apigateway.host}}",
+    "EDGE_HOST_APIPORT": "{{apigateway.port}}",
+
+    "RUNTIMES_MANIFEST": "{\"runtimes\":{\"nodejs\":[{\"kind\":\"nodejs\",\"image\":{\"name\":\"nodejsaction\"},\"deprecated\":true},{\"kind\":\"nodejs:6\",\"default\":true,\"image\":{\"name\":\"nodejs6action\"}}],\"python\":[{\"kind\":\"python\",\"image\":{\"name\":\"python2action\"}},{\"kind\":\"python:2\",\"default\":true,\"image\":{\"name\":\"python2action\"}},{\"kind\":\"python:3\",\"image\":{\"name\":\"python3action\"}}],\"swift\":[{\"kind\":\"swift\",\"image\":{\"name\":\"swiftact [...]
+
+    "DOCKER_REGISTRY": "{{action_container.docker_registry}}",
+    "DOCKER_IMAGE_PREFIX": "{{action_container.docker_image_prefix}}",
+    "DOCKER_IMAGE_TAG": "{{action_container.docker_image_tag}}",
+
+    "INVOKER_CONTAINER_NETWORK": "{{action_container.docker_network_name}}",
+    "CONFIG_whisk_containerFactory_containerArgs_numCore": "{{service.invoker_numcore}}",
+    "CONFIG_whisk_containerFactory_containerArgs_coreShare": "{{service.invoker_coreshare}}",
+    "INVOKER_INSTANCES": "1",
+    "CONFIG_whisk_docker_containerFactory_useRunc": "false",
+
+    "METRICS_KAMON": "{{service.metrics_kamon}}",
+    "METRICS_LOG": "{{service.metrics_log}}",
+
+    "ZOOKEEPER_HOSTS": "{{zookeeper.hosts}}",
+
+    {{#service.additional_vars}}
+        "{{key}}":"{{value}}",
+    {{/service.additional_vars}}
+
+    "CONFIG_whisk_memory_min": "{{action_container.memory_min}}",
+    "CONFIG_whisk_memory_max": "{{action_container.memory_max}}",
+    "CONFIG_whisk_memory_std": "{{action_container.memory_std}}"
+  },
+  "container": {
+    "type": "DOCKER",
+    "docker": {
+      "image": "{{service.image}}",
+      "network": "BRIDGE",
+      "parameters": [
+        {
+          "key":"label",
+          "value":"{{{service.docker_label}}}"
+        },
+        {
+          "key":"log-driver",
+          "value":"{{service.docker_log_driver}}"
+        }
+      ],
+      "portMappings": [
+        {
+          "containerPort": 8085,
+          "hostPort": {{service.port}},
+          "servicePort": 0,
+          "protocol": "tcp"
+        }
+      ],
+      "privileged": true,
+      "forcePullImage": true
+    },
+    "volumes": [
+      {
+        "containerPath": "/logs",
+        "hostPath": "~/tmp/openwhisk/invoker/logs",
+        "mode": "RW"
+      },
+      {
+        "containerPath": "/var/run/docker.sock",
+        "hostPath": "/var/run/docker.sock",
+        "mode": "RW"
+      },
+      {
+        "containerPath": "/containers",
+        "hostPath": "/var/lib/docker/containers",
+        "mode": "RW"
+      },
+      {
+        "containerPath": "/sys/fs/cgroup",
+        "hostPath": "/sys/fs/cgroup",
+        "mode": "RW"
+      },
+      {
+        "containerPath": "/run/runc",
+        "hostPath": "/run/runc",
+        "mode": "RW"
+      }
+    ]
+  },
+  {{#service.docker_auth_uri}}
+  "fetch": [
+    {
+      "uri": "{{service.docker_auth_uri}}",
+      "extract": true,
+      "executable": false,
+      "cache": false
+    }
+  ],
+  "cmd": "cp -R /mnt/mesos/sandbox/.docker/ ~/.docker/ && /bin/sh -c \"exec /init.sh --name $LIBPROCESS_IP >> /dev/stdout\"",
+  {{/service.docker_auth_uri}}
+  {{^service.docker_auth_uri}}
+  "cmd": "/bin/sh -c \"exec /init.sh --name $LIBPROCESS_IP >> /dev/stdout\"",
+  {{/service.docker_auth_uri}}
+  "healthChecks": [
+    {
+      "path": "/ping",
+      "protocol": "HTTP",
+      "gracePeriodSeconds": 30,
+      "intervalSeconds": 15,
+      "timeoutSeconds": 2,
+      "maxConsecutiveFailures": 3
+    }
+  ],
+  "labels": {
+    "DCOS_SERVICE_NAME": "{{service.name}}"
+  }
+}
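
RUNTIMES_MANIFEST above is a stringified JSON document (truncated by the mail archive), so every inner quote must stay escaped if it is edited by hand. A small sketch (not from the repo) of generating such a value programmatically instead; the single runtime entry is only illustrative:

    import json

    # Build the manifest as a plain dict and let json.dumps handle the quoting.
    manifest = {"runtimes": {"nodejs": [
        {"kind": "nodejs:6", "default": True, "image": {"name": "nodejs6action"}}
    ]}}
    env_value = json.dumps(manifest)   # the string the invoker reads at startup
    print(json.dumps(env_value))       # the escaped form to paste into the template
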
diff --git a/dcos-universe/repo/packages/W/whisk-invoker/0/package.json b/dcos-universe/repo/packages/W/whisk-invoker/0/package.json
new file mode 100644
index 0000000..97fd7f3
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-invoker/0/package.json
@@ -0,0 +1,19 @@
+{
+  "description": "OpenWhisk Invoker service running on DC/OS",
+  "framework": true,
+  "maintainer": "dunguyen@adobe.com",
+  "minDcosReleaseVersion": "1.9",
+  "name": "whisk-invoker",
+  "packagingVersion": "4.0",
+  "postInstallNotes": "DC/OS OpenWhisk invoker has been successfully installed!",
+  "postUninstallNotes": "DC/OS OpenWhisk invoker service has been uninstalled.",
+  "preInstallNotes": "OpenWhisk Invoker requires Kafka, CouchDB and APIGateway as dependencies.",
+  "selected": true,
+  "tags": [
+    "openwhisk",
+    "invoker",
+    "serverless",
+    "lambda"
+  ],
+  "version": "0.1"
+}
diff --git a/dcos-universe/repo/packages/W/whisk-invoker/0/resource.json b/dcos-universe/repo/packages/W/whisk-invoker/0/resource.json
new file mode 100644
index 0000000..e44eca9
--- /dev/null
+++ b/dcos-universe/repo/packages/W/whisk-invoker/0/resource.json
@@ -0,0 +1,7 @@
+{
+  "images": {
+    "icon-small": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-small.png?raw=true",
+    "icon-medium": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-medium.png?raw=true",
+    "icon-large": "https://github.com/dcos/dcos-ui/blob/master/plugins/services/src/img/icon-service-default-large.png?raw=true"
+  }
+}
diff --git a/dcos-universe/scripts/build.sh b/dcos-universe/scripts/build.sh
new file mode 100755
index 0000000..a6ddf24
--- /dev/null
+++ b/dcos-universe/scripts/build.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+set -o errexit -o nounset -o pipefail
+
+SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+REPO_BASE_DIR=${SCRIPTS_DIR}/..
+
+echo "Building the universe!"
+
+mkdir -p "${REPO_BASE_DIR}/target/"
+
+# Create a new virtual environment
+rm -rf "${REPO_BASE_DIR}/target/venv"
+python3 -m venv "${REPO_BASE_DIR}/target/venv"
+
+# Install dependencies
+"${REPO_BASE_DIR}/target/venv/bin/pip" install -r "${SCRIPTS_DIR}/requirements/requirements.txt"
+
+"${REPO_BASE_DIR}"/target/venv/bin/python3 "$SCRIPTS_DIR"/validate_packages.py
+"${REPO_BASE_DIR}"/target/venv/bin/python3 "$SCRIPTS_DIR"/gen_universe.py \
+  --repository="${REPO_BASE_DIR}"/repo/packages/ --out-dir="${REPO_BASE_DIR}"/target/
+
+# Delete virtual environment
+rm -rf "${REPO_BASE_DIR}/target/venv"
diff --git a/dcos-universe/scripts/diff_version.sh b/dcos-universe/scripts/diff_version.sh
new file mode 100755
index 0000000..b1ea2c9
--- /dev/null
+++ b/dcos-universe/scripts/diff_version.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+set -o errexit -o nounset -o pipefail
+
+
+if [ "$#" -ne 3 ]; then
+  echo "Usage: $0 package-name revision1 revision2 (e.g., nexus 3 4)" >&2
+  exit 1
+fi
+
+SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )";
+REPO_DIR=$SCRIPTS_DIR/../repo/packages
+
+echo "Diffing package $1 revision $2 revision $3"
+
+# get capital first letter for directory name
+FIRSTCHAR=$(echo "$1" | cut -c1-1 | tr '[:lower:]' '[:upper:]')
+
+for f in "${REPO_DIR}/$FIRSTCHAR/$1/$2"/*
+do
+  BASE_NAME=$(basename "$f")
+  echo ""
+  echo ""
+  echo "Diffing File $BASE_NAME:"
+  diff "${REPO_DIR}/$FIRSTCHAR/$1/$2/$BASE_NAME" "${REPO_DIR}/$FIRSTCHAR/$1/$3/$BASE_NAME" || true
+done
+
+
diff --git a/dcos-universe/scripts/gen_universe.py b/dcos-universe/scripts/gen_universe.py
new file mode 100755
index 0000000..fb80550
--- /dev/null
+++ b/dcos-universe/scripts/gen_universe.py
@@ -0,0 +1,754 @@
+#!/usr/bin/env python3
+
+from distutils.version import LooseVersion
+import argparse
+import base64
+import collections
+import copy
+import itertools
+import json
+import jsonschema
+import pathlib
+import shutil
+import sys
+import tempfile
+import re
+import zipfile
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description='This script generates all of the universe objects from '
+        'the universe repository. The files created in --out-dir are: '
+        'universe.json.')
+    parser.add_argument(
+        '--repository',
+        required=True,
+        type=pathlib.Path,
+        help='Path to the top level package directory. E.g. repo/packages')
+    parser.add_argument(
+        '--out-dir',
+        dest='outdir',
+        required=True,
+        type=pathlib.Path,
+        help='Path to the directory to use to store all universe objects')
+    args = parser.parse_args()
+
+    if not args.outdir.is_dir():
+        print('The path in --out-dir [{}] is not a directory. Please create it'
+              ' before running this script.'.format(args.outdir))
+        return 1
+
+    if not args.repository.is_dir():
+        print('The path in --repository [{}] is not a directory.'.format(
+            args.repository))
+        return 1
+
+    packages = [
+        generate_package_from_path(
+            args.repository,
+            package_name,
+            release_version)
+        for package_name, release_version
+        in enumerate_dcos_packages(args.repository)
+    ]
+
+    # Render entire universe
+    universe_path = args.outdir / 'universe.json'
+    with universe_path.open('w', encoding='utf-8') as universe_file:
+        json.dump({'packages': packages}, universe_file)
+    ct_universe_path = args.outdir / 'universe.content_type'
+    create_content_type_file(ct_universe_path, "v4")
+
+    # Render empty json
+    empty_path = args.outdir / 'repo-empty-v3.json'
+    with empty_path.open('w', encoding='utf-8') as universe_file:
+        json.dump({'packages': []}, universe_file)
+    ct_empty_path = args.outdir / 'repo-empty-v3.content_type'
+    create_content_type_file(ct_empty_path, "v3")
+
+    # create universe-by-version files for `dcos_versions`
+    dcos_versions = ["1.6.1", "1.7", "1.8", "1.9", "1.10", "1.11"]
+    [render_universe_by_version(
+        args.outdir, packages, version) for version in dcos_versions]
+
+
+def render_universe_by_version(outdir, packages, version):
+    """Render universe packages for `version`. Zip files for versions < 1.8,
+    and json files for version >= 1.8
+
+    :param outdir: Path to the directory to use to store all universe objects
+    :type outdir: str
+    :param packages: package dictionary
+    :type packages: dict
+    :param version: DC/OS version
+    :type version: str
+    :rtype: None
+    """
+
+    if LooseVersion(version) < LooseVersion("1.8"):
+        render_zip_universe_by_version(outdir, packages, version)
+    else:
+        file_path = render_json_by_version(outdir, packages, version)
+        _validate_repo(file_path, version)
+        render_content_type_file_by_version(outdir, version)
+
+
+def json_escape_compatibility(schema: collections.OrderedDict) -> collections.OrderedDict:
+    """ Further escape any singly escaped stringified JSON in config """
+
+    for value in schema.values():
+        if "description" in value:
+            value["description"] = escape_json_string(value["description"])
+
+        if "type" in value:
+            if value["type"] == "string" and "default" in value:
+                value["default"] = escape_json_string(value["default"])
+            elif value["type"] == "object" and "properties" in value:
+                value["properties"] = json_escape_compatibility(value["properties"])
+
+    return schema
+
+
+def escape_json_string(string: str) -> str:
+    """ Makes any single escaped double quotes doubly escaped. """
+
+    def escape_underescaped_slash(matchobj):
+        """ Return adjacent character + extra escaped double quote. """
+        return matchobj.group(1) + "\\\""
+
+    # This regex means: match .\" except \\\" while capturing `.`
+    return re.sub('([^\\\\])\\\"', escape_underescaped_slash, string)
+
+
+def render_content_type_file_by_version(outdir, version):
+    """Render content type file for `version`
+
+    :param outdir: Path to the directory to use to store all universe objects
+    :type outdir: str
+    :param version: DC/OS version
+    :type version: str
+    :rtype: None
+    """
+
+    universe_version = \
+        "v3" if LooseVersion(version) < LooseVersion("1.10") else "v4"
+    ct_file_path = \
+        outdir / 'repo-up-to-{}.content_type'.format(version)
+    create_content_type_file(ct_file_path, universe_version)
+
+
+def create_content_type_file(path, universe_version):
+    """ Creates a file with universe repo version `universe_version` content-type
+    as its contents.
+
+    :param path: the name of the content-type file
+    :type path: str
+    :param universe_version: Universe content type version: "v3" or "v4"
+    :type universe_version: str
+    :rtype: None
+    """
+    with path.open('w', encoding='utf-8') as ct_file:
+        content_type = format_universe_repo_content_type(universe_version)
+        ct_file.write(content_type)
+
+
+def format_universe_repo_content_type(universe_version):
+    """ Formats a universe repo content-type of version `universe-version`
+
+    :param universe_version: Universe content type version: "v3" or "v4"
+    :type universe_version: str
+    :return: content-type of the universe repo version `universe_version`
+    :rtype: str
+    """
+    content_type = "application/" \
+                   "vnd.dcos.universe.repo+json;" \
+                   "charset=utf-8;version=" \
+                   + universe_version
+    return content_type
+
+
+def render_json_by_version(outdir, packages, version):
+    """Render json file for `version`
+
+    :param outdir: Path to the directory to use to store all universe objects
+    :type outdir: str
+    :param packages: package dictionary
+    :type packages: dict
+    :param version: DC/OS version
+    :type version: str
+    :return: the path where the universe was stored
+    :rtype: str
+    """
+
+    packages = filter_and_downgrade_packages_by_version(packages, version)
+
+    json_file_path = outdir / 'repo-up-to-{}.json'.format(version)
+    with json_file_path.open('w', encoding='utf-8') as universe_file:
+        json.dump({'packages': packages}, universe_file)
+
+    return json_file_path
+
+
+def filter_and_downgrade_packages_by_version(packages, version):
+    """Filter packages by `version` and the downgrade if needed
+    :param packages: package dictionary
+    :type packages: dict
+    :param version: DC/OS version
+    :type version: str
+    :return packages filtered (and may be downgraded) on `version`
+    :rtype package dictionary
+    """
+    packages = [
+        package for package in packages if filter_by_version(package, version)
+    ]
+
+    if LooseVersion(version) < LooseVersion('1.10'):
+        # Prior to 1.10, Cosmos had a rendering bug that required
+        # stringified JSON to be doubly escaped. This was corrected
+        # in 1.10, but it means that packages with stringified JSON parameters
+        # that need to bridge versions must be accommodated.
+        #
+        # < 1.9 style escaping:
+        # \\\"field\\\": \\\"value\\\"
+        #
+        # >= 1.10 style escaping:
+        # \"field\": \"value\"
+        for package in packages:
+            if "config" in package and "properties" in package["config"]:
+                # The rough shape of a config file is:
+                # {
+                #   "schema": ...,
+                #   "properties": { }
+                # }
+                # Send just the top level properties in to the recursive
+                # function json_escape_compatibility.
+                package["config"]["properties"] = json_escape_compatibility(
+                    package["config"]["properties"])
+        packages = [downgrade_package_to_v3(package) for package in packages]
+    return packages
+
+
+def render_zip_universe_by_version(outdir, packages, version):
+    """Render zip universe for `version`
+
+    :param outdir: Path to the directory to use to store all universe objects
+    :type outdir: str
+    :param packages: list of packages
+    :type packages: [dict]
+    :param version: DC/OS version
+    :type version: str
+    :rtype: None
+    """
+
+    with tempfile.NamedTemporaryFile() as temp_file:
+        with zipfile.ZipFile(temp_file, mode='w') as zip_file:
+            render_universe_zip(
+                zip_file,
+                filter(
+                    lambda package: filter_by_version(package, version),
+                    packages)
+            )
+
+        zip_name = 'repo-up-to-{}.zip'.format(version)
+        shutil.copy(temp_file.name, str(outdir / zip_name))
+
+
+def filter_by_version(package, version):
+    """Prediate for checking for packages of version `version` or less
+
+    :param package: package dictionary
+    :type package: dict
+    :param version: DC/OS version
+    :type version: str
+    :rtype: bool
+    """
+
+    package_version = LooseVersion(
+        package.get('minDcosReleaseVersion', '0.0')
+    )
+
+    filter_version = LooseVersion(version)
+
+    return package_version <= filter_version
+
+
+def package_path(root, package_name, release_version):
+    """Returns the path to the package directory
+
+    :param root: path to the root of the repository
+    :type root: pathlib.Path
+    :param package_name: name of the package
+    :type package_name: str
+    :param release_version: package release version
+    :type release_version: int
+    :rtype: pathlib.Path
+    """
+
+    return (root /
+            package_name[:1].upper() /
+            package_name /
+            str(release_version))
+
+
+def read_package(path):
+    """Reads the package.json as a dict
+
+    :param path: path to the package
+    :type path: pathlib.Path
+    :rtype: dict
+    """
+
+    path = path / 'package.json'
+
+    with path.open(encoding='utf-8') as file_object:
+        return json.load(file_object)
+
+
+def read_resource(path):
+    """Reads the resource.json as a dict
+
+    :param path: path to the package
+    :type path: pathlib.Path
+    :rtype: dict | None
+    """
+
+    path = path / 'resource.json'
+
+    if path.is_file():
+        with path.open(encoding='utf-8') as file_object:
+            return json.load(file_object)
+
+
+def read_marathon_template(path):
+    """Reads the marathon.json.mustache as a base64 encoded string
+
+    :param path: path to the package
+    :type path: pathlib.Path
+    :rtype: str | None
+    """
+
+    path = path / 'marathon.json.mustache'
+
+    if path.is_file():
+        with path.open(mode='rb') as file_object:
+            return base64.standard_b64encode(file_object.read()).decode()
+
+
+def read_config(path):
+    """Reads the config.json as a dict
+
+    :param path: path to the package
+    :type path: pathlib.Path
+    :rtype: dict | None
+    """
+
+    path = path / 'config.json'
+
+    if path.is_file():
+        with path.open(encoding='utf-8') as file_object:
+            # Load config file into an OrderedDict to preserve order
+            return json.load(
+                file_object,
+                object_pairs_hook=collections.OrderedDict
+            )
+
+
+def read_command(path):
+    """Reads the command.json as a dict
+
+    :param path: path to the package
+    :type path: pathlib.Path
+    :rtype: dict | None
+    """
+
+    path = path / 'command.json'
+
+    if path.is_file():
+        with path.open(encoding='utf-8') as file_object:
+            return json.load(file_object)
+
+
+def generate_package_from_path(root, package_name, release_version):
+    """Returns v3 package metadata for the specified package
+
+    :param root: path to the root of the repository
+    :type root: pathlib.Path
+    :param package_name: name of the package
+    :type package_name: str
+    :param release_version: package release version
+    :type release_version: int
+    :rtype: dict
+    """
+
+    path = package_path(root, package_name, release_version)
+    return generate_package(
+        release_version,
+        read_package(path),
+        resource=read_resource(path),
+        marathon_template=read_marathon_template(path),
+        config=read_config(path),
+        command=read_command(path)
+    )
+
+
+def generate_package(
+        release_version,
+        package,
+        resource,
+        marathon_template,
+        config,
+        command):
+    """Returns v3 package object for package. See
+    repo/meta/schema/v3-repo-schema.json
+
+    :param release_version: package release version
+    :type release_version: int
+    :param package: content of package.json
+    :type package: dict
+    :param resource: content of resource.json
+    :type resource: dict | None
+    :param marathon_template: content of marathon.json.template as base64
+    :type marathon_template: str | None
+    :param config: content of config.json
+    :type config: dict | None
+    :param command: content of command.json
+    :type command: dict | None
+    :rtype: dict
+    """
+
+    package = package.copy()
+    package['releaseVersion'] = release_version
+
+    if resource:
+        package['resource'] = resource
+    if marathon_template:
+        package['marathon'] = {
+            'v2AppMustacheTemplate': marathon_template
+        }
+    if config:
+        package['config'] = config
+    if command:
+        package['command'] = command
+
+    return package
+
+
+def enumerate_dcos_packages(packages_path):
+    """Enumerate all of the package and release version to include
+
+    :param packages_path: the path to the root of the packages
+    :type packages_path: str
+    :returns: generator of package name and release version
+    :rtype: gen((str, int))
+    """
+
+    for letter_path in packages_path.iterdir():
+        for package in letter_path.iterdir():
+            for release_version in package.iterdir():
+                yield (package.name, int(release_version.name))
+
+
+def render_universe_zip(zip_file, packages):
+    """Populates a zipfile from a list of universe v3 packages. This function
+    creates directories to be backwards compatible with legacy Cosmos.
+
+    :param zip_file: zipfile where we need to write the packages
+    :type zip_file: zipfile.ZipFile
+    :param packages: list of packages
+    :type packages: [dict]
+    :rtype: None
+    """
+
+    packages = sorted(
+        packages,
+        key=lambda package: (package['name'], package['releaseVersion']))
+
+    root = pathlib.Path('universe')
+
+    create_dir_in_zip(zip_file, root)
+
+    create_dir_in_zip(zip_file, root / 'repo')
+
+    create_dir_in_zip(zip_file, root / 'repo' / 'meta')
+    zip_file.writestr(
+        str(root / 'repo' / 'meta' / 'index.json'),
+        json.dumps(create_index(packages)))
+
+    zip_file.writestr(
+        str(root / 'repo' / 'meta' / 'version.json'),
+        json.dumps({'version': '2.0.0'}))
+
+    packagesDir = root / 'repo' / 'packages'
+    create_dir_in_zip(zip_file, packagesDir)
+
+    currentLetter = ''
+    currentPackageName = ''
+    for package in packages:
+        if currentLetter != package['name'][:1].upper():
+            currentLetter = package['name'][:1].upper()
+            create_dir_in_zip(zip_file, packagesDir / currentLetter)
+
+        if currentPackageName != package['name']:
+            currentPackageName = package['name']
+            create_dir_in_zip(
+                zip_file,
+                packagesDir / currentLetter / currentPackageName)
+
+        package_directory = (
+            packagesDir /
+            currentLetter /
+            currentPackageName /
+            str(package['releaseVersion'])
+        )
+        create_dir_in_zip(zip_file, package_directory)
+
+        write_package_in_zip(zip_file, package_directory, package)
+
+
+def create_dir_in_zip(zip_file, directory):
+    """Create a directory in a zip file
+
+    :param zip_file: zip file where the directory will get created
+    :type zip_file: zipfile.ZipFile
+    :param directory: path for the directory
+    :type directory: pathlib.Path
+    :rtype: None
+    """
+
+    zip_file.writestr(str(directory) + '/', b'')
+
+
+def write_package_in_zip(zip_file, path, package):
+    """Write packages files in the zip file
+
+    :param zip_file: zip file where the files will get created
+    :type zip_file: zipfile.ZipFile
+    :param path: path for the package directory. E.g.
+                 universe/repo/packages/M/marathon/0
+    :type path: pathlib.Path
+    :param package: package information dictionary
+    :type package: dict
+    :rtype: None
+    """
+
+    package = downgrade_package_to_v2(package)
+
+    package.pop('releaseVersion')
+
+    resource = package.pop('resource', None)
+    if resource:
+        zip_file.writestr(
+            str(path / 'resource.json'),
+            json.dumps(resource))
+
+    marathon_template = package.pop(
+        'marathon',
+        {}
+    ).get(
+        'v2AppMustacheTemplate'
+    )
+    if marathon_template:
+        zip_file.writestr(
+            str(path / 'marathon.json.mustache'),
+            base64.standard_b64decode(marathon_template))
+
+    config = package.pop('config', None)
+    if config:
+        zip_file.writestr(
+            str(path / 'config.json'),
+            json.dumps(config))
+
+    command = package.pop('command', None)
+    if command:
+        zip_file.writestr(
+            str(path / 'command.json'),
+            json.dumps(command))
+
+    zip_file.writestr(
+        str(path / 'package.json'),
+        json.dumps(package))
+
+
+def create_index(packages):
+    """Create an index for all of the packages
+
+    :param packages: list of packages
+    :type packages: [dict]
+    :rtype: dict
+    """
+
+    index = {
+        'version': '2.0.0',
+        'packages': [
+            create_index_entry(same_packages)
+            for _, same_packages
+            in itertools.groupby(packages, key=lambda package: package['name'])
+        ]
+    }
+
+    return index
+
+
+def create_index_entry(packages):
+    """Create index entry from packages with the same name.
+
+    :param packages: list of packages with the same name
+    :type packages: [dict]
+    :rtype: dict
+    """
+
+    entry = {
+        'versions': {}
+    }
+
+    for package in packages:
+        entry.update({
+            'name': package['name'],
+            'currentVersion': package['version'],
+            'description': package['description'],
+            'framework': package.get('framework', False),
+            'tags': package['tags'],
+            'selected': package.get('selected', False)
+        })
+
+        entry['versions'][package['version']] = str(package['releaseVersion'])
+
+    return entry
+
+
+def v3_to_v2_package(v3_package):
+    """Converts a v3 package to a v2 package
+
+    :param v3_package: a v3 package
+    :type v3_package: dict
+    :return: a v2 package
+    :rtype: dict
+    """
+    package = copy.deepcopy(v3_package)
+
+    package.pop('minDcosReleaseVersion', None)
+    package['packagingVersion'] = "2.0"
+    resource = package.get('resource', None)
+    if resource:
+        cli = resource.pop('cli', None)
+        if cli and 'command' not in package:
+            print(('WARNING: Removing binary CLI from ({}, {}) without a '
+                  'Python CLI').format(package['name'], package['version']))
+
+    return package
+
+
+def v4_to_v3_package(v4_package):
+    """Converts a v4 package to a v3 package
+
+    :param v4_package: a v4 package
+    :type v4_package: dict
+    :return: a v3 package
+    :rtype: dict
+    """
+    package = copy.deepcopy(v4_package)
+    package.pop('upgradesFrom', None)
+    package.pop('downgradesTo', None)
+    package["packagingVersion"] = "3.0"
+    return package
+
+
+def downgrade_package_to_v2(package):
+    """Converts a v4 or v3 package to a v2 package. If given a v2
+    package, it creates a deep copy but does not modify it. It does not
+    modify the original package.
+
+    :param package: v4, v3, or v2 package
+    :type package: dict
+    :return: a v2 package
+    :rtype: dict
+    """
+    packaging_version = package.get("packagingVersion")
+    if packaging_version == "2.0":
+        return copy.deepcopy(package)
+    elif packaging_version == "3.0":
+        return v3_to_v2_package(package)
+    else:
+        return v3_to_v2_package(v4_to_v3_package(package))
+
+
+def downgrade_package_to_v3(package):
+    """Converts a v4 package to a v3 package. If given a v3 or v2 package
+    it creates a deep copy of it, but does not modify it. It does not
+    modify the original package.
+
+    :param package: v4, v3, or v2 package
+    :type package: dict
+    :return: a v3 or v2 package
+    :rtype: dict
+    """
+    packaging_version = package.get("packagingVersion")
+    if packaging_version == "2.0" or packaging_version == "3.0":
+        return copy.deepcopy(package)
+    else:
+        return v4_to_v3_package(package)
+
+
+def validate_repo_with_schema(repo_json_data, repo_version):
+    """Validates a repo and its version against the corresponding schema
+
+    :param repo_json_data: the JSON content of the repo
+    :param repo_version: version of the repo (e.g.: v4)
+    :return: list of validation errors (an empty list means no errors)
+    """
+    validator = jsonschema.Draft4Validator(_load_jsonschema(repo_version))
+    errors = []
+    for error in validator.iter_errors(repo_json_data):
+        for suberror in sorted(error.context, key=lambda e: e.schema_path):
+            errors.append('{}: {}'.format(list(suberror.schema_path), suberror.message))
+    return errors
+
+
+def _validate_repo(file_path, version):
+    """Validates a repo JSON file against the given version.
+
+    :param file_path: the path where the universe was stored
+    :type file_path: str
+    :param version: DC/OS version
+    :type version: str
+    :rtype: None
+    """
+
+    if LooseVersion(version) >= LooseVersion('1.10'):
+        repo_version = 'v4'
+    else:
+        repo_version = 'v3'
+
+    with file_path.open(encoding='utf-8') as repo_file:
+        repo = json.loads(repo_file.read())
+
+    errors = validate_repo_with_schema(repo, repo_version)
+    if len(errors) != 0:
+        sys.exit(
+            'ERROR\n\nRepo {} version {} validation errors: {}'.format(
+                file_path,
+                repo_version,
+                '\n'.join(errors)
+            )
+        )
+
+
+def _load_jsonschema(repo_version):
+    """Opens and parses the repo schema based on the version provided.
+
+    :param repo_version: repo schema version. E.g. v3 vs v4
+    :type repo_version: str
+    :return: the schema dictionary
+    :rtype: dict
+    """
+
+    with open(
+        'repo/meta/schema/{}-repo-schema.json'.format(repo_version),
+        encoding='utf-8'
+    ) as schema_file:
+        return json.loads(schema_file.read())
+
+
+if __name__ == '__main__':
+    sys.exit(main())
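
The pre-1.10 double-escaping handled by escape_json_string() is easiest to see on a tiny input. A self-contained check of the same regex, reproduced here so the snippet runs on its own:

    import re

    def escape_json_string(string):
        # Same rule as in gen_universe.py: add a backslash before any double
        # quote that is not already preceded by a backslash.
        return re.sub('([^\\\\])\\\"', lambda m: m.group(1) + '\\\"', string)

    print(escape_json_string('{"field": "value"}'))  # {\"field\": \"value\"}
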
diff --git a/dcos-universe/scripts/generate-config-reference.py b/dcos-universe/scripts/generate-config-reference.py
new file mode 100755
index 0000000..33e5410
--- /dev/null
+++ b/dcos-universe/scripts/generate-config-reference.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+"""This script builds a Markdown file containing configuration references for
+all packages (and all package versions) contained in the Mesosphere DC/OS
+Universe repository. It outputs a single file, 'config-reference.md' in the
+current working directory.
+
+  Usage:  ./generate-config-reference.py [/path/to/universe/repo/packages]
+
+"""
+import json
+import os
+import sys
+
+
+def find_config_files(path):
+    config_files = []
+
+    for root, dirs, files in os.walk(path):
+        for f in files:
+            if f == 'config.json':
+                config_files.append(os.path.join(root, f))
+
+    return config_files
+
+
+def main(path):
+    files = find_config_files(path)
+    config_ref_path = os.path.join(os.getcwd(), 'config-reference.md')
+    outfile = open(config_ref_path, 'w', encoding='utf-8')
+    outfile.write("# DC/OS Universe Package Configuration Reference\n\n")
+
+    for f in files:
+        with open(f, 'r', encoding='utf-8') as config:
+            package_name = f.split('/')[-3]
+            package_version = f.split('/')[-2]
+            outfile.write("## {} version {}\n\n".format(package_name, package_version))
+            props = json.loads(config.read())['properties']
+
+            for key, value in props.items():
+                if key == "properties":
+                    outfile.write("*Errors encountered when processing config properties. Not all properties may be listed here. Please verify the structure of this package and package version.*\n\n")
+                    continue
+
+                outfile.write("### {} configuration properties\n\n".format(key))
+                outfile.write("| Property | Type | Description | Default Value |\n")
+                outfile.write("|----------|------|-------------|---------------|\n")
+
+                for _, prop in value.items():
+                    if type(prop) is not dict:
+                        continue
+                    for key, details in prop.items():
+                        prop = key
+
+                        try:
+                            typ = details['type']
+                        except KeyError:
+                            typ = "*No type provided.*"
+
+                        try:
+                            desc = details['description']
+                        except KeyError:
+                            desc = "*No description provided.*"
+
+                        try:
+                            default = "`{}`".format(details['default'])
+                            if default == "``":
+                                default = "*Empty string.*"
+                        except KeyError:
+                            default = "*No default.*"
+
+                        outfile.write("| {prop} | {typ} | {desc} | {default} |\n".format(
+                            prop=prop, desc=desc, typ=typ, default=default))
+
+                outfile.write("\n")
+
+    outfile.close()
+
+if __name__ == '__main__':
+    if len(sys.argv) == 2:
+        path = sys.argv[1]
+    else:
+        path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../repo/packages')
+
+    main(path)
diff --git a/dcos-universe/scripts/install-git-hooks.sh b/dcos-universe/scripts/install-git-hooks.sh
new file mode 100755
index 0000000..0071ce9
--- /dev/null
+++ b/dcos-universe/scripts/install-git-hooks.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+set -o errexit -o nounset -o pipefail
+
+SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )";
+UNIVERSE_DIR=$SCRIPTS_DIR/..
+HOOKS_DIR=$UNIVERSE_DIR/hooks
+GIT_HOOKS_DIR=$UNIVERSE_DIR/.git/hooks
+
+echo "Installing git hooks...";
+
+for file in "$HOOKS_DIR"/*; do
+  echo "Copying $(basename "$file")";
+  cp "$file" "$GIT_HOOKS_DIR/";
+  chmod +x "$GIT_HOOKS_DIR/$(basename "$file")";
+done
+
+echo "OK";
+
diff --git a/dcos-universe/scripts/json_dup_key_check.py b/dcos-universe/scripts/json_dup_key_check.py
new file mode 100755
index 0000000..aa62790
--- /dev/null
+++ b/dcos-universe/scripts/json_dup_key_check.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+import os
+import sys
+import json
+
+
+class DuplicatedKeysException(Exception):
+    pass
+
+
+def json_checker(pair):
+    ret = {}
+    for key, value in pair:
+        if key in ret:
+            raise DuplicatedKeysException(
+                "Duplicate key {!r} in json document".format(key))
+        else:
+            ret[key] = value
+    return ret
+
+if len(sys.argv) != 2:
+    sys.stderr.write(
+        "Syntax: {} path/to/file.json\n".format(os.path.basename(__file__)))
+    sys.exit(1)
+
+try:
+    with open(sys.argv[1], encoding='utf-8') as f:
+        json.load(f, object_pairs_hook=json_checker)
+except DuplicatedKeysException as e:
+    sys.stderr.write("Error validating %s: %s\n" % (sys.argv[1], e.args[0]))
+    sys.exit(1)
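
For context on why the hook uses an object_pairs_hook at all: plain json.load silently keeps the last occurrence of a duplicated key. A tiny self-contained illustration (not part of the hook itself):

    import json

    doc = '{"name": "first", "name": "second"}'
    print(json.loads(doc))  # {'name': 'second'} -- the duplicate is dropped silently

    def reject_duplicates(pairs):
        seen = {}
        for key, value in pairs:
            if key in seen:
                raise ValueError("Duplicate key {!r}".format(key))
            seen[key] = value
        return seen

    # json.loads(doc, object_pairs_hook=reject_duplicates)  # would raise ValueError
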
diff --git a/dcos-universe/scripts/local-universe.py b/dcos-universe/scripts/local-universe.py
new file mode 100755
index 0000000..a29ed61
--- /dev/null
+++ b/dcos-universe/scripts/local-universe.py
@@ -0,0 +1,475 @@
+#!/usr/bin/env python3
+
+import argparse
+import concurrent.futures
+import contextlib
+import distutils.version
+import fnmatch
+import json
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+import tempfile
+import time
+import urllib.error
+import urllib.parse
+import urllib.request
+
+HTTP_ROOT = "http://master.mesos:8082/"
+DOCKER_ROOT = "master.mesos:5000"
+
+
+def main():
+    # Docker writes files into the tempdir as root, so you need to run
+    # this script as root to clean them up successfully.
+    if os.getuid() != 0:
+        print("You must run this as root, please `sudo` first.")
+        sys.exit(1)
+
+    parser = argparse.ArgumentParser(
+        description='This script is able to download the latest artifacts for '
+        'all of the packages in the Universe repository into a docker image. '
+        'You can control the path to the temporary file by setting the TMPDIR '
+        'environment variable. E.g. TMPDIR=\'.\' ./scripts/local-universe.py '
+        '...')
+    parser.add_argument(
+        '--server_url',
+        default=HTTP_ROOT,
+        help="URL for http server")
+    parser.add_argument(
+        '--repository',
+        required=True,
+        help='Path to the top level package directory. E.g. repo/packages')
+    parser.add_argument(
+        '--include',
+        default='',
+        help='Comma (,) separated list of packages to include. For each '
+        'package specify both the name and version by separating them with a '
+        'colon (:). E.g. --include="marathon:1.4.2,chronos:2.5.0"')
+    parser.add_argument(
+        '--selected',
+        action='store_true',
+        default=False,
+        help='Set this to include only selected packages')
+    parser.add_argument(
+        '--nonlocal_images',
+        action='store_true',
+        default=False,
+        help='Set this to leave images resource URLs untouched.')
+    parser.add_argument(
+        '--nonlocal_cli',
+        action='store_true',
+        default=False,
+        help='Set this to leave CLI resource URLs untouched.')
+    parser.add_argument(
+        '--dcos_version',
+        required=True,
+        help='Set this to the version of DC/OS under which the local universe '
+        'will operate. Ensures that only package versions compatible with '
+        'that DC/OS version are included. This parameter is required.'
+    )
+
+    args = parser.parse_args()
+
+    packages = [
+        tuple(spec.split(':'))
+        for spec in args.include.split(',') if spec != ''
+    ]
+
+    dcos_version = distutils.version.LooseVersion(args.dcos_version)
+
+    with tempfile.TemporaryDirectory() as dir_path, \
+            run_docker_registry(dir_path / pathlib.Path("registry")):
+
+        http_artifacts = dir_path / pathlib.Path("http")
+        docker_artifacts = dir_path / pathlib.Path("registry")
+        repo_artifacts = dir_path / pathlib.Path("universe/repo/packages")
+
+        # There is a race between creating this folder and the docker run
+        # command creating this volume.
+        os.makedirs(str(docker_artifacts), exist_ok=True)
+
+        os.makedirs(str(http_artifacts))
+        os.makedirs(str(repo_artifacts))
+
+        failed_packages = []
+
+        def handle_package(opts):
+            package, version, path = opts
+            try:
+                prepare_repository(
+                    package,
+                    version,
+                    path,
+                    pathlib.Path(args.repository),
+                    repo_artifacts,
+                    args.server_url,
+                    args.nonlocal_images,
+                    args.nonlocal_cli
+                )
+
+                for url, archive_path in enumerate_http_resources(
+                    package,
+                    version,
+                    path,
+                    args.nonlocal_images,
+                    args.nonlocal_cli
+                ):
+                    add_http_resource(http_artifacts, url, archive_path)
+
+                for name in enumerate_docker_images(path):
+                    download_docker_image(name)
+                    upload_docker_image(name)
+            except (subprocess.CalledProcessError, urllib.error.HTTPError):
+                print('MISSING ASSETS: {}'.format(package))
+                remove_package(package, dir_path)
+                failed_packages.append(package)
+
+            return package
+
+        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
+            for package in executor.map(
+                handle_package,
+                enumerate_dcos_packages(
+                    pathlib.Path(args.repository),
+                    packages,
+                    args.selected,
+                    dcos_version)):
+                print("Completed: {}".format(package))
+
+        build_repository(
+            pathlib.Path(
+                os.path.dirname(os.path.realpath(__file__)),
+                '..',
+                'scripts'
+            ),
+            pathlib.Path(args.repository),
+            pathlib.Path(dir_path, 'universe')
+        )
+
+        build_universe_docker(pathlib.Path(dir_path))
+
+        if failed_packages:
+            print("Errors: {}".format(failed_packages))
+            print("These packages are not included in the image.")
+
+
+def enumerate_dcos_packages(
+        packages_path,
+        packages,
+        only_selected,
+        dcos_version):
+    """Enumarate all of the package and revision to include
+
+    :param packages_path: the path to the root of the packages
+    :type packages_path: pathlib.Path
+    :param packages: list of the names and versions of packages to include.
+                     The first field in each tuple is the name; the second
+                     field is the version.
+    :type packages: [(str, str)]
+    :param only_selected: filter the list of packages to only ones that are
+                          selected
+    :type only_selected: boolean
+    :param dcos_version: filter the list of packages to only ones compatible
+                          with this DC/OS version; if None, do not filter
+    :type dcos_version: distutils.version.LooseVersion | None
+    :returns: generator of package name, package version and path
+    :rtype: gen((str, str, pathlib.Path))
+    """
+
+    pending_packages = packages.copy()
+    for letter_path in packages_path.iterdir():
+        assert len(letter_path.name) == 1 and letter_path.name.isupper()
+
+        for package_path in letter_path.iterdir():
+            revision_paths = list(package_path.iterdir())
+            revision_paths.sort(key=lambda r: int(r.name), reverse=True)
+
+            for index, revision_path in enumerate(revision_paths):
+                package_json = load_json(revision_path / 'package.json')
+                if include_revision(
+                    package_json,
+                    pending_packages,
+                    only_selected,
+                    dcos_version,
+                    index == 0  # Latest package will always have an index of 0
+                ):
+                    # *Mutation*. We enumerated the package, so remove it
+                    # from our pending list if it exists. It may not exist
+                    # if --selected is used.
+                    key = (package_json['name'], package_json['version'])
+                    if key in pending_packages:
+                        pending_packages.remove(key)
+
+                    yield (
+                        package_json['name'],
+                        package_json['version'],
+                        revision_path
+                    )
+
+    if pending_packages:
+        print("Error: couldn't find the following packages")
+        print(pending_packages)
+        sys.exit(1)
+
+
+def include_revision(
+    package_json,
+    packages,
+    only_selected,
+    dcos_version,
+    is_latest
+):
+    version_pass = version_check(package_json, dcos_version)
+    selected_pass = selected_check(
+        package_json,
+        packages,
+        only_selected,
+        is_latest
+    )
+
+    return version_pass and selected_pass
+
+
+def version_check(package_json, dcos_version):
+    if dcos_version:
+        raw_version = package_json.get('minDcosReleaseVersion')
+        if raw_version:
+            min_version = distutils.version.LooseVersion(raw_version)
+            if dcos_version < min_version:
+                return False
+    return True
+
+
+def selected_check(package_json, packages, only_selected, is_latest):
+    """Return true when:
+    1) The package is selected, "only_selected" is true and it is the latest
+       package.
+    2) The package name and version matches one of the tuples in "packages"
+    """
+    package_name = package_json['name']
+    package_version = package_json['version']
+
+    if only_selected:
+        return is_latest and package_json.get('selected', False)
+
+    return (package_name, package_version) in packages
+
+
+def load_json(json_path):
+    with json_path.open(encoding='utf-8') as json_file:
+        return json.load(json_file)
+
+
+def enumerate_http_resources(
+    package,
+    version,
+    package_path,
+    skip_images,
+    skip_cli
+):
+    resource = load_json(package_path / 'resource.json')
+
+    if not skip_images:
+        for name, url in resource.get('images', {}).items():
+            if name != 'screenshots':
+                yield url, pathlib.Path(package, version, 'images')
+
+    for name, url in resource.get('assets', {}).get('uris', {}).items():
+        yield url, pathlib.Path(package, version, 'uris')
+
+    if not skip_cli:
+        for os_type, arch_dict in \
+                resource.get('cli', {}).get('binaries', {}).items():
+            for arch in arch_dict.items():
+                yield (
+                    arch[1]['url'],
+                    pathlib.Path(package, version, 'uris', os_type)
+                )
+
+    command_path = (package_path / 'command.json')
+    if command_path.exists():
+        commands = load_json(command_path)
+
+        for url in commands.get("pip", []):
+            yield url, pathlib.Path(package, version, 'commands')
+
+
+def enumerate_docker_images(package_path):
+    resource = load_json(package_path / 'resource.json')
+
+    dockers = resource.get('assets', {}).get('container', {}).get('docker', {})
+
+    return (name for _, name in dockers.items())
+
+
+@contextlib.contextmanager
+def run_docker_registry(volume_path):
+    print('Start docker registry.')
+    command = ['docker', 'run', '-d', '-p', '5000:5000', '--name',
+               'registry', '-v', '{}:/var/lib/registry'.format(volume_path),
+               'registry:2.4.1']
+
+    subprocess.check_call(command)
+
+    try:
+        yield
+    finally:
+        print('Stopping docker registry.')
+        command = ['docker', 'rm', '-f', 'registry']
+        subprocess.call(command)
+
+
+def download_docker_image(name):
+    print('Pulling docker image: {}'.format(name))
+    command = ['docker', 'pull', name]
+
+    subprocess.check_call(command)
+
+
+def format_image_name(host, name):
+    # Probably has a hostname at the front, get rid of it.
+    if '.' in name.split(':')[0]:
+        return '{}/{}'.format(host, "/".join(name.split("/")[1:]))
+
+    return '{}/{}'.format(host, name)
+
+
+def upload_docker_image(name):
+    print('Pushing docker image: {}'.format(name))
+    command = ['docker', 'tag', name,
+               format_image_name('localhost:5000', name)]
+
+    subprocess.check_call(command)
+
+    command = ['docker', 'push', format_image_name('localhost:5000', name)]
+
+    subprocess.check_call(command)
+
+
+def build_universe_docker(dir_path):
+    print('Building the universe docker container')
+    current_dir = pathlib.Path(
+        os.path.dirname(os.path.realpath(__file__)))
+    shutil.copyfile(
+        str(current_dir / '..' / 'docker' / 'local-universe' / 'Dockerfile'),
+        str(dir_path / 'Dockerfile'))
+
+    command = ['docker', 'build', '-t',
+               'mesosphere/universe:{:.0f}'.format(time.time()),
+               '-t', 'mesosphere/universe:latest', '.']
+
+    subprocess.check_call(command, cwd=str(dir_path))
+
+
+def add_http_resource(dir_path, url, base_path):
+    archive_path = (dir_path / base_path /
+                    pathlib.Path(urllib.parse.urlparse(url).path).name)
+    print('Adding {} at {}.'.format(url, archive_path))
+    os.makedirs(str(archive_path.parent), exist_ok=True)
+    urllib.request.urlretrieve(url, str(archive_path))
+
+
+def prepare_repository(
+    package,
+    version,
+    package_path,
+    source_repo,
+    dest_repo,
+    http_root,
+    skip_images,
+    skip_cli
+):
+    dest_path = dest_repo / package_path.relative_to(source_repo)
+    shutil.copytree(str(package_path), str(dest_path))
+
+    dest_resource = dest_path / 'resource.json'
+    with dest_resource.open('w', encoding='utf-8') as dest_file:
+        resource = load_json(package_path / 'resource.json')
+
+        # Change the root for images (ignore screenshots)
+        if not skip_images and 'images' in resource:
+            resource["images"] = {
+                n: urllib.parse.urljoin(
+                    http_root, str(pathlib.PurePath(
+                        package, version, "images", pathlib.Path(uri).name)))
+                for n, uri in resource.get("images", {}).items() if 'icon' in n}
+
+        # Change the root for asset uris.
+        if 'assets' in resource:
+            resource["assets"]["uris"] = {
+                n: urllib.parse.urljoin(
+                    http_root, str(pathlib.PurePath(
+                        package, version, "uris", pathlib.Path(uri).name)))
+                for n, uri in resource["assets"].get("uris", {}).items()}
+
+        # Change the root for cli uris.
+        if not skip_cli and 'cli' in resource:
+            for os_type, arch_dict in \
+                    resource.get('cli', {}).get('binaries', {}).items():
+                for arch in arch_dict.items():
+                    uri = arch[1]["url"]
+                    arch[1]["url"] = urllib.parse.urljoin(
+                        http_root,
+                        str(
+                            pathlib.PurePath(
+                                package,
+                                version,
+                                "uris",
+                                os_type,
+                                pathlib.Path(uri).name)))
+
+        # Add the local docker repo prefix.
+        if 'assets' in resource:
+            if 'container' in resource["assets"]:
+                resource["assets"]["container"]["docker"] = {
+                    n: format_image_name(DOCKER_ROOT, image_name)
+                    for n, image_name in resource["assets"]["container"].get(
+                        "docker", {}).items()}
+
+        json.dump(resource, dest_file, indent=4)
+
+    command_path = (package_path / 'command.json')
+    if not command_path.exists():
+        return
+
+    dest_command = dest_path / 'command.json'
+    with dest_command.open('w', encoding='utf-8') as dest_file:
+        command = load_json(command_path)
+
+        command['pip'] = [
+            urllib.parse.urljoin(
+                http_root,
+                str(
+                    pathlib.PurePath(
+                        package,
+                        version,
+                        "commands",
+                        pathlib.Path(uri).name
+                    )
+                )
+            )
+            for uri in command.get("pip", [])
+        ]
+        json.dump(command, dest_file, indent=4)
+
+
+def build_repository(scripts_dir, repo_dir, dest_dir):
+    shutil.copytree(str(scripts_dir), str(dest_dir / "scripts"))
+    shutil.copytree(str(repo_dir / '..' / 'meta'),
+                    str(dest_dir / 'repo' / 'meta'))
+
+    command = ["bash", "scripts/build.sh"]
+    subprocess.check_call(command, cwd=str(dest_dir))
+
+
+def remove_package(package, base_dir):
+    for root, dirnames, filenames in os.walk(base_dir):
+        for dirname in fnmatch.filter(dirnames, package):
+            shutil.rmtree(os.path.join(root, dirname))
+
+
+if __name__ == '__main__':
+    sys.exit(main())
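
A typical invocation of this script, assuming the standard repo layout and reusing
the package names from the --include help text above as placeholders, might look
like this (a minimal sketch, not taken from the commit):

    # minimal sketch; package versions and the DC/OS version are placeholders
    sudo TMPDIR='.' ./scripts/local-universe.py \
        --repository repo/packages \
        --dcos_version 1.10 \
        --include "marathon:1.4.2,chronos:2.5.0"

On success the script tags the result as mesosphere/universe:latest and prints any
packages whose assets could not be fetched.
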
diff --git a/dcos-universe/scripts/old-local-universe.py b/dcos-universe/scripts/old-local-universe.py
new file mode 100644
index 0000000..7ff88f6
--- /dev/null
+++ b/dcos-universe/scripts/old-local-universe.py
@@ -0,0 +1,412 @@
+#!/usr/bin/env python3
+
+import argparse
+import concurrent.futures
+import contextlib
+import distutils.version
+import fnmatch
+import json
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+import tempfile
+import time
+import urllib.error
+import urllib.parse
+import urllib.request
+
+HTTP_ROOT = "http://master.mesos:8082/"
+DOCKER_ROOT = "master.mesos:5000"
+
+
+def main():
+    # Docker writes files into the tempdir as root, so you need to run the
+    # script as root to clean these up successfully.
+    if os.getuid() != 0:
+        print("You must run this as root, please `sudo` first.")
+        sys.exit(1)
+
+    parser = argparse.ArgumentParser(
+        description='This script downloads the latest artifacts for all of '
+        'the packages in the Universe repository into a docker image. You '
+        'can control the path to the temporary directory by setting the '
+        'TMPDIR environment variable. E.g. TMPDIR=\'.\' '
+        './scripts/local-universe.py ...')
+    parser.add_argument(
+        '--server_url',
+        default=HTTP_ROOT,
+        help="URL for http server")
+    parser.add_argument(
+        '--repository',
+        required=True,
+        help='Path to the top level package directory. E.g. repo/packages')
+    parser.add_argument(
+        '--include',
+        default='',
+        help='Comma (,) separated list of packages to include. If this '
+        'option is not specified then all packages are downloaded. E.g. '
+        '--include="marathon,chronos"')
+    parser.add_argument(
+        '--selected',
+        action='store_true',
+        default=False,
+        help='Set this to include only selected packages')
+    parser.add_argument(
+        '--nonlocal_images',
+        action='store_true',
+        default=False,
+        help='Set this to leave images resource URLs untouched.')
+    parser.add_argument(
+        '--nonlocal_cli',
+        action='store_true',
+        default=False,
+        help='Set this to leave CLI resource URLs untouched.')
+    parser.add_argument(
+        '--dcos_version',
+        required=True,
+        help='Set this to the version of DC/OS under which the local universe '
+        'will operate. Ensures that only package versions compatible with '
+        'that DC/OS version are included. This parameter is required.'
+    )
+
+    args = parser.parse_args()
+
+    package_names = [name for name in args.include.split(',') if name != '']
+
+    dcos_version = distutils.version.LooseVersion(args.dcos_version)
+
+    with tempfile.TemporaryDirectory() as dir_path, \
+            run_docker_registry(dir_path / pathlib.Path("registry")):
+
+        http_artifacts = dir_path / pathlib.Path("http")
+        docker_artifacts = dir_path / pathlib.Path("registry")
+        repo_artifacts = dir_path / pathlib.Path("universe/repo/packages")
+
+        # There is a race between creating this folder and the docker run
+        # command creating this volume.
+        os.makedirs(str(docker_artifacts), exist_ok=True)
+
+        os.makedirs(str(http_artifacts))
+        os.makedirs(str(repo_artifacts))
+
+        failed_packages = []
+
+        def handle_package(opts):
+            package, path = opts
+            try:
+                prepare_repository(
+                    package,
+                    path,
+                    pathlib.Path(args.repository),
+                    repo_artifacts,
+                    args.server_url,
+                    args.nonlocal_images,
+                    args.nonlocal_cli
+                )
+
+                for url, archive_path in enumerate_http_resources(
+                    package,
+                    path,
+                    args.nonlocal_images,
+                    args.nonlocal_cli
+                ):
+                    add_http_resource(http_artifacts, url, archive_path)
+
+                for name in enumerate_docker_images(path):
+                    download_docker_image(name)
+                    upload_docker_image(name)
+            except (subprocess.CalledProcessError, urllib.error.HTTPError):
+                print('MISSING ASSETS: {}'.format(package))
+                remove_package(package, dir_path)
+                failed_packages.append(package)
+
+            return package
+
+        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
+            for package in executor.map(
+                handle_package,
+                enumerate_dcos_packages(
+                    pathlib.Path(args.repository),
+                    package_names,
+                    args.selected,
+                    dcos_version)):
+                print("Completed: {}".format(package))
+
+        build_repository(
+            pathlib.Path(
+                os.path.dirname(os.path.realpath(__file__)),
+                '..',
+                'scripts'
+            ),
+            pathlib.Path(args.repository),
+            pathlib.Path(dir_path, 'universe')
+        )
+
+        build_universe_docker(pathlib.Path(dir_path))
+
+        if failed_packages:
+            print("Errors: {}".format(failed_packages))
+            print("These packages are not included in the image.")
+
+
+def enumerate_dcos_packages(
+        packages_path,
+        package_names,
+        only_selected,
+        dcos_version):
+    """Enumarate all of the package and revision to include
+
+    :param packages_path: the path to the root of the packages
+    :type packages_path: pathlib.Path
+    :param package_names: list of packages to include. An empty list means
+                          all packages
+    :type package_names: [str]
+    :param only_selected: filter the list of packages to only ones that are
+                          selected
+    :type only_selected: boolean
+    :param dcos_version: filter the list of packages to only ones compatible
+                          with this DC/OS version; if None, do not filter
+    :type dcos_version: distutils.version.LooseVersion | None
+    :returns: generator of package name and revision
+    :rtype: gen((str, pathlib.Path))
+    """
+    def version_check(package_json):
+        if dcos_version:
+            raw_version = package_json.get('minDcosReleaseVersion')
+            if raw_version:
+                min_version = distutils.version.LooseVersion(raw_version)
+                if dcos_version < min_version:
+                    return False
+        return True
+
+
+    def selected_check(package_name, package_json):
+        if only_selected:
+            return package_json.get('selected', False)
+        return not package_names or package_name in package_names
+
+
+    def include_revision(package_name, revision_path):
+        json_path = revision_path / 'package.json'
+        with json_path.open(encoding='utf-8') as json_file:
+            package_json = json.load(json_file)
+
+            version_pass = version_check(package_json)
+            selected_pass = selected_check(package_name, package_json)
+
+            return version_pass and selected_pass
+
+
+    for letter_path in packages_path.iterdir():
+        assert len(letter_path.name) == 1 and letter_path.name.isupper()
+
+        for package_path in letter_path.iterdir():
+            revision_paths = list(package_path.iterdir())
+            revision_paths.sort(key=lambda r: int(r.name), reverse=True)
+
+            # Include only the first acceptable revision
+            for revision_path in revision_paths:
+                if include_revision(package_path.name, revision_path):
+                    yield (package_path.name, revision_path)
+                    break
+
+
+def enumerate_http_resources(package, package_path, skip_images, skip_cli):
+    resource_path = package_path / 'resource.json'
+    with resource_path.open(encoding='utf-8') as json_file:
+        resource = json.load(json_file)
+
+    if not skip_images:
+        for name, url in resource.get('images', {}).items():
+            if name != 'screenshots':
+                yield url, pathlib.Path(package, 'images')
+
+    for name, url in resource.get('assets', {}).get('uris', {}).items():
+        yield url, pathlib.Path(package, 'uris')
+
+    if not skip_cli:
+        for os_type, arch_dict in \
+                resource.get('cli', {}).get('binaries', {}).items():
+            for arch in arch_dict.items():
+                yield arch[1]['url'], pathlib.Path(package, 'uris', os_type)
+
+    command_path = (package_path / 'command.json')
+    if command_path.exists():
+        with command_path.open(encoding='utf-8') as json_file:
+            commands = json.load(json_file)
+
+        for url in commands.get("pip", []):
+            yield url, pathlib.Path(package, 'commands')
+
+
+def enumerate_docker_images(package_path):
+    resource_path = package_path / 'resource.json'
+    with resource_path.open(encoding='utf-8') as json_file:
+        resource = json.load(json_file)
+
+    dockers = resource.get('assets', {}).get('container', {}).get('docker', {})
+
+    return (name for _, name in dockers.items())
+
+
+@contextlib.contextmanager
+def run_docker_registry(volume_path):
+    print('Start docker registry.')
+    command = ['docker', 'run', '-d', '-p', '5000:5000', '--name',
+               'registry', '-v', '{}:/var/lib/registry'.format(volume_path),
+               'registry:2.4.1']
+
+    subprocess.check_call(command)
+
+    try:
+        yield
+    finally:
+        print('Stopping docker registry.')
+        command = ['docker', 'rm', '-f', 'registry']
+        subprocess.call(command)
+
+
+def download_docker_image(name):
+    print('Pulling docker image: {}'.format(name))
+    command = ['docker', 'pull', name]
+
+    subprocess.check_call(command)
+
+
+def format_image_name(host, name):
+    # Probably has a hostname at the front, get rid of it.
+    if '.' in name.split(':')[0]:
+        return '{}/{}'.format(host, "/".join(name.split("/")[1:]))
+
+    return '{}/{}'.format(host, name)
+
+
+def upload_docker_image(name):
+    print('Pushing docker image: {}'.format(name))
+    command = ['docker', 'tag', name,
+               format_image_name('localhost:5000', name)]
+
+    subprocess.check_call(command)
+
+    command = ['docker', 'push', format_image_name('localhost:5000', name)]
+
+    subprocess.check_call(command)
+
+
+def build_universe_docker(dir_path):
+    print('Building the universe docker container')
+    current_dir = pathlib.Path(
+        os.path.dirname(os.path.realpath(__file__)))
+    shutil.copyfile(
+        str(current_dir / '..' / 'docker' / 'local-universe' / 'Dockerfile'),
+        str(dir_path / 'Dockerfile'))
+
+    command = ['docker', 'build', '-t',
+               'mesosphere/universe:{:.0f}'.format(time.time()),
+               '-t', 'mesosphere/universe:latest', '.']
+
+    subprocess.check_call(command, cwd=str(dir_path))
+
+
+def add_http_resource(dir_path, url, base_path):
+    archive_path = (dir_path / base_path /
+                    pathlib.Path(urllib.parse.urlparse(url).path).name)
+    print('Adding {} at {}.'.format(url, archive_path))
+    os.makedirs(str(archive_path.parent), exist_ok=True)
+    urllib.request.urlretrieve(url, str(archive_path))
+
+
+def prepare_repository(
+    package, package_path,
+    source_repo, dest_repo,
+    http_root,
+    skip_images,
+    skip_cli
+):
+    dest_path = dest_repo / package_path.relative_to(source_repo)
+    shutil.copytree(str(package_path), str(dest_path))
+
+    source_resource = package_path / 'resource.json'
+    dest_resource = dest_path / 'resource.json'
+    with source_resource.open(encoding='utf-8') as source_file, \
+            dest_resource.open('w', encoding='utf-8') as dest_file:
+        resource = json.load(source_file)
+
+        # Change the root for images (ignore screenshots)
+        if not skip_images and 'images' in resource:
+            resource["images"] = {
+                n: urllib.parse.urljoin(
+                    http_root, str(pathlib.PurePath(
+                        package, "images", pathlib.Path(uri).name)))
+                for n, uri in resource.get("images", {}).items() if 'icon' in n}
+
+        # Change the root for asset uris.
+        if 'assets' in resource:
+            resource["assets"]["uris"] = {
+                n: urllib.parse.urljoin(
+                    http_root, str(pathlib.PurePath(
+                        package, "uris", pathlib.Path(uri).name)))
+                for n, uri in resource["assets"].get("uris", {}).items()}
+
+        # Change the root for cli uris.
+        if not skip_cli and 'cli' in resource:
+            for os_type, arch_dict in \
+                    resource.get('cli', {}).get('binaries', {}).items():
+                for arch in arch_dict.items():
+                    uri = arch[1]["url"]
+                    arch[1]["url"] = urllib.parse.urljoin(
+                        http_root,
+                        str(
+                            pathlib.PurePath(
+                                package,
+                                "uris",
+                                os_type,
+                                pathlib.Path(uri).name)))
+
+        # Add the local docker repo prefix.
+        if 'assets' in resource:
+            if 'container' in resource["assets"]:
+                resource["assets"]["container"]["docker"] = {
+                    n: format_image_name(DOCKER_ROOT, image_name)
+                    for n, image_name in resource["assets"]["container"].get(
+                        "docker", {}).items()}
+
+        json.dump(resource, dest_file, indent=4)
+
+    command_path = (package_path / 'command.json')
+    if not command_path.exists():
+        return
+
+    dest_command = dest_path / 'command.json'
+    with command_path.open(encoding='utf-8') as source_file, \
+            dest_command.open('w', encoding='utf-8') as dest_file:
+        command = json.load(source_file)
+
+        command['pip'] = [
+            urllib.parse.urljoin(
+                http_root, str(pathlib.PurePath(
+                    package, "commands", pathlib.Path(uri).name)))
+            for uri in command.get("pip", [])
+        ]
+        json.dump(command, dest_file, indent=4)
+
+
+def build_repository(scripts_dir, repo_dir, dest_dir):
+    shutil.copytree(str(scripts_dir), str(dest_dir / "scripts"))
+    shutil.copytree(str(repo_dir / '..' / 'meta'),
+                    str(dest_dir / 'repo' / 'meta'))
+
+    command = ["bash", "scripts/build.sh"]
+    subprocess.check_call(command, cwd=str(dest_dir))
+
+
+def remove_package(package, base_dir):
+    for root, dirnames, filenames in os.walk(base_dir):
+        for dirname in fnmatch.filter(dirnames, package):
+            shutil.rmtree(os.path.join(root, dirname))
+
+
+if __name__ == '__main__':
+    sys.exit(main())
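
The older script above differs from local-universe.py mainly in its --include
format: it takes bare package names rather than name:version pairs, so a
comparable invocation would be:

    # hypothetical invocation; package names are placeholders
    sudo ./scripts/old-local-universe.py \
        --repository repo/packages \
        --dcos_version 1.10 \
        --include "marathon,chronos"
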
diff --git a/dcos-universe/scripts/publish-local-universe.sh b/dcos-universe/scripts/publish-local-universe.sh
new file mode 100755
index 0000000..7fd68cb
--- /dev/null
+++ b/dcos-universe/scripts/publish-local-universe.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+set -o errexit -o nounset -o pipefail
+
+SCRIPTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+REPO_BASE_DIR=${SCRIPTS_DIR}/..
+
+S3_DEPLOY_BUCKET="s3://downloads.mesosphere.io/universe/public/"
+
+echo "Uploading local universe to: ${S3_DEPLOY_BUCKET}"
+aws --region us-east-1 s3 cp "${REPO_BASE_DIR}"/docker/local-universe/local-universe.tar.gz "${S3_DEPLOY_BUCKET}"
diff --git a/dcos-universe/scripts/requirements/requirements.txt b/dcos-universe/scripts/requirements/requirements.txt
new file mode 100644
index 0000000..88562b1
--- /dev/null
+++ b/dcos-universe/scripts/requirements/requirements.txt
@@ -0,0 +1 @@
+jsonschema==2.6.0
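
This pinned jsonschema release is what validate_packages.py (added below) imports;
a typical setup step, assuming the paths added in this commit, is simply:

    # path assumes the layout introduced by this commit
    pip3 install -r dcos-universe/scripts/requirements/requirements.txt
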
diff --git a/dcos-universe/scripts/validate_packages.py b/dcos-universe/scripts/validate_packages.py
new file mode 100755
index 0000000..75519e5
--- /dev/null
+++ b/dcos-universe/scripts/validate_packages.py
@@ -0,0 +1,181 @@
+#!/usr/bin/env python3
+
+import json
+import jsonschema
+import os
+import re
+import sys
+from distutils.version import LooseVersion
+
+SCRIPTS_DIR = os.path.dirname(os.path.realpath(__file__))
+UNIVERSE_DIR = os.path.join(SCRIPTS_DIR, "..")
+PKG_DIR = os.path.join(UNIVERSE_DIR, "repo/packages")
+SCHEMA_DIR = os.path.join(UNIVERSE_DIR, "repo/meta/schema")
+LETTER_PATTERN = re.compile("^[A-Z]$")
+PACKAGE_FOLDER_PATTERN = re.compile("^[a-z][a-z0-9-]*[a-z0-9]$")
+
+
+def eprint(*args, **kwargs):
+    print(*args, file=sys.stderr, **kwargs)
+
+
+def _get_json_schema(file_name):
+    with open(os.path.join(SCHEMA_DIR, file_name), encoding='utf-8') as f:
+        return json.loads(f.read())
+
+PACKAGE_JSON_SCHEMA = _get_json_schema('package-schema.json')
+COMMAND_JSON_SCHEMA = _get_json_schema('command-schema.json')
+CONFIG_JSON_SCHEMA = _get_json_schema('config-schema.json')
+V2_RESOURCE_JSON_SCHEMA = _get_json_schema('v2-resource-schema.json')
+V3_RESOURCE_JSON_SCHEMA = _get_json_schema('v3-resource-schema.json')
+
+
+def main():
+    # traverse prefix dirs ("A", "B", etc)
+    for letter in os.listdir(PKG_DIR):
+        if not LETTER_PATTERN.match(letter):
+            sys.exit(
+                "\tERROR\n\n"
+                "Invalid name for directory : {}\nName should match the "
+                "pattern : {}".format(letter, LETTER_PATTERN.pattern)
+            )
+        prefix_path = os.path.join(PKG_DIR, letter)
+        # traverse each package dir directory (e.g., "cassandra")
+        for given_package in os.listdir(prefix_path):
+            package_path = os.path.join(prefix_path, given_package)
+            _validate_package(given_package, package_path)
+
+    eprint("\nEverything OK!")
+
+
+def _validate_package(given_package, path):
+    eprint("Validating {}...".format(given_package))
+    for rev in os.listdir(path):
+        _validate_revision(given_package, rev, os.path.join(path, rev))
+
+
+def _validate_revision(given_package, revision, path):
+    eprint("\tValidating revision {}...".format(revision))
+
+    # validate package.json
+    package_json_path = os.path.join(path, 'package.json')
+    eprint("\t\tpackage.json:", end='')
+    if not os.path.isfile(package_json_path):
+        sys.exit("\tERROR\n\nMissing required package.json file")
+    package_json = _validate_json(package_json_path, PACKAGE_JSON_SCHEMA)
+    package_name = package_json.get("name")
+    _validate_package_with_directory(given_package, package_name)
+    eprint("\tOK")
+
+    packaging_version = package_json.get("packagingVersion", "2.0")
+
+    # validate upgrades version
+    min_dcos_release_version = package_json.get("minDcosReleaseVersion", "0.0")
+    upgrades_from = package_json.get("upgradesFrom", None)
+    downgrades_to = package_json.get("downgradesTo", None)
+    if (packaging_version == "4.0" and
+            (upgrades_from or downgrades_to) and
+            LooseVersion(min_dcos_release_version) < LooseVersion("1.10")):
+        # Note: We are going to allow this package state and, as a result,
+        # the conversion from v4 to v3. Even though this conversion loses
+        # information, the only consumer of the Universe repo API is "Cosmos
+        # the service manager". Old (< 1.10) Cosmos clients don't implement
+        # the update API, and new Cosmos (>= 1.10), which implements the
+        # update API, will use the new repo media type.
+        #
+        # It is important that "package managers" (e.g. Local Universe) never
+        # see this converted package and instead always see the original v4
+        # package.
+        pass
+
+    # validate command.json
+    command_json_path = os.path.join(path, 'command.json')
+    command_json = None
+    if os.path.isfile(command_json_path):
+        eprint("\t\tcommand.json:", end='')
+        if packaging_version == "4.0":
+            sys.exit(
+                "\tERROR\n\n"
+                "Command file is not support for version 4.0 packages"
+            )
+        else:
+            command_json = _validate_json(
+                command_json_path,
+                COMMAND_JSON_SCHEMA
+            )
+        eprint("\tOK")
+
+    # validate config.json
+    config_json_path = os.path.join(path, 'config.json')
+    if os.path.isfile(config_json_path):
+        eprint("\t\tconfig.json:", end='')
+        _validate_json(config_json_path, CONFIG_JSON_SCHEMA)
+        eprint("\tOK")
+
+    # validate existence of required marathon.json for v2
+    if packaging_version == "2.0":
+        marathon_json_path = os.path.join(path, 'marathon.json.mustache')
+        eprint("\t\tmarathon.json.mustache:", end='')
+        if not os.path.isfile(marathon_json_path):
+            sys.exit("\tERROR\n\nMissing required marathon.json.mustache")
+        eprint("\tOK")
+
+    # validate resource.json
+    resource_json_path = os.path.join(path, 'resource.json')
+    resource_json = None
+    if os.path.isfile(resource_json_path):
+        eprint("\t\tresource.json:", end='')
+        if packaging_version == "2.0":
+            resource_json = _validate_json(
+                resource_json_path,
+                V2_RESOURCE_JSON_SCHEMA)
+        else:
+            resource_json = _validate_json(
+                resource_json_path,
+                V3_RESOURCE_JSON_SCHEMA)
+        eprint("\tOK")
+
+    # Validate that we don't drop information during the conversion
+    old_package = LooseVersion(
+        package_json.get('minDcosReleaseVersion', "1.0")) < LooseVersion("1.8")
+    if (old_package and resource_json and 'cli' in resource_json and
+            command_json is None):
+        sys.exit('\tERROR\n\nA package with CLI specified in resource.json is '
+                 'only supported when minDcosReleaseVersion is greater than '
+                 '1.8.')
+
+
+def _validate_package_with_directory(given_package, actual_package_name):
+    if not PACKAGE_FOLDER_PATTERN.match(given_package):
+        sys.exit(
+            "\tERROR\n\n"
+            "Invalid name for package directory : {}"
+            "\nName should match the pattern : {}"
+            .format(given_package, PACKAGE_FOLDER_PATTERN.pattern)
+        )
+    if given_package != actual_package_name:
+        sys.exit(
+            "\tERROR\n\n"
+            "The name parameter in package.json should match with the name of "
+            "the package directory.\nDirectory : {}, Parsed Name : {}"
+            .format(given_package, actual_package_name)
+        )
+
+
+def _validate_json(path, schema):
+    with open(path, encoding='utf-8') as f:
+        data = json.loads(f.read())
+
+    _validate_jsonschema(data, schema)
+    return data
+
+
+def _validate_jsonschema(instance, schema):
+    validator = jsonschema.Draft4Validator(schema)
+    errors = list(validator.iter_errors(instance))
+    if len(errors) != 0:
+        sys.exit("\tERROR\n\nValidation error: {}".format(errors))
+
+
+if __name__ == '__main__':
+    sys.exit(main())
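
The validator takes no arguments: it walks repo/packages/<A-Z>/<package>/<revision>/
relative to the dcos-universe directory, checks each revision's JSON files against
the schemas under repo/meta/schema, and exits non-zero on the first failure. A
typical run, after installing the requirements above, is:

    # run from the dcos-universe directory
    ./scripts/validate_packages.py
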
diff --git a/tools/travis/scancode.sh b/tools/travis/scancode.sh
new file mode 100644
index 0000000..ebfa361
--- /dev/null
+++ b/tools/travis/scancode.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+set -e
+
+# Build script for Travis-CI.
+SCRIPTDIR=$(cd $(dirname "$0") && pwd)
+ROOTDIR="$SCRIPTDIR/../.."
+UTIL_DIR="$ROOTDIR/../incubator-openwhisk-utilities"
+
+# run scancode
+cd "$UTIL_DIR"
+scancode/scanCode.py "$ROOTDIR"
diff --git a/tools/travis/setupscan.sh b/tools/travis/setupscan.sh
new file mode 100644
index 0000000..35f070f
--- /dev/null
+++ b/tools/travis/setupscan.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+SCRIPTDIR=$(cd $(dirname "$0") && pwd)
+HOMEDIR="$SCRIPTDIR/../../../"
+
+# clone the OpenWhisk utilities repo in order to run scanCode.py
+cd "$HOMEDIR"
+git clone https://github.com/apache/incubator-openwhisk-utilities.git
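
These two Travis helpers are meant to run in sequence: setupscan.sh clones the
incubator-openwhisk-utilities repository next to this one, and scancode.sh then
runs its scanCode.py against the repository root. A rough local equivalent,
using only the paths the scripts themselves set up, would be:

    # rough local equivalent of the Travis steps
    ./tools/travis/setupscan.sh
    ./tools/travis/scancode.sh
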

-- 
To stop receiving notification emails like this one, please contact
dragos@apache.org.