Posted to notifications@apisix.apache.org by ch...@apache.org on 2020/10/13 08:07:09 UTC

[apisix-dashboard] branch refactor updated: feature: sync json schema from APISIX and check schema when create or update resource (#551)

This is an automated email from the ASF dual-hosted git repository.

chenjunxu pushed a commit to branch refactor
in repository https://gitbox.apache.org/repos/asf/apisix-dashboard.git


The following commit(s) were added to refs/heads/refactor by this push:
     new 08ae8c1  feature: sync json schema from APISIX and check schema when create or update resource (#551)
08ae8c1 is described below

commit 08ae8c14f2208dad7793f904e9bbc0bad762bfa2
Author: nic-chen <33...@users.noreply.github.com>
AuthorDate: Tue Oct 13 16:06:57 2020 +0800

    feature: sync json schema from APISIX and check schema when create or update resource (#551)
    
    * feat: json schema check
    
    * fix: no need to define a struct for each resource, because that may cause the json schema check to fail.
    
    * test: add handler test cases
    
    * test: complete consumer test cases
    
    * test: add test cases for schema check
    
    * fix code style and license
    
    * feat: add schema check for plugins
    
    * test: add ssl handler test cases
    
    * test: add test cases for upstream and service
    
    * test: add test cases for route
    
    * test: add note for route create
    
    * test: update CI
    
    * fix: remove useless file
    
    * test: fix CI
    
    * fix: ci fail
    
    * test: fix lib `dag-to-lua`'s path in CI
    
    * fix: URI for route may be empty
    
    * fix: remove empty lines
    
    * fix: refactor validator of json schema
    
    * fix code style
    
    * fix cicd
    
    * chore: update docker file
    
    * fix: should check schema after the id is generated
    
    * fix code style
    
    * chore: page_number -> page
    
    * fix: schema sync script
---
 .github/workflows/api_ci.yml                   |  14 +-
 .github/workflows/api_cicd.yml                 |  14 +-
 api/Dockerfile                                 |  14 +
 api/build-tools/json.lua                       | 400 +++++++++++++
 api/build-tools/schema-sync.lua                | 133 +++++
 api/conf/conf.go                               |  34 +-
 api/conf/schema.json                           |   1 +
 api/internal/core/entity/entity.go             |  76 +--
 api/internal/core/store/query.go               |   4 +-
 api/internal/core/store/store.go               |   8 +-
 api/internal/core/store/storehub.go            |   8 +
 api/internal/core/store/validate.go            |  92 ++-
 api/internal/core/store/validate_test.go       |  75 ++-
 api/internal/handler/consumer/consumer.go      |   3 +-
 api/internal/handler/consumer/consumer_test.go | 182 ++++++
 api/internal/handler/route/route.go            |  30 +-
 api/internal/handler/route/route_test.go       | 766 +++++++++++++++++++++++++
 api/internal/handler/service/service.go        |   6 +-
 api/internal/handler/service/service_test.go   | 128 +++++
 api/internal/handler/ssl/ssl.go                |   5 +-
 api/internal/handler/ssl/ssl_test.go           | 101 ++++
 api/internal/handler/upstream/upstream.go      |   6 +-
 api/internal/handler/upstream/upstream_test.go | 183 ++++++
 23 files changed, 2206 insertions(+), 77 deletions(-)
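
At a high level, this change has two halves: a build-time Lua script (api/build-tools/schema-sync.lua) dumps the JSON schema definitions bundled with APISIX into api/conf/schema.json, and the manager API loads that file and validates every create/update request body against it with gojsonschema. The sketch below only illustrates that validation step, assuming the relevant schema definition has already been looked up from schema.json as a string; the real implementation is APISIXJsonSchemaValidator in api/internal/core/store/validate.go further down.

    package main

    import (
        "fmt"

        "github.com/xeipuuv/gojsonschema"
    )

    // validateAgainstSchema is an illustrative helper, not part of the patch.
    // It checks an arbitrary Go value against a JSON schema given as a string,
    // mirroring what APISIXJsonSchemaValidator.Validate does in validate.go.
    func validateAgainstSchema(schemaDef string, obj interface{}) error {
        s, err := gojsonschema.NewSchema(gojsonschema.NewStringLoader(schemaDef))
        if err != nil {
            return fmt.Errorf("new schema failed: %w", err)
        }
        ret, err := s.Validate(gojsonschema.NewGoLoader(obj))
        if err != nil {
            return fmt.Errorf("validate failed: %w", err)
        }
        if !ret.Valid() {
            // ret.Errors() lists every violated constraint, e.g. a missing
            // required field or a value outside an allowed enum.
            return fmt.Errorf("schema validate fail: %v", ret.Errors())
        }
        return nil
    }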

diff --git a/.github/workflows/api_ci.yml b/.github/workflows/api_ci.yml
index 32206f7..907e0e6 100644
--- a/.github/workflows/api_ci.yml
+++ b/.github/workflows/api_ci.yml
@@ -36,9 +36,9 @@ jobs:
     - name: get lua lib
       run: |
         wget https://github.com/api7/dag-to-lua/archive/v1.1.tar.gz
-        sudo mkdir -p /go/api7-manager-api/dag-to-lua/
+        sudo mkdir -p /go/manager-api/dag-to-lua/
         tar -zxvf v1.1.tar.gz
-        sudo mv ./dag-to-lua-1.1/lib/* /go/api7-manager-api/dag-to-lua/
+        sudo mv ./dag-to-lua-1.1/lib/* /go/manager-api/dag-to-lua/
 
     - name: install runtime
       run: |
@@ -49,6 +49,16 @@ jobs:
         export GO111MOUDULE=on
         sudo apt install golang-1.14-go
 
+    - name: generate json schema
+      working-directory: ./api
+      run: |
+        wget https://github.com/apache/apisix/archive/master.zip
+        mkdir ./build-tools/apisix/
+        unzip master.zip
+        sudo mv ./apisix-master/apisix/* ./build-tools/apisix/
+        rm -rf ./apisix-master
+        cd ./build-tools/ && lua schema-sync.lua > ../conf/schema.json
+
     - name: run test
       working-directory: ./api
       run: |
diff --git a/.github/workflows/api_cicd.yml b/.github/workflows/api_cicd.yml
index abe0289..481c8cc 100644
--- a/.github/workflows/api_cicd.yml
+++ b/.github/workflows/api_cicd.yml
@@ -51,9 +51,9 @@ jobs:
     - name: get lua lib
       run: |
         wget https://github.com/api7/dag-to-lua/archive/v1.1.tar.gz
-        sudo mkdir -p /go/api7-manager-api/dag-to-lua/
+        sudo mkdir -p /go/manager-api/dag-to-lua/
         tar -zxvf v1.1.tar.gz
-        sudo mv ./dag-to-lua-1.1/lib/* /go/api7-manager-api/dag-to-lua/
+        sudo mv ./dag-to-lua-1.1/lib/* /go/manager-api/dag-to-lua/
 
     - name: install runtime
       run: |
@@ -64,6 +64,16 @@ jobs:
         export GO111MOUDULE=on
         sudo apt install golang-1.14-go
 
+    - name: generate json schema
+      working-directory: ./api
+      run: |
+        wget https://github.com/apache/apisix/archive/master.zip
+        mkdir ./build-tools/apisix/
+        unzip master.zip
+        sudo mv ./apisix-master/apisix/* ./build-tools/apisix/
+        rm -rf ./apisix-master
+        cd ./build-tools/ && lua schema-sync.lua > ../conf/schema.json
+
     - uses: Azure/docker-login@v1
       with:
         login-server: apisixacr.azurecr.cn
diff --git a/api/Dockerfile b/api/Dockerfile
index 9bb22ff..d8e7c6d 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -20,10 +20,12 @@ FROM golang:1.13.8 AS build-env
 WORKDIR /go/src/github.com/apisix/manager-api
 COPY . .
 RUN mkdir /go/manager-api \
+    && mkdir /go/manager-api/build-tools \
     && go env -w GOPROXY=https://goproxy.io,direct \
     && export GOPROXY=https://goproxy.io \
     && go build -o /go/manager-api/manager-api \
     && mv /go/src/github.com/apisix/manager-api/build.sh /go/manager-api/ \
+    && mv /go/src/github.com/apisix/manager-api/build-tools/* /go/manager-api/build-tools/ \
     && mv /go/src/github.com/apisix/manager-api/conf/conf_preview.json /go/manager-api/conf.json \
     && rm -rf /go/src/github.com/apisix/manager-api \
     && rm -rf /etc/localtime \
@@ -35,6 +37,12 @@ RUN wget https://github.com/api7/dag-to-lua/archive/v1.1.tar.gz \
     && mkdir /go/manager-api/dag-to-lua \
     && mv ./dag-to-lua-1.1/lib/* /go/manager-api/dag-to-lua/
 
+RUN  wget https://github.com/apache/apisix/archive/master.zip \
+     && mkdir /go/manager-api/build-tools/apisix \
+     && apt-get update && apt-get install zip -y \
+     && unzip master.zip \
+     && mv ./apisix-master/apisix/* /go/manager-api/build-tools/apisix/
+
 FROM alpine:3.11
 
 RUN mkdir -p /go/manager-api \
@@ -50,6 +58,12 @@ RUN apk add lua5.1
 WORKDIR /go/manager-api
 COPY --from=build-env /go/manager-api/ /go/manager-api/
 COPY --from=build-env /usr/share/zoneinfo/Hongkong /etc/localtime
+
+RUN  cd /go/manager-api/build-tools \
+     && lua schema-sync.lua > /go/manager-api/schema.json \
+     && cd /go/manager-api/ \
+     && rm -rf /go/manager-api/build-tools/
+
 EXPOSE 8080
 RUN chmod +x ./build.sh
 CMD ["/bin/ash", "-c", "/go/manager-api/build.sh"]
diff --git a/api/build-tools/json.lua b/api/build-tools/json.lua
new file mode 100644
index 0000000..720b029
--- /dev/null
+++ b/api/build-tools/json.lua
@@ -0,0 +1,400 @@
+--
+-- json.lua
+--
+-- Copyright (c) 2020 rxi
+--
+-- Permission is hereby granted, free of charge, to any person obtaining a copy of
+-- this software and associated documentation files (the "Software"), to deal in
+-- the Software without restriction, including without limitation the rights to
+-- use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+-- of the Software, and to permit persons to whom the Software is furnished to do
+-- so, subject to the following conditions:
+--
+-- The above copyright notice and this permission notice shall be included in all
+-- copies or substantial portions of the Software.
+--
+-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+-- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+-- SOFTWARE.
+--
+local string   = string
+local error    = error
+local rawget   = rawget
+local next     = next
+local pairs    = pairs
+local type     = type
+local ipairs   = ipairs
+local table    = table
+local math     = math
+local tostring = tostring
+local select   = select
+local tonumber = tonumber
+
+local json = { _version = "0.1.2" }
+
+-------------------------------------------------------------------------------
+-- Encode
+-------------------------------------------------------------------------------
+
+local encode
+
+local escape_char_map = {
+  [ "\\" ] = "\\",
+  [ "\"" ] = "\"",
+  [ "\b" ] = "b",
+  [ "\f" ] = "f",
+  [ "\n" ] = "n",
+  [ "\r" ] = "r",
+  [ "\t" ] = "t",
+}
+
+local escape_char_map_inv = { [ "/" ] = "/" }
+for k, v in pairs(escape_char_map) do
+  escape_char_map_inv[v] = k
+end
+
+
+local function escape_char(c)
+  return "\\" .. (escape_char_map[c] or string.format("u%04x", c:byte()))
+end
+
+
+local function encode_nil(val)
+  return "null"
+end
+
+
+local function encode_table(val, stack)
+  local res = {}
+  stack = stack or {}
+
+  -- Circular reference?
+  if stack[val] then error("circular reference") end
+
+  stack[val] = true
+
+  if rawget(val, 1) ~= nil or next(val) == nil then
+    -- Treat as array -- check keys are valid and it is not sparse
+    local n = 0
+    for k in pairs(val) do
+      if type(k) ~= "number" then
+        error("invalid table: mixed or invalid key types")
+      end
+      n = n + 1
+    end
+    if n ~= #val then
+      error("invalid table: sparse array")
+    end
+    -- Encode
+    for i, v in ipairs(val) do
+      table.insert(res, encode(v, stack))
+    end
+    stack[val] = nil
+    return "[" .. table.concat(res, ",") .. "]"
+
+  else
+    -- Treat as an object
+    for k, v in pairs(val) do
+      if type(k) ~= "string" then
+        error("invalid table: mixed or invalid key types")
+      end
+      table.insert(res, encode(k, stack) .. ":" .. encode(v, stack))
+    end
+    stack[val] = nil
+    return "{" .. table.concat(res, ",") .. "}"
+  end
+end
+
+
+local function encode_string(val)
+  return '"' .. val:gsub('[%z\1-\31\\"]', escape_char) .. '"'
+end
+
+
+local function encode_number(val)
+  -- Check for NaN, -inf and inf
+  if val ~= val or val <= -math.huge or val >= math.huge then
+    error("unexpected number value '" .. tostring(val) .. "'")
+  end
+  return string.format("%.14g", val)
+end
+
+
+local type_func_map = {
+  [ "nil"     ] = encode_nil,
+  [ "table"   ] = encode_table,
+  [ "string"  ] = encode_string,
+  [ "number"  ] = encode_number,
+  [ "boolean" ] = tostring,
+}
+
+
+encode = function(val, stack)
+  local t = type(val)
+  local f = type_func_map[t]
+  if f then
+    return f(val, stack)
+  end
+  error("unexpected type '" .. t .. "'")
+end
+
+
+function json.encode(val)
+  return ( encode(val) )
+end
+
+
+-------------------------------------------------------------------------------
+-- Decode
+-------------------------------------------------------------------------------
+
+local parse
+
+local function create_set(...)
+  local res = {}
+  for i = 1, select("#", ...) do
+    res[ select(i, ...) ] = true
+  end
+  return res
+end
+
+local space_chars   = create_set(" ", "\t", "\r", "\n")
+local delim_chars   = create_set(" ", "\t", "\r", "\n", "]", "}", ",")
+local escape_chars  = create_set("\\", "/", '"', "b", "f", "n", "r", "t", "u")
+local literals      = create_set("true", "false", "null")
+
+local literal_map = {
+  [ "true"  ] = true,
+  [ "false" ] = false,
+  [ "null"  ] = nil,
+}
+
+
+local function next_char(str, idx, set, negate)
+  for i = idx, #str do
+    if set[str:sub(i, i)] ~= negate then
+      return i
+    end
+  end
+  return #str + 1
+end
+
+
+local function decode_error(str, idx, msg)
+  local line_count = 1
+  local col_count = 1
+  for i = 1, idx - 1 do
+    col_count = col_count + 1
+    if str:sub(i, i) == "\n" then
+      line_count = line_count + 1
+      col_count = 1
+    end
+  end
+  error( string.format("%s at line %d col %d", msg, line_count, col_count) )
+end
+
+
+local function codepoint_to_utf8(n)
+  -- http://scripts.sil.org/cms/scripts/page.php?site_id=nrsi&id=iws-appendixa
+  local f = math.floor
+  if n <= 0x7f then
+    return string.char(n)
+  elseif n <= 0x7ff then
+    return string.char(f(n / 64) + 192, n % 64 + 128)
+  elseif n <= 0xffff then
+    return string.char(f(n / 4096) + 224, f(n % 4096 / 64) + 128, n % 64 + 128)
+  elseif n <= 0x10ffff then
+    return string.char(f(n / 262144) + 240, f(n % 262144 / 4096) + 128,
+                       f(n % 4096 / 64) + 128, n % 64 + 128)
+  end
+  error( string.format("invalid unicode codepoint '%x'", n) )
+end
+
+
+local function parse_unicode_escape(s)
+  local n1 = tonumber( s:sub(1, 4),  16 )
+  local n2 = tonumber( s:sub(7, 10), 16 )
+   -- Surrogate pair?
+  if n2 then
+    return codepoint_to_utf8((n1 - 0xd800) * 0x400 + (n2 - 0xdc00) + 0x10000)
+  else
+    return codepoint_to_utf8(n1)
+  end
+end
+
+
+local function parse_string(str, i)
+  local res = ""
+  local j = i + 1
+  local k = j
+
+  while j <= #str do
+    local x = str:byte(j)
+
+    if x < 32 then
+      decode_error(str, j, "control character in string")
+
+    elseif x == 92 then -- `\`: Escape
+      res = res .. str:sub(k, j - 1)
+      j = j + 1
+      local c = str:sub(j, j)
+      if c == "u" then
+        local hex = str:match("^[dD][89aAbB]%x%x\\u%x%x%x%x", j + 1)
+                 or str:match("^%x%x%x%x", j + 1)
+                 or decode_error(str, j - 1, "invalid unicode escape in string")
+        res = res .. parse_unicode_escape(hex)
+        j = j + #hex
+      else
+        if not escape_chars[c] then
+          decode_error(str, j - 1, "invalid escape char '" .. c .. "' in string")
+        end
+        res = res .. escape_char_map_inv[c]
+      end
+      k = j + 1
+
+    elseif x == 34 then -- `"`: End of string
+      res = res .. str:sub(k, j - 1)
+      return res, j + 1
+    end
+
+    j = j + 1
+  end
+
+  decode_error(str, i, "expected closing quote for string")
+end
+
+
+local function parse_number(str, i)
+  local x = next_char(str, i, delim_chars)
+  local s = str:sub(i, x - 1)
+  local n = tonumber(s)
+  if not n then
+    decode_error(str, i, "invalid number '" .. s .. "'")
+  end
+  return n, x
+end
+
+
+local function parse_literal(str, i)
+  local x = next_char(str, i, delim_chars)
+  local word = str:sub(i, x - 1)
+  if not literals[word] then
+    decode_error(str, i, "invalid literal '" .. word .. "'")
+  end
+  return literal_map[word], x
+end
+
+
+local function parse_array(str, i)
+  local res = {}
+  local n = 1
+  i = i + 1
+  while 1 do
+    local x
+    i = next_char(str, i, space_chars, true)
+    -- Empty / end of array?
+    if str:sub(i, i) == "]" then
+      i = i + 1
+      break
+    end
+    -- Read token
+    x, i = parse(str, i)
+    res[n] = x
+    n = n + 1
+    -- Next token
+    i = next_char(str, i, space_chars, true)
+    local chr = str:sub(i, i)
+    i = i + 1
+    if chr == "]" then break end
+    if chr ~= "," then decode_error(str, i, "expected ']' or ','") end
+  end
+  return res, i
+end
+
+
+local function parse_object(str, i)
+  local res = {}
+  i = i + 1
+  while 1 do
+    local key, val
+    i = next_char(str, i, space_chars, true)
+    -- Empty / end of object?
+    if str:sub(i, i) == "}" then
+      i = i + 1
+      break
+    end
+    -- Read key
+    if str:sub(i, i) ~= '"' then
+      decode_error(str, i, "expected string for key")
+    end
+    key, i = parse(str, i)
+    -- Read ':' delimiter
+    i = next_char(str, i, space_chars, true)
+    if str:sub(i, i) ~= ":" then
+      decode_error(str, i, "expected ':' after key")
+    end
+    i = next_char(str, i + 1, space_chars, true)
+    -- Read value
+    val, i = parse(str, i)
+    -- Set
+    res[key] = val
+    -- Next token
+    i = next_char(str, i, space_chars, true)
+    local chr = str:sub(i, i)
+    i = i + 1
+    if chr == "}" then break end
+    if chr ~= "," then decode_error(str, i, "expected '}' or ','") end
+  end
+  return res, i
+end
+
+
+local char_func_map = {
+  [ '"' ] = parse_string,
+  [ "0" ] = parse_number,
+  [ "1" ] = parse_number,
+  [ "2" ] = parse_number,
+  [ "3" ] = parse_number,
+  [ "4" ] = parse_number,
+  [ "5" ] = parse_number,
+  [ "6" ] = parse_number,
+  [ "7" ] = parse_number,
+  [ "8" ] = parse_number,
+  [ "9" ] = parse_number,
+  [ "-" ] = parse_number,
+  [ "t" ] = parse_literal,
+  [ "f" ] = parse_literal,
+  [ "n" ] = parse_literal,
+  [ "[" ] = parse_array,
+  [ "{" ] = parse_object,
+}
+
+
+parse = function(str, idx)
+  local chr = str:sub(idx, idx)
+  local f = char_func_map[chr]
+  if f then
+    return f(str, idx)
+  end
+  decode_error(str, idx, "unexpected character '" .. chr .. "'")
+end
+
+
+function json.decode(str)
+  if type(str) ~= "string" then
+    error("expected argument of type string, got " .. type(str))
+  end
+  local res, idx = parse(str, next_char(str, 1, space_chars, true))
+  idx = next_char(str, idx, space_chars, true)
+  if idx <= #str then
+    decode_error(str, idx, "trailing garbage")
+  end
+  return res
+end
+
+
+return json
diff --git a/api/build-tools/schema-sync.lua b/api/build-tools/schema-sync.lua
new file mode 100644
index 0000000..596f6d8
--- /dev/null
+++ b/api/build-tools/schema-sync.lua
@@ -0,0 +1,133 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+--     http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+local json = require("json")
+
+-- simulate loading modules to avoid errors that will cause fail to read json schema
+local fake_module_list = {
+    'cjson',
+    'cjson.safe',
+    'bit',
+    'lfs',
+    'ngx.process',
+    'ngx.re',
+    'net.url',
+    'opentracing.tracer',
+    'pb',
+    'prometheus',
+    'protoc',
+
+    'resty.cookie',
+    'resty.core.regex',
+    'resty.hmac',
+    'resty.http',
+    'resty.ipmatcher',
+    'resty.jit-uuid',
+    'resty.jwt',
+    'resty.kafka.producer',
+    'resty.limit.count',
+    'resty.limit.conn',
+    'resty.limit.req',
+    'resty.logger.socket',
+    'resty.lock',
+    'resty.openidc',
+    'resty.random',
+    'resty.redis',
+    'resty.signal',
+    'resty.string',
+
+    'apisix.consumer',
+    'apisix.core.json',
+    'apisix.core.schema',
+    'apisix.upstream',
+    'apisix.utils.log-util',
+    'apisix.utils.batch-processor',
+    'apisix.plugin',
+    'apisix.plugins.skywalking.client',
+    'apisix.plugins.skywalking.tracer',
+    'apisix.plugins.zipkin.codec',
+    'apisix.plugins.zipkin.random_sampler',
+    'apisix.plugins.zipkin.reporter'
+}
+for _, name in ipairs(fake_module_list) do
+    package.loaded[name] = {}
+end
+
+
+local empty_function = function()
+end
+
+
+ngx = {}
+ngx.re = {}
+ngx.timer = {}
+ngx.location = {}
+ngx.socket = {}
+ngx.re.gmatch = empty_function
+
+-- additional define for management
+local time_def = {
+   type = "integer",
+}
+local schema = require("apisix.schema_def")
+for _, resource in ipairs({"ssl", "route", "service", "upstream", "consumer"}) do
+  schema[resource].properties.create_time = time_def
+  schema[resource].properties.update_time = time_def
+end
+schema.ssl.properties.validity_start = time_def
+schema.ssl.properties.validity_end = time_def
+
+package.loaded["apisix.core"] = {
+    lrucache = {
+        new = empty_function
+    },
+    schema = schema,
+    id = {
+        get = empty_function
+    },
+    table = {
+        insert = empty_function
+    }
+}
+
+
+function get_plugin_list()
+    local all = io.popen("ls apisix/plugins");
+    local list = {};
+    for filename in all:lines() do
+        suffix = string.sub(filename, -4)
+        if suffix == ".lua" then
+            table.insert(list, string.sub(filename, 1, -5))
+        end
+    end
+    all:close()
+    return list
+end
+
+
+local schema_all = {}
+schema_all.main = schema
+schema_all.plugins = {}
+
+local plugins = get_plugin_list()
+for idx, plugin_name in pairs(plugins) do
+    local plugin = require("apisix.plugins." .. plugin_name)
+    if plugin and type(plugin) == "table" and plugin.schema then
+        schema_all.plugins[plugin_name] = plugin.schema
+    end
+end
+
+print(json.encode(schema_all))
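
The script above prints a single JSON document with two top-level keys: "main", holding the core apisix.schema_def schemas for ssl, route, service, upstream and consumer (extended here with create_time/update_time), and "plugins", holding one schema per plugin file found under apisix/plugins. On the Go side that document is parsed with gjson and individual schemas are fetched by path. A minimal sketch of such a lookup, assuming the generated schema.json sits in the working directory (the manager API itself reads it from a fixed path, see conf.go below):

    package main

    import (
        "fmt"
        "io/ioutil"

        "github.com/tidwall/gjson"
    )

    func main() {
        // Read the file produced by `lua schema-sync.lua > schema.json`.
        content, err := ioutil.ReadFile("schema.json")
        if err != nil {
            panic(err)
        }
        schema := gjson.ParseBytes(content)

        // The same paths are used by NewAPISIXJsonSchemaValidator
        // ("main.<resource>") and by the per-plugin check ("plugins.<name>").
        routeSchema := schema.Get("main.route").String()
        limitCountSchema := schema.Get("plugins.limit-count").String()
        fmt.Println(routeSchema != "", limitCountSchema != "")
    }
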
diff --git a/api/conf/conf.go b/api/conf/conf.go
index cbd7f8e..bda5bdb 100644
--- a/api/conf/conf.go
+++ b/api/conf/conf.go
@@ -32,13 +32,16 @@ const BETA = "beta"
 const DEV = "dev"
 const LOCAL = "local"
 const confPath = "/go/manager-api/conf.json"
+const schemaPath = "/go/manager-api/schema.json"
 const RequestId = "requestId"
 
 var (
-	ENV      string
-	basePath string
-	ApiKey   = "edd1c9f034335f136f87ad84b625c8f1"
-	BaseUrl  = "http://127.0.0.1:9080/apisix/admin"
+	ENV        string
+	basePath   string
+	Schema     gjson.Result
+	ApiKey     = "edd1c9f034335f136f87ad84b625c8f1"
+	BaseUrl    = "http://127.0.0.1:9080/apisix/admin"
+	DagLibPath = "/go/manager-api/dag-to-lua/"
 )
 
 func init() {
@@ -46,6 +49,7 @@ func init() {
 	initMysql()
 	initApisix()
 	initAuthentication()
+	initSchema()
 }
 
 func setEnvironment() {
@@ -54,6 +58,11 @@ func setEnvironment() {
 	} else {
 		ENV = env
 	}
+
+	if env := os.Getenv("APIX_DAG_LIB_PATH"); env != "" {
+		DagLibPath = env
+	}
+
 	_, basePath, _, _ = runtime.Caller(1)
 }
 
@@ -65,6 +74,14 @@ func configurationPath() string {
 	}
 }
 
+func getSchemaPath() string {
+	if ENV == LOCAL {
+		return filepath.Join(filepath.Dir(basePath), "schema.json")
+	} else {
+		return schemaPath
+	}
+}
+
 type mysqlConfig struct {
 	Address  string
 	User     string
@@ -138,3 +155,12 @@ func initAuthentication() {
 		AuthenticationConfig.Session.ExpireTime = configuration.Get("authentication.session.expireTime").Uint()
 	}
 }
+
+func initSchema() {
+	filePath := getSchemaPath()
+	if schemaContent, err := ioutil.ReadFile(filePath); err != nil {
+		panic(fmt.Sprintf("fail to read configuration: %s", filePath))
+	} else {
+		Schema = gjson.ParseBytes(schemaContent)
+	}
+}
diff --git a/api/conf/schema.json b/api/conf/schema.json
new file mode 100644
index 0000000..bf606b5
--- /dev/null
+++ b/api/conf/schema.json
@@ -0,0 +1 @@
+{"plugins":{"serverless-post-function":{"type":"object","properties":{"phase":{"type":"string","enum":["rewrite","access","header_filter","body_filter","log","balancer"]},"functions":{"type":"array","items":{"type":"string"},"minItems":1}},"required":["functions"]},"prometheus":{"type":"object","additionalProperties":false},"syslog":{"type":"object","properties":{"port":{"type":"integer"},"flush_limit":{"type":"integer","default":4096,"minimum":1},"name":{"type":"string","default":"sys l [...]
diff --git a/api/internal/core/entity/entity.go b/api/internal/core/entity/entity.go
index 8a2fae4..06f4d31 100644
--- a/api/internal/core/entity/entity.go
+++ b/api/internal/core/entity/entity.go
@@ -32,24 +32,24 @@ type BaseInfoGetter interface {
 
 type Route struct {
 	BaseInfo
-	URI             string      `json:"uri,omitempty" validate:"uri"`
-	Uris            []string    `json:"uris,omitempty"`
-	Name            string      `json:"name,omitempty" validate:"max=50"`
-	Desc            string      `json:"desc,omitempty" validate:"max=256"`
-	Priority        int         `json:"priority,omitempty"`
-	Methods         []string    `json:"methods,omitempty"`
-	Host            string      `json:"host,omitempty"`
-	Hosts           []string    `json:"hosts,omitempty"`
-	RemoteAddr      string      `json:"remote_addr,omitempty"`
-	RemoteAddrs     []string    `json:"remote_addrs,omitempty"`
-	Vars            string      `json:"vars,omitempty"`
-	FilterFunc      string      `json:"filter_func,omitempty"`
-	Script          interface{} `json:"script,omitempty"`
-	Plugins         interface{} `json:"plugins,omitempty"`
-	Upstream        Upstream    `json:"upstream,omitempty"`
-	ServiceID       string      `json:"service_id,omitempty"`
-	UpstreamID      string      `json:"upstream_id,omitempty"`
-	ServiceProtocol string      `json:"service_protocol,omitempty"`
+	URI             string                 `json:"uri,omitempty"`
+	Uris            []string               `json:"uris,omitempty"`
+	Name            string                 `json:"name,omitempty" validate:"max=50"`
+	Desc            string                 `json:"desc,omitempty" validate:"max=256"`
+	Priority        int                    `json:"priority,omitempty"`
+	Methods         []string               `json:"methods,omitempty"`
+	Host            string                 `json:"host,omitempty"`
+	Hosts           []string               `json:"hosts,omitempty"`
+	RemoteAddr      string                 `json:"remote_addr,omitempty"`
+	RemoteAddrs     []string               `json:"remote_addrs,omitempty"`
+	Vars            interface{}            `json:"vars,omitempty"`
+	FilterFunc      string                 `json:"filter_func,omitempty"`
+	Script          interface{}            `json:"script,omitempty"`
+	Plugins         map[string]interface{} `json:"plugins,omitempty"`
+	Upstream        interface{}            `json:"upstream,omitempty"`
+	ServiceID       string                 `json:"service_id,omitempty"`
+	UpstreamID      string                 `json:"upstream_id,omitempty"`
+	ServiceProtocol string                 `json:"service_protocol,omitempty"`
 }
 
 // --- structures for upstream start  ---
@@ -114,12 +114,12 @@ type HealthChecker struct {
 
 type Upstream struct {
 	BaseInfo
-	Nodes           []Node        `json:"nodes,omitempty"`
+	Nodes           []interface{} `json:"nodes,omitempty"`
 	Retries         int           `json:"retries,omitempty"`
-	Timeout         Timeout       `json:"timeout,omitempty"`
-	K8sInfo         K8sInfo       `json:"k8s_deployment_info,omitempty"`
+	Timeout         interface{}   `json:"timeout,omitempty"`
+	K8sInfo         interface{}   `json:"k8s_deployment_info,omitempty"`
 	Type            string        `json:"type,omitempty"`
-	Checks          HealthChecker `json:"checks,omitempty"`
+	Checks          interface{}   `json:"checks,omitempty"`
 	HashOn          string        `json:"hash_on,omitempty"`
 	Key             string        `json:"key,omitempty"`
 	EnableWebsocket bool          `json:"enable_websocket,omitempty"`
@@ -147,33 +147,33 @@ func (upstream *Upstream) Parse2NameResponse() (*UpstreamNameResponse, error) {
 
 type Consumer struct {
 	BaseInfo
-	Username string      `json:"username"`
-	Desc     string      `json:"desc,omitempty"`
-	Plugins  interface{} `json:"plugins,omitempty"`
+	Username string                 `json:"username"`
+	Desc     string                 `json:"desc,omitempty"`
+	Plugins  map[string]interface{} `json:"plugins,omitempty"`
 }
 
 type SSL struct {
 	BaseInfo
-	Cert          string   `json:"cert"`
+	Cert          string   `json:"cert,omitempty"`
 	Key           string   `json:"key,omitempty"`
-	Sni           string   `json:"sni"`
-	Snis          []string `json:"snis"`
-	Certs         []string `json:"certs"`
+	Sni           string   `json:"sni,omitempty"`
+	Snis          []string `json:"snis,omitempty"`
+	Certs         []string `json:"certs,omitempty"`
 	Keys          []string `json:"keys,omitempty"`
-	ExpTime       int64    `json:"exptime"`
+	ExpTime       int64    `json:"exptime,omitempty"`
 	Status        int      `json:"status"`
-	ValidityStart int64    `json:"validity_start"`
-	ValidityEnd   int64    `json:"validity_end"`
+	ValidityStart int64    `json:"validity_start,omitempty"`
+	ValidityEnd   int64    `json:"validity_end,omitempty"`
 }
 
 type Service struct {
 	BaseInfo
-	Name       string      `json:"name,omitempty"`
-	Desc       string      `json:"desc,omitempty"`
-	Upstream   Upstream    `json:"upstream,omitempty"`
-	UpstreamID string      `json:"upstream_id,omitempty"`
-	Plugins    interface{} `json:"plugins,omitempty"`
-	Script     string      `json:"script,omitempty"`
+	Name       string                 `json:"name,omitempty"`
+	Desc       string                 `json:"desc,omitempty"`
+	Upstream   interface{}            `json:"upstream,omitempty"`
+	UpstreamID string                 `json:"upstream_id,omitempty"`
+	Plugins    map[string]interface{} `json:"plugins,omitempty"`
+	Script     string                 `json:"script,omitempty"`
 }
 
 type Script struct {
diff --git a/api/internal/core/store/query.go b/api/internal/core/store/query.go
index c05a7cd..6ec53b2 100644
--- a/api/internal/core/store/query.go
+++ b/api/internal/core/store/query.go
@@ -67,8 +67,8 @@ var NoFilter = &Filter{
 }
 
 type Pagination struct {
-	PageSize   int
-	PageNumber int
+	PageSize   int `json:"pageSize" form:"pageSize" auto_read:"pageSize"`
+	PageNumber int `json:"page" form:"page" auto_read:"page"`
 }
 
 func NewPagination(PageSize, pageNumber int) *Pagination {
diff --git a/api/internal/core/store/store.go b/api/internal/core/store/store.go
index a82089f..8a4b71c 100644
--- a/api/internal/core/store/store.go
+++ b/api/internal/core/store/store.go
@@ -223,10 +223,6 @@ func (s *GenericStore) ingestValidate(obj interface{}) (err error) {
 }
 
 func (s *GenericStore) Create(ctx context.Context, obj interface{}) error {
-	if err := s.ingestValidate(obj); err != nil {
-		return err
-	}
-
 	if getter, ok := obj.(entity.BaseInfoGetter); ok {
 		info := getter.GetBaseInfo()
 		if info.ID == "" {
@@ -236,6 +232,10 @@ func (s *GenericStore) Create(ctx context.Context, obj interface{}) error {
 		info.UpdateTime = time.Now().Unix()
 	}
 
+	if err := s.ingestValidate(obj); err != nil {
+		return err
+	}
+
 	key := s.opt.KeyFunc(obj)
 	if key == "" {
 		return fmt.Errorf("key is required")
diff --git a/api/internal/core/store/storehub.go b/api/internal/core/store/storehub.go
index d07210a..b2685c9 100644
--- a/api/internal/core/store/storehub.go
+++ b/api/internal/core/store/storehub.go
@@ -40,6 +40,14 @@ var (
 )
 
 func InitStore(key HubKey, opt GenericStoreOption) error {
+	if key == HubKeyConsumer || key == HubKeyRoute ||
+		key == HubKeyService || key == HubKeySsl || key == HubKeyUpstream {
+		validator, err := NewAPISIXJsonSchemaValidator("main." + string(key))
+		if err != nil {
+			return err
+		}
+		opt.Validator = validator
+	}
 	s, err := NewGenericStore(opt)
 	if err != nil {
 		return err
diff --git a/api/internal/core/store/validate.go b/api/internal/core/store/validate.go
index b936cc0..a5851c0 100644
--- a/api/internal/core/store/validate.go
+++ b/api/internal/core/store/validate.go
@@ -19,9 +19,13 @@ package store
 import (
 	"errors"
 	"fmt"
+	"io/ioutil"
+
 	"github.com/xeipuuv/gojsonschema"
 	"go.uber.org/zap/buffer"
-	"io/ioutil"
+
+	"github.com/apisix/manager-api/conf"
+	"github.com/apisix/manager-api/internal/core/entity"
 )
 
 type Validator interface {
@@ -63,3 +67,89 @@ func (v *JsonSchemaValidator) Validate(obj interface{}) error {
 	}
 	return nil
 }
+
+type APISIXJsonSchemaValidator struct {
+	schema *gojsonschema.Schema
+}
+
+func NewAPISIXJsonSchemaValidator(jsonPath string) (Validator, error) {
+	schemaDef := conf.Schema.Get(jsonPath).String()
+	if schemaDef == "" {
+		return nil, fmt.Errorf("schema not found")
+	}
+
+	s, err := gojsonschema.NewSchema(gojsonschema.NewStringLoader(schemaDef))
+	if err != nil {
+		return nil, fmt.Errorf("new schema failed: %w", err)
+	}
+	return &APISIXJsonSchemaValidator{
+		schema: s,
+	}, nil
+}
+
+func getPlugins(reqBody interface{}) map[string]interface{} {
+	switch reqBody.(type) {
+	case *entity.Route:
+		route := reqBody.(*entity.Route)
+		return route.Plugins
+	case *entity.Service:
+		service := reqBody.(*entity.Service)
+		return service.Plugins
+	case *entity.Consumer:
+		consumer := reqBody.(*entity.Consumer)
+		return consumer.Plugins
+	}
+	return nil
+}
+
+func (v *APISIXJsonSchemaValidator) Validate(obj interface{}) error {
+	ret, err := v.schema.Validate(gojsonschema.NewGoLoader(obj))
+	if err != nil {
+		return fmt.Errorf("validate failed: %w", err)
+	}
+
+	if !ret.Valid() {
+		errString := buffer.Buffer{}
+		for i, vErr := range ret.Errors() {
+			if i != 0 {
+				errString.AppendString("\n")
+			}
+			errString.AppendString(vErr.String())
+		}
+		return fmt.Errorf("scheme validate fail: %s", errString.String())
+	}
+
+	//check plugin json schema
+	plugins := getPlugins(obj)
+	if plugins != nil {
+		for pluginName, pluginConf := range plugins {
+			schemaDef := conf.Schema.Get("plugins." + pluginName).String()
+			if schemaDef == "" {
+				return fmt.Errorf("schema not found")
+			}
+
+			s, err := gojsonschema.NewSchema(gojsonschema.NewStringLoader(schemaDef))
+			if err != nil {
+				return fmt.Errorf("new schema failed: %w", err)
+			}
+
+			ret, err := s.Validate(gojsonschema.NewGoLoader(pluginConf))
+			if err != nil {
+				return fmt.Errorf("validate failed: %w", err)
+			}
+
+			if !ret.Valid() {
+				errString := buffer.Buffer{}
+				for i, vErr := range ret.Errors() {
+					if i != 0 {
+						errString.AppendString("\n")
+					}
+					errString.AppendString(vErr.String())
+				}
+				return fmt.Errorf("scheme validate fail: %s", errString.String())
+			}
+		}
+	}
+
+	return nil
+}
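
Taken together with storehub.go above, each resource store is wired with a validator for "main.<resource>" at startup, and Validate then checks the resource body first and every entry of its plugins map against "plugins.<name>" afterwards. A hedged usage sketch of that path (the validator and handler tests below exercise the same thing end to end):

    package main

    import (
        "encoding/json"
        "fmt"

        "github.com/apisix/manager-api/internal/core/entity"
        "github.com/apisix/manager-api/internal/core/store"
    )

    func main() {
        // Assumes schema.json has been loaded by the conf package's init.
        validator, err := store.NewAPISIXJsonSchemaValidator("main.consumer")
        if err != nil {
            panic(err) // the requested path is missing from schema.json
        }

        consumer := &entity.Consumer{}
        reqBody := `{
            "id": "jack",
            "username": "jack",
            "plugins": {
                "limit-count": {
                    "count": 2,
                    "time_window": 60,
                    "rejected_code": 503,
                    "key": "remote_addr"
                }
            }
        }`
        _ = json.Unmarshal([]byte(reqBody), consumer)

        // Dropping "count" above would fail the plugins.limit-count schema,
        // which is what TestAPISIXJsonSchemaValidator_Validate asserts.
        if err := validator.Validate(consumer); err != nil {
            fmt.Println(err)
        }
    }
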
diff --git a/api/internal/core/store/validate_test.go b/api/internal/core/store/validate_test.go
index 3821154..18597d8 100644
--- a/api/internal/core/store/validate_test.go
+++ b/api/internal/core/store/validate_test.go
@@ -17,9 +17,13 @@
 package store
 
 import (
+	"encoding/json"
 	"fmt"
-	"github.com/stretchr/testify/assert"
 	"testing"
+
+	"github.com/stretchr/testify/assert"
+
+	"github.com/apisix/manager-api/internal/core/entity"
 )
 
 type TestObj struct {
@@ -65,3 +69,72 @@ func TestJsonSchemaValidator_Validate(t *testing.T) {
 		assert.True(t, ret)
 	}
 }
+
+func TestAPISIXJsonSchemaValidator_Validate(t *testing.T) {
+	validator, err := NewAPISIXJsonSchemaValidator("main.consumer")
+	assert.Nil(t, err)
+
+	consumer := &entity.Consumer{}
+	reqBody := `{
+      "id": "jack",
+      "username": "jack",
+      "plugins": {
+          "limit-count": {
+              "count": 2,
+              "time_window": 60,
+              "rejected_code": 503,
+              "key": "remote_addr"
+          }
+      },
+    "desc": "test description"
+  }`
+	json.Unmarshal([]byte(reqBody), consumer)
+
+	err = validator.Validate(consumer)
+	assert.Nil(t, err)
+
+	consumer2 := &entity.Consumer{}
+	reqBody = `{
+      "username": "jack",
+      "plugins": {
+          "limit-count": {
+              "count": 2,
+              "time_window": 60,
+              "rejected_code": 503,
+              "key": "remote_addr"
+          }
+      },
+    "desc": "test description"
+  }`
+	json.Unmarshal([]byte(reqBody), consumer2)
+
+	err = validator.Validate(consumer2)
+	assert.NotNil(t, err)
+	assert.EqualError(t, err, "scheme validate fail: id: Must validate at least one schema (anyOf)\nid: String length must be greater than or equal to 1\nid: Does not match pattern '^[a-zA-Z0-9-_]+$'")
+
+	//check nil obj
+	err = validator.Validate(nil)
+	assert.NotNil(t, err)
+	assert.EqualError(t, err, "scheme validate fail: (root): Invalid type. Expected: object, given: null")
+
+	//plugin schema fail
+	consumer3 := &entity.Consumer{}
+	reqBody = `{
+      "id": "jack",
+      "username": "jack",
+      "plugins": {
+          "limit-count": {
+              "time_window": 60,
+              "rejected_code": 503,
+              "key": "remote_addr"
+          }
+      },
+    "desc": "test description"
+  }`
+	json.Unmarshal([]byte(reqBody), consumer3)
+
+	err = validator.Validate(consumer3)
+	assert.NotNil(t, err)
+	assert.EqualError(t, err, "scheme validate fail: (root): count is required")
+
+}
diff --git a/api/internal/handler/consumer/consumer.go b/api/internal/handler/consumer/consumer.go
index d2533e7..372a765 100644
--- a/api/internal/handler/consumer/consumer.go
+++ b/api/internal/handler/consumer/consumer.go
@@ -23,7 +23,6 @@ import (
 
 	"github.com/gin-gonic/gin"
 	"github.com/shiningrush/droplet"
-	"github.com/shiningrush/droplet/data"
 	"github.com/shiningrush/droplet/wrapper"
 	wgin "github.com/shiningrush/droplet/wrapper/gin"
 
@@ -71,7 +70,7 @@ func (h *Handler) Get(c droplet.Context) (interface{}, error) {
 
 type ListInput struct {
 	Username string `auto_read:"username,query"`
-	data.Pager
+	store.Pagination
 }
 
 func (h *Handler) List(c droplet.Context) (interface{}, error) {
diff --git a/api/internal/handler/consumer/consumer_test.go b/api/internal/handler/consumer/consumer_test.go
new file mode 100644
index 0000000..5291af2
--- /dev/null
+++ b/api/internal/handler/consumer/consumer_test.go
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package consumer
+
+import (
+	"encoding/json"
+	"testing"
+	"time"
+
+	"github.com/shiningrush/droplet"
+	"github.com/stretchr/testify/assert"
+
+	"github.com/apisix/manager-api/internal/core/entity"
+	"github.com/apisix/manager-api/internal/core/storage"
+	"github.com/apisix/manager-api/internal/core/store"
+)
+
+func TestConsumer(t *testing.T) {
+	// init
+	err := storage.InitETCDClient([]string{"127.0.0.1:2379"})
+	assert.Nil(t, err)
+	err = store.InitStores()
+	assert.Nil(t, err)
+
+	handler := &Handler{
+		consumerStore: store.GetStore(store.HubKeyConsumer),
+	}
+	assert.NotNil(t, handler)
+
+	//create consumer
+	ctx := droplet.NewContext()
+	consumer := &entity.Consumer{}
+	reqBody := `{
+      "username": "jack",
+      "plugins": {
+          "limit-count": {
+              "count": 2,
+              "time_window": 60,
+              "rejected_code": 503,
+              "key": "remote_addr"
+          }
+      },
+    "desc": "test description"
+  }`
+	json.Unmarshal([]byte(reqBody), consumer)
+	ctx.SetInput(consumer)
+	_, err = handler.Create(ctx)
+	assert.Nil(t, err)
+
+	//create consumer 2
+	consumer2 := &entity.Consumer{}
+	reqBody = `{
+      "username": "pony",
+      "plugins": {
+          "limit-count": {
+              "count": 2,
+              "time_window": 60,
+              "rejected_code": 503,
+              "key": "remote_addr"
+          }
+      },
+    "desc": "test description"
+  }`
+	json.Unmarshal([]byte(reqBody), consumer2)
+	ctx.SetInput(consumer2)
+	_, err = handler.Create(ctx)
+	assert.Nil(t, err)
+
+	//sleep
+	time.Sleep(time.Duration(100) * time.Millisecond)
+
+	//get consumer
+	input := &GetInput{}
+	reqBody = `{"username": "jack"}`
+	json.Unmarshal([]byte(reqBody), input)
+	ctx.SetInput(input)
+	ret, err := handler.Get(ctx)
+	stored := ret.(*entity.Consumer)
+	assert.Nil(t, err)
+	assert.Equal(t, stored.ID, consumer.ID)
+	assert.Equal(t, stored.Username, consumer.Username)
+
+	//update consumer
+	consumer3 := &UpdateInput{}
+	consumer3.Username = "pony"
+	reqBody = `{
+      "username": "pony",
+      "plugins": {
+          "limit-count": {
+              "count": 2,
+              "time_window": 60,
+              "rejected_code": 503,
+              "key": "remote_addr"
+          }
+      },
+    "desc": "test description2"
+  }`
+	json.Unmarshal([]byte(reqBody), consumer3)
+	ctx.SetInput(consumer3)
+	_, err = handler.Update(ctx)
+	assert.Nil(t, err)
+
+	//sleep
+	time.Sleep(time.Duration(100) * time.Millisecond)
+
+	//check update
+	input3 := &GetInput{}
+	reqBody = `{"username": "pony"}`
+	json.Unmarshal([]byte(reqBody), input3)
+	ctx.SetInput(input3)
+	ret3, err := handler.Get(ctx)
+	stored3 := ret3.(*entity.Consumer)
+	assert.Nil(t, err)
+	assert.Equal(t, stored3.Desc, "test description2") //consumer3.Desc)
+	assert.Equal(t, stored3.Username, consumer3.Username)
+
+	//list page 1
+	listInput := &ListInput{}
+	reqBody = `{"pageSize": 1, "page": 1}`
+	json.Unmarshal([]byte(reqBody), listInput)
+	ctx.SetInput(listInput)
+	retPage1, err := handler.List(ctx)
+	dataPage1 := retPage1.(*store.ListOutput)
+	assert.Equal(t, len(dataPage1.Rows), 1)
+
+	//list page 2
+	listInput2 := &ListInput{}
+	reqBody = `{"pageSize": 1, "page": 2}`
+	json.Unmarshal([]byte(reqBody), listInput2)
+	ctx.SetInput(listInput2)
+	retPage2, err := handler.List(ctx)
+	dataPage2 := retPage2.(*store.ListOutput)
+	assert.Equal(t, len(dataPage2.Rows), 1)
+
+	//delete consumer
+	inputDel := &BatchDelete{}
+	reqBody = `{"usernames": "jack"}`
+	json.Unmarshal([]byte(reqBody), inputDel)
+	ctx.SetInput(inputDel)
+	_, err = handler.BatchDelete(ctx)
+	assert.Nil(t, err)
+
+	reqBody = `{"usernames": "pony"}`
+	json.Unmarshal([]byte(reqBody), inputDel)
+	ctx.SetInput(inputDel)
+	_, err = handler.BatchDelete(ctx)
+	assert.Nil(t, err)
+
+	//create consumer fail
+	consumer_fail := &entity.Consumer{}
+	reqBody = `{
+      "plugins": {
+          "limit-count": {
+              "count": 2,
+              "time_window": 60,
+              "rejected_code": 503,
+              "key": "remote_addr"
+          }
+      },
+    "desc": "test description"
+  }`
+	json.Unmarshal([]byte(reqBody), consumer_fail)
+	ctx.SetInput(consumer_fail)
+	_, err = handler.Create(ctx)
+	assert.NotNil(t, err)
+
+}
diff --git a/api/internal/handler/route/route.go b/api/internal/handler/route/route.go
index 45662b9..8f1b409 100644
--- a/api/internal/handler/route/route.go
+++ b/api/internal/handler/route/route.go
@@ -30,6 +30,7 @@ import (
 	"github.com/shiningrush/droplet/wrapper"
 	wgin "github.com/shiningrush/droplet/wrapper/gin"
 
+	"github.com/apisix/manager-api/conf"
 	"github.com/apisix/manager-api/internal/core/entity"
 	"github.com/apisix/manager-api/internal/core/store"
 	"github.com/apisix/manager-api/internal/handler"
@@ -92,7 +93,7 @@ func (h *Handler) Get(c droplet.Context) (interface{}, error) {
 
 type ListInput struct {
 	Name string `auto_read:"name,query"`
-	data.Pager
+	store.Pagination
 }
 
 func (h *Handler) List(c droplet.Context) (interface{}, error) {
@@ -133,7 +134,7 @@ func generateLuaCode(script map[string]interface{}) (string, error) {
 	}
 
 	cmd := exec.Command("sh", "-c",
-		"cd /go/manager-api/dag-to-lua/ && lua cli.lua "+
+		"cd "+conf.DagLibPath+" && lua cli.lua "+
 			"'"+string(scriptString)+"'")
 
 	stdout, _ := cmd.StdoutPipe()
@@ -152,7 +153,7 @@ func (h *Handler) Create(c droplet.Context) (interface{}, error) {
 	input := c.Input().(*entity.Route)
 	//check depend
 	if input.ServiceID != "" {
-		_, err := h.upstreamStore.Get(input.ServiceID)
+		_, err := h.svcStore.Get(input.ServiceID)
 		if err != nil {
 			if err == data.ErrNotFound {
 				return nil, fmt.Errorf("service id: %s not found", input.ServiceID)
@@ -161,7 +162,7 @@ func (h *Handler) Create(c droplet.Context) (interface{}, error) {
 		}
 	}
 	if input.UpstreamID != "" {
-		_, err := h.upstreamStore.Get(input.ServiceID)
+		_, err := h.upstreamStore.Get(input.UpstreamID)
 		if err != nil {
 			if err == data.ErrNotFound {
 				return nil, fmt.Errorf("upstream id: %s not found", input.UpstreamID)
@@ -207,7 +208,7 @@ func (h *Handler) Update(c droplet.Context) (interface{}, error) {
 
 	//check depend
 	if input.ServiceID != "" {
-		_, err := h.upstreamStore.Get(input.ServiceID)
+		_, err := h.svcStore.Get(input.ServiceID)
 		if err != nil {
 			if err == data.ErrNotFound {
 				return nil, fmt.Errorf("service id: %s not found", input.ServiceID)
@@ -216,7 +217,7 @@ func (h *Handler) Update(c droplet.Context) (interface{}, error) {
 		}
 	}
 	if input.UpstreamID != "" {
-		_, err := h.upstreamStore.Get(input.ServiceID)
+		_, err := h.upstreamStore.Get(input.UpstreamID)
 		if err != nil {
 			if err == data.ErrNotFound {
 				return nil, fmt.Errorf("upstream id: %s not found", input.UpstreamID)
@@ -226,7 +227,7 @@ func (h *Handler) Update(c droplet.Context) (interface{}, error) {
 	}
 
 	if input.Script != nil {
-		script := entity.Script{}
+		script := &entity.Script{}
 		script.ID = input.ID
 		script.Script = input.Script
 		//to lua
@@ -236,8 +237,15 @@ func (h *Handler) Update(c droplet.Context) (interface{}, error) {
 			return nil, err
 		}
 		//save original conf
-		if err = h.scriptStore.Create(c.Context(), script); err != nil {
-			return nil, err
+		if err = h.scriptStore.Update(c.Context(), script); err != nil {
+			//if not exists, create
+			if err.Error() == fmt.Sprintf("key: %s is not found", script.ID) {
+				if err := h.scriptStore.Create(c.Context(), script); err != nil {
+					return nil, err
+				}
+			} else {
+				return nil, err
+			}
 		}
 	}
 
@@ -255,10 +263,14 @@ type BatchDelete struct {
 func (h *Handler) BatchDelete(c droplet.Context) (interface{}, error) {
 	input := c.Input().(*BatchDelete)
 
+	//delete route
 	if err := h.routeStore.BatchDelete(c.Context(), strings.Split(input.IDs, ",")); err != nil {
 		return nil, err
 	}
 
+	//delete stored script
+	h.scriptStore.BatchDelete(c.Context(), strings.Split(input.IDs, ","))
+
 	return nil, nil
 }
 
diff --git a/api/internal/handler/route/route_test.go b/api/internal/handler/route/route_test.go
new file mode 100644
index 0000000..33d48b8
--- /dev/null
+++ b/api/internal/handler/route/route_test.go
@@ -0,0 +1,766 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package route
+
+import (
+	"encoding/json"
+	"testing"
+	"time"
+
+	"github.com/shiningrush/droplet"
+	"github.com/stretchr/testify/assert"
+
+	"github.com/apisix/manager-api/internal/core/entity"
+	"github.com/apisix/manager-api/internal/core/storage"
+	"github.com/apisix/manager-api/internal/core/store"
+)
+
+func TestRoute(t *testing.T) {
+	// init
+	err := storage.InitETCDClient([]string{"127.0.0.1:2379"})
+	assert.Nil(t, err)
+	err = store.InitStores()
+	assert.Nil(t, err)
+
+	handler := &Handler{
+		routeStore:    store.GetStore(store.HubKeyRoute),
+		svcStore:      store.GetStore(store.HubKeyService),
+		upstreamStore: store.GetStore(store.HubKeyUpstream),
+		scriptStore:   store.GetStore(store.HubKeyScript),
+	}
+	assert.NotNil(t, handler)
+
+	//create Note: depends on lib `dag-to-lua` if script exists
+	ctx := droplet.NewContext()
+	route := &entity.Route{}
+	reqBody := `{
+      "id": "1",
+      "name": "aaaa",
+      "uri": "/index.html",
+      "hosts": ["foo.com", "*.bar.com"],
+      "vars": [],
+      "remote_addrs": ["127.0.0.0/8"],
+      "methods": ["PUT", "GET"],
+      "upstream": {
+          "type": "roundrobin",
+          "nodes": [{
+              "host": "www.a.com",
+              "port": 80,
+              "weight": 1
+          }]
+      },
+      "script":{
+          "rule":{
+              "root":"451106f8-560c-43a4-acf2-2a6ed0ea57b8",
+              "451106f8-560c-43a4-acf2-2a6ed0ea57b8":[
+                  [
+                      "code == 403",
+                      "b93d622c-92ef-48b4-b6bb-57e1ce893ee3"
+                  ],
+                  [
+                      "",
+                      "988ef5c2-c896-4606-a666-3d4cbe24a731"
+                  ]
+              ]
+          },
+          "conf":{
+              "451106f8-560c-43a4-acf2-2a6ed0ea57b8":{
+                  "name":"uri-blocker",
+                  "conf":{
+                      "block_rules":[
+                          "root.exe",
+                          "root.m+"
+                      ],
+                      "rejected_code":403
+                  }
+              },
+              "988ef5c2-c896-4606-a666-3d4cbe24a731":{
+                  "name":"kafka-logger",
+                  "conf":{
+                      "batch_max_size":1000,
+                      "broker_list":{
+
+                      },
+                      "buffer_duration":60,
+                      "inactive_timeout":5,
+                      "include_req_body":false,
+                      "kafka_topic":"1",
+                      "key":"2",
+                      "max_retry_count":0,
+                      "name":"kafka logger",
+                      "retry_delay":1,
+                      "timeout":3
+                  }
+              },
+              "b93d622c-92ef-48b4-b6bb-57e1ce893ee3":{
+                  "name":"fault-injection",
+                  "conf":{
+                      "abort":{
+                          "body":"200",
+                          "http_status":300
+                      },
+                      "delay":{
+                          "duration":500
+                      }
+                  }
+              }
+          },
+          "chart":{
+              "hovered":{
+
+              },
+              "links":{
+                  "3a110c30-d6f3-40b1-a8ac-b828cfaa2489":{
+                      "from":{
+                          "nodeId":"3365eca3-4bc8-4769-bab3-1485dfd6a43c",
+                          "portId":"port3"
+                      },
+                      "id":"3a110c30-d6f3-40b1-a8ac-b828cfaa2489",
+                      "to":{
+                          "nodeId":"b93d622c-92ef-48b4-b6bb-57e1ce893ee3",
+                          "portId":"port1"
+                      }
+                  },
+                  "c1958993-c1ef-44b1-bb32-7fc6f34870c2":{
+                      "from":{
+                          "nodeId":"3365eca3-4bc8-4769-bab3-1485dfd6a43c",
+                          "portId":"port2"
+                      },
+                      "id":"c1958993-c1ef-44b1-bb32-7fc6f34870c2",
+                      "to":{
+                          "nodeId":"988ef5c2-c896-4606-a666-3d4cbe24a731",
+                          "portId":"port1"
+                      }
+                  },
+                  "f9c42bf6-c8aa-4e86-8498-8dfbc5c53c23":{
+                      "from":{
+                          "nodeId":"451106f8-560c-43a4-acf2-2a6ed0ea57b8",
+                          "portId":"port2"
+                      },
+                      "id":"f9c42bf6-c8aa-4e86-8498-8dfbc5c53c23",
+                      "to":{
+                          "nodeId":"3365eca3-4bc8-4769-bab3-1485dfd6a43c",
+                          "portId":"port1"
+                      }
+                  }
+              },
+              "nodes":{
+                  "3365eca3-4bc8-4769-bab3-1485dfd6a43c":{
+                      "id":"3365eca3-4bc8-4769-bab3-1485dfd6a43c",
+                      "orientation":0,
+                      "ports":{
+                          "port1":{
+                              "id":"port1",
+                              "position":{
+                                  "x":107,
+                                  "y":0
+                              },
+                              "type":"input"
+                          },
+                          "port2":{
+                              "id":"port2",
+                              "position":{
+                                  "x":92,
+                                  "y":96
+                              },
+                              "properties":{
+                                  "value":"no"
+                              },
+                              "type":"output"
+                          },
+                          "port3":{
+                              "id":"port3",
+                              "position":{
+                                  "x":122,
+                                  "y":96
+                              },
+                              "properties":{
+                                  "value":"yes"
+                              },
+                              "type":"output"
+                          }
+                      },
+                      "position":{
+                          "x":750.2627969928922,
+                          "y":301.0370335799397
+                      },
+                      "properties":{
+                          "customData":{
+                              "name":"code == 403",
+                              "type":1
+                          }
+                      },
+                      "size":{
+                          "height":96,
+                          "width":214
+                      },
+                      "type":"判断条件"
+                  },
+                  "451106f8-560c-43a4-acf2-2a6ed0ea57b8":{
+                      "id":"451106f8-560c-43a4-acf2-2a6ed0ea57b8",
+                      "orientation":0,
+                      "ports":{
+                          "port1":{
+                              "id":"port1",
+                              "position":{
+                                  "x":100,
+                                  "y":0
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"input"
+                          },
+                          "port2":{
+                              "id":"port2",
+                              "position":{
+                                  "x":100,
+                                  "y":96
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"output"
+                          }
+                      },
+                      "position":{
+                          "x":741.5684544145346,
+                          "y":126.75879247285502
+                      },
+                      "properties":{
+                          "customData":{
+                              "data":{
+                                  "block_rules":[
+                                      "root.exe",
+                                      "root.m+"
+                                  ],
+                                  "rejected_code":403
+                              },
+                              "name":"uri-blocker",
+                              "type":0
+                          }
+                      },
+                      "size":{
+                          "height":96,
+                          "width":201
+                      },
+                      "type":"uri-blocker"
+                  },
+                  "988ef5c2-c896-4606-a666-3d4cbe24a731":{
+                      "id":"988ef5c2-c896-4606-a666-3d4cbe24a731",
+                      "orientation":0,
+                      "ports":{
+                          "port1":{
+                              "id":"port1",
+                              "position":{
+                                  "x":106,
+                                  "y":0
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"input"
+                          },
+                          "port2":{
+                              "id":"port2",
+                              "position":{
+                                  "x":106,
+                                  "y":96
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"output"
+                          }
+                      },
+                      "position":{
+                          "x":607.9687500000001,
+                          "y":471.17788461538447
+                      },
+                      "properties":{
+                          "customData":{
+                              "data":{
+                                  "batch_max_size":1000,
+                                  "broker_list":{
+
+                                  },
+                                  "buffer_duration":60,
+                                  "inactive_timeout":5,
+                                  "include_req_body":false,
+                                  "kafka_topic":"1",
+                                  "key":"2",
+                                  "max_retry_count":0,
+                                  "name":"kafka logger",
+                                  "retry_delay":1,
+                                  "timeout":3
+                              },
+                              "name":"kafka-logger",
+                              "type":0
+                          }
+                      },
+                      "size":{
+                          "height":96,
+                          "width":212
+                      },
+                      "type":"kafka-logger"
+                  },
+                  "b93d622c-92ef-48b4-b6bb-57e1ce893ee3":{
+                      "id":"b93d622c-92ef-48b4-b6bb-57e1ce893ee3",
+                      "orientation":0,
+                      "ports":{
+                          "port1":{
+                              "id":"port1",
+                              "position":{
+                                  "x":110,
+                                  "y":0
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"input"
+                          },
+                          "port2":{
+                              "id":"port2",
+                              "position":{
+                                  "x":110,
+                                  "y":96
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"output"
+                          }
+                      },
+                      "position":{
+                          "x":988.9074986362261,
+                          "y":478.62041800736495
+                      },
+                      "properties":{
+                          "customData":{
+                              "data":{
+                                  "abort":{
+                                      "body":"200",
+                                      "http_status":300
+                                  },
+                                  "delay":{
+                                      "duration":500
+                                  }
+                              },
+                              "name":"fault-injection",
+                              "type":0
+                          }
+                      },
+                      "size":{
+                          "height":96,
+                          "width":219
+                      },
+                      "type":"fault-injection"
+                  }
+              },
+              "offset":{
+                  "x":-376.83,
+                  "y":87.98
+              },
+              "scale":0.832,
+              "selected":{
+                  "id":"b93d622c-92ef-48b4-b6bb-57e1ce893ee3",
+                  "type":"node"
+              }
+          }
+      }
+  }`
+	json.Unmarshal([]byte(reqBody), route)
+	ctx.SetInput(route)
+	_, err = handler.Create(ctx)
+	assert.Nil(t, err)
+
+	// give the store a moment to sync the new route before reading it back
+	time.Sleep(100 * time.Millisecond)
+
+	//get
+	input := &GetInput{}
+	input.ID = "1"
+	ctx.SetInput(input)
+	ret, err := handler.Get(ctx)
+	stored := ret.(*entity.Route)
+	assert.Nil(t, err)
+	assert.Equal(t, stored.ID, route.ID)
+
+	//update
+	route2 := &UpdateInput{}
+	route2.ID = "1"
+	reqBody = `{
+      "id": "1",
+      "name": "aaaa",
+      "uri": "/index.html",
+      "hosts": ["foo.com", "*.bar.com"],
+      "remote_addrs": ["127.0.0.0/8"],
+      "methods": ["PUT", "GET"],
+      "upstream": {
+          "type": "roundrobin",
+          "nodes": [{
+              "host": "www.a.com",
+              "port": 80,
+              "weight": 1
+          }]
+      },
+      "script":{
+          "rule":{
+              "root":"451106f8-560c-43a4-acf2-2a6ed0ea57b8",
+              "451106f8-560c-43a4-acf2-2a6ed0ea57b8":[
+                  [
+                      "code == 403",
+                      "b93d622c-92ef-48b4-b6bb-57e1ce893ee3"
+                  ],
+                  [
+                      "",
+                      "988ef5c2-c896-4606-a666-3d4cbe24a731"
+                  ]
+              ]
+          },
+          "conf":{
+              "451106f8-560c-43a4-acf2-2a6ed0ea57b8":{
+                  "name":"uri-blocker",
+                  "conf":{
+                      "block_rules":[
+                          "root.exe",
+                          "root.m+"
+                      ],
+                      "rejected_code":403
+                  }
+              },
+              "988ef5c2-c896-4606-a666-3d4cbe24a731":{
+                  "name":"kafka-logger",
+                  "conf":{
+                      "batch_max_size":1000,
+                      "broker_list":{
+
+                      },
+                      "buffer_duration":60,
+                      "inactive_timeout":5,
+                      "include_req_body":false,
+                      "kafka_topic":"1",
+                      "key":"2",
+                      "max_retry_count":0,
+                      "name":"kafka logger",
+                      "retry_delay":1,
+                      "timeout":3
+                  }
+              },
+              "b93d622c-92ef-48b4-b6bb-57e1ce893ee3":{
+                  "name":"fault-injection",
+                  "conf":{
+                      "abort":{
+                          "body":"200",
+                          "http_status":300
+                      },
+                      "delay":{
+                          "duration":500
+                      }
+                  }
+              }
+          },
+          "chart":{
+              "hovered":{
+
+              },
+              "links":{
+                  "3a110c30-d6f3-40b1-a8ac-b828cfaa2489":{
+                      "from":{
+                          "nodeId":"3365eca3-4bc8-4769-bab3-1485dfd6a43c",
+                          "portId":"port3"
+                      },
+                      "id":"3a110c30-d6f3-40b1-a8ac-b828cfaa2489",
+                      "to":{
+                          "nodeId":"b93d622c-92ef-48b4-b6bb-57e1ce893ee3",
+                          "portId":"port1"
+                      }
+                  },
+                  "c1958993-c1ef-44b1-bb32-7fc6f34870c2":{
+                      "from":{
+                          "nodeId":"3365eca3-4bc8-4769-bab3-1485dfd6a43c",
+                          "portId":"port2"
+                      },
+                      "id":"c1958993-c1ef-44b1-bb32-7fc6f34870c2",
+                      "to":{
+                          "nodeId":"988ef5c2-c896-4606-a666-3d4cbe24a731",
+                          "portId":"port1"
+                      }
+                  },
+                  "f9c42bf6-c8aa-4e86-8498-8dfbc5c53c23":{
+                      "from":{
+                          "nodeId":"451106f8-560c-43a4-acf2-2a6ed0ea57b8",
+                          "portId":"port2"
+                      },
+                      "id":"f9c42bf6-c8aa-4e86-8498-8dfbc5c53c23",
+                      "to":{
+                          "nodeId":"3365eca3-4bc8-4769-bab3-1485dfd6a43c",
+                          "portId":"port1"
+                      }
+                  }
+              },
+              "nodes":{
+                  "3365eca3-4bc8-4769-bab3-1485dfd6a43c":{
+                      "id":"3365eca3-4bc8-4769-bab3-1485dfd6a43c",
+                      "orientation":0,
+                      "ports":{
+                          "port1":{
+                              "id":"port1",
+                              "position":{
+                                  "x":107,
+                                  "y":0
+                              },
+                              "type":"input"
+                          },
+                          "port2":{
+                              "id":"port2",
+                              "position":{
+                                  "x":92,
+                                  "y":96
+                              },
+                              "properties":{
+                                  "value":"no"
+                              },
+                              "type":"output"
+                          },
+                          "port3":{
+                              "id":"port3",
+                              "position":{
+                                  "x":122,
+                                  "y":96
+                              },
+                              "properties":{
+                                  "value":"yes"
+                              },
+                              "type":"output"
+                          }
+                      },
+                      "position":{
+                          "x":750.2627969928922,
+                          "y":301.0370335799397
+                      },
+                      "properties":{
+                          "customData":{
+                              "name":"code == 403",
+                              "type":1
+                          }
+                      },
+                      "size":{
+                          "height":96,
+                          "width":214
+                      },
+                      "type":"判断条件"
+                  },
+                  "451106f8-560c-43a4-acf2-2a6ed0ea57b8":{
+                      "id":"451106f8-560c-43a4-acf2-2a6ed0ea57b8",
+                      "orientation":0,
+                      "ports":{
+                          "port1":{
+                              "id":"port1",
+                              "position":{
+                                  "x":100,
+                                  "y":0
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"input"
+                          },
+                          "port2":{
+                              "id":"port2",
+                              "position":{
+                                  "x":100,
+                                  "y":96
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"output"
+                          }
+                      },
+                      "position":{
+                          "x":741.5684544145346,
+                          "y":126.75879247285502
+                      },
+                      "properties":{
+                          "customData":{
+                              "data":{
+                                  "block_rules":[
+                                      "root.exe",
+                                      "root.m+"
+                                  ],
+                                  "rejected_code":403
+                              },
+                              "name":"uri-blocker",
+                              "type":0
+                          }
+                      },
+                      "size":{
+                          "height":96,
+                          "width":201
+                      },
+                      "type":"uri-blocker"
+                  },
+                  "988ef5c2-c896-4606-a666-3d4cbe24a731":{
+                      "id":"988ef5c2-c896-4606-a666-3d4cbe24a731",
+                      "orientation":0,
+                      "ports":{
+                          "port1":{
+                              "id":"port1",
+                              "position":{
+                                  "x":106,
+                                  "y":0
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"input"
+                          },
+                          "port2":{
+                              "id":"port2",
+                              "position":{
+                                  "x":106,
+                                  "y":96
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"output"
+                          }
+                      },
+                      "position":{
+                          "x":607.9687500000001,
+                          "y":471.17788461538447
+                      },
+                      "properties":{
+                          "customData":{
+                              "data":{
+                                  "batch_max_size":1000,
+                                  "broker_list":{
+
+                                  },
+                                  "buffer_duration":60,
+                                  "inactive_timeout":5,
+                                  "include_req_body":false,
+                                  "kafka_topic":"1",
+                                  "key":"2",
+                                  "max_retry_count":0,
+                                  "name":"kafka logger",
+                                  "retry_delay":1,
+                                  "timeout":3
+                              },
+                              "name":"kafka-logger",
+                              "type":0
+                          }
+                      },
+                      "size":{
+                          "height":96,
+                          "width":212
+                      },
+                      "type":"kafka-logger"
+                  },
+                  "b93d622c-92ef-48b4-b6bb-57e1ce893ee3":{
+                      "id":"b93d622c-92ef-48b4-b6bb-57e1ce893ee3",
+                      "orientation":0,
+                      "ports":{
+                          "port1":{
+                              "id":"port1",
+                              "position":{
+                                  "x":110,
+                                  "y":0
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"input"
+                          },
+                          "port2":{
+                              "id":"port2",
+                              "position":{
+                                  "x":110,
+                                  "y":96
+                              },
+                              "properties":{
+                                  "custom":"property"
+                              },
+                              "type":"output"
+                          }
+                      },
+                      "position":{
+                          "x":988.9074986362261,
+                          "y":478.62041800736495
+                      },
+                      "properties":{
+                          "customData":{
+                              "data":{
+                                  "abort":{
+                                      "body":"200",
+                                      "http_status":300
+                                  },
+                                  "delay":{
+                                      "duration":500
+                                  }
+                              },
+                              "name":"fault-injection",
+                              "type":0
+                          }
+                      },
+                      "size":{
+                          "height":96,
+                          "width":219
+                      },
+                      "type":"fault-injection"
+                  }
+              },
+              "offset":{
+                  "x":-376.83,
+                  "y":87.98
+              },
+              "scale":0.832,
+              "selected":{
+                  "id":"b93d622c-92ef-48b4-b6bb-57e1ce893ee3",
+                  "type":"node"
+              }
+          }
+      }
+  }`
+
+	json.Unmarshal([]byte(reqBody), route2)
+	ctx.SetInput(route2)
+	_, err = handler.Update(ctx)
+	assert.Nil(t, err)
+
+	//list
+	listInput := &ListInput{}
+	reqBody = `{"pageSize": 1, "page": 1}`
+	json.Unmarshal([]byte(reqBody), listInput)
+	ctx.SetInput(listInput)
+	retPage, err := handler.List(ctx)
+	assert.Nil(t, err)
+	dataPage := retPage.(*store.ListOutput)
+	assert.Equal(t, len(dataPage.Rows), 1)
+
+	//delete test data
+	inputDel := &BatchDelete{}
+	reqBody = `{"ids": "1"}`
+	json.Unmarshal([]byte(reqBody), inputDel)
+	ctx.SetInput(inputDel)
+	_, err = handler.BatchDelete(ctx)
+	assert.Nil(t, err)
+
+}
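
The route test above exercises a script-based route through the full create/get/update/list/delete cycle. Inside the fixture, script.rule encodes a small DAG (uri-blocker at the root, a "code == 403" condition branching to fault-injection, with kafka-logger on the default branch) and script.conf carries each node's plugin configuration. As a reading aid only, since the manager-api keeps script as untyped JSON, the two parts can be decoded with illustrative local types like these (they are not the project's entity definitions):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Illustrative types only; the dashboard stores `script` as untyped JSON,
    // so these do not mirror the real entity definitions.
    type scriptConf struct {
        Name string          `json:"name"`
        Conf json.RawMessage `json:"conf"`
    }

    type script struct {
        // Rule maps a node id to its outgoing edges; the special key
        // "root" names the entry node.
        Rule map[string]json.RawMessage `json:"rule"`
        Conf map[string]scriptConf      `json:"conf"`
    }

    func main() {
        raw := `{"rule":{"root":"a","a":[["code == 403","b"],["","c"]]},
                 "conf":{"a":{"name":"uri-blocker","conf":{"rejected_code":403}}}}`

        var s script
        if err := json.Unmarshal([]byte(raw), &s); err != nil {
            panic(err)
        }
        fmt.Println(string(s.Rule["root"]), s.Conf["a"].Name) // prints: "a" uri-blocker
    }
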
diff --git a/api/internal/handler/service/service.go b/api/internal/handler/service/service.go
index 47689ee..a85eda2 100644
--- a/api/internal/handler/service/service.go
+++ b/api/internal/handler/service/service.go
@@ -23,7 +23,6 @@ import (
 	"github.com/api7/go-jsonpatch"
 	"github.com/gin-gonic/gin"
 	"github.com/shiningrush/droplet"
-	"github.com/shiningrush/droplet/data"
 	"github.com/shiningrush/droplet/wrapper"
 	wgin "github.com/shiningrush/droplet/wrapper/gin"
 
@@ -73,7 +72,7 @@ func (h *Handler) Get(c droplet.Context) (interface{}, error) {
 
 type ListInput struct {
 	ID string `auto_read:"id,query"`
-	data.Pager
+	store.Pagination
 }
 
 func (h *Handler) List(c droplet.Context) (interface{}, error) {
@@ -161,8 +160,7 @@ func (h *Handler) Patch(c droplet.Context) (interface{}, error) {
 		}
 	}
 
-	err = patch.Apply(&stored)
-	if err != nil {
+	if err := patch.Apply(&stored); err != nil {
 		return nil, err
 	}
 
diff --git a/api/internal/handler/service/service_test.go b/api/internal/handler/service/service_test.go
new file mode 100644
index 0000000..86e098a
--- /dev/null
+++ b/api/internal/handler/service/service_test.go
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package service
+
+import (
+	"encoding/json"
+	"testing"
+	"time"
+
+	"github.com/shiningrush/droplet"
+	"github.com/stretchr/testify/assert"
+
+	"github.com/apisix/manager-api/internal/core/entity"
+	"github.com/apisix/manager-api/internal/core/storage"
+	"github.com/apisix/manager-api/internal/core/store"
+)
+
+func TestService(t *testing.T) {
+	// init
+	err := storage.InitETCDClient([]string{"127.0.0.1:2379"})
+	assert.Nil(t, err)
+	err = store.InitStores()
+	assert.Nil(t, err)
+
+	handler := &Handler{
+		serviceStore: store.GetStore(store.HubKeyService),
+	}
+	assert.NotNil(t, handler)
+
+	//create
+	ctx := droplet.NewContext()
+	service := &entity.Service{}
+	reqBody := `{
+      "id": "1",
+      "plugins": {
+          "limit-count": {
+              "count": 2,
+              "time_window": 60,
+              "rejected_code": 503,
+              "key": "remote_addr"
+          }
+      },
+      "upstream": {
+          "type": "roundrobin",
+          "nodes": [{
+              "host": "39.97.63.215",
+              "port": 80,
+              "weight": 1
+          }]
+      }
+  }`
+	json.Unmarshal([]byte(reqBody), service)
+	ctx.SetInput(service)
+	_, err = handler.Create(ctx)
+	assert.Nil(t, err)
+
+	// give the store a moment to sync the new service before reading it back
+	time.Sleep(100 * time.Millisecond)
+
+	//get
+	input := &GetInput{}
+	input.ID = "1"
+	ctx.SetInput(input)
+	ret, err := handler.Get(ctx)
+	stored := ret.(*entity.Service)
+	assert.Nil(t, err)
+	assert.Equal(t, stored.ID, service.ID)
+
+	//update
+	service2 := &UpdateInput{}
+	service2.ID = "1"
+	reqBody = `{
+      "plugins": {
+          "limit-count": {
+              "count": 2,
+              "time_window": 60,
+              "rejected_code": 503,
+              "key": "remote_addr"
+          }
+      },
+      "upstream": {
+          "type": "roundrobin",
+          "nodes": [{
+              "host": "39.97.63.215",
+              "port": 80,
+              "weight": 1
+          }]
+      }
+  }`
+	json.Unmarshal([]byte(reqBody), service2)
+	ctx.SetInput(service2)
+	_, err = handler.Update(ctx)
+	assert.Nil(t, err)
+
+	//list
+	listInput := &ListInput{}
+	reqBody = `{"pageSize": 1, "page": 1}`
+	json.Unmarshal([]byte(reqBody), listInput)
+	ctx.SetInput(listInput)
+	retPage, err := handler.List(ctx)
+	assert.Nil(t, err)
+	dataPage := retPage.(*store.ListOutput)
+	assert.Equal(t, len(dataPage.Rows), 1)
+
+	//delete test data
+	inputDel := &BatchDelete{}
+	reqBody = `{"ids": "1"}`
+	json.Unmarshal([]byte(reqBody), inputDel)
+	ctx.SetInput(inputDel)
+	_, err = handler.BatchDelete(ctx)
+	assert.Nil(t, err)
+
+}
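
All four handlers touched by this commit switch ListInput from droplet's data.Pager to the local store.Pagination, and the list tests feed it {"pageSize": 1, "page": 1}. A minimal sketch of what such a pagination type and a paging helper could look like follows; the field names are inferred from the test payload and may not match the actual store.Pagination definition:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Pagination mirrors the JSON keys used by the list tests; the real
    // store.Pagination may differ in tags and embedding.
    type Pagination struct {
        Page     int `json:"page"`
        PageSize int `json:"pageSize"`
    }

    // pageSlice returns the rows belonging to one page, clamping
    // out-of-range requests instead of panicking.
    func pageSlice(rows []string, p Pagination) []string {
        if p.PageSize <= 0 || p.Page <= 0 {
            return rows
        }
        start := (p.Page - 1) * p.PageSize
        if start >= len(rows) {
            return nil
        }
        end := start + p.PageSize
        if end > len(rows) {
            end = len(rows)
        }
        return rows[start:end]
    }

    func main() {
        var p Pagination
        _ = json.Unmarshal([]byte(`{"pageSize": 1, "page": 1}`), &p)
        fmt.Println(pageSlice([]string{"r1", "r2", "r3"}, p)) // prints: [r1]
    }
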
diff --git a/api/internal/handler/ssl/ssl.go b/api/internal/handler/ssl/ssl.go
index 641ddee..494eaea 100644
--- a/api/internal/handler/ssl/ssl.go
+++ b/api/internal/handler/ssl/ssl.go
@@ -23,14 +23,12 @@ import (
 	"encoding/pem"
 	"errors"
 	"fmt"
-	"log"
 	"reflect"
 	"strings"
 
 	"github.com/api7/go-jsonpatch"
 	"github.com/gin-gonic/gin"
 	"github.com/shiningrush/droplet"
-	"github.com/shiningrush/droplet/data"
 	"github.com/shiningrush/droplet/wrapper"
 	wgin "github.com/shiningrush/droplet/wrapper/gin"
 
@@ -91,7 +89,7 @@ func (h *Handler) Get(c droplet.Context) (interface{}, error) {
 
 type ListInput struct {
 	ID string `auto_read:"id,query"`
-	data.Pager
+	store.Pagination
 }
 
 func (h *Handler) List(c droplet.Context) (interface{}, error) {
@@ -156,7 +154,6 @@ func (h *Handler) Update(c droplet.Context) (interface{}, error) {
 	}
 
 	ssl.ID = input.ID
-	log.Println("ssl", ssl)
 	if err := h.sslStore.Update(c.Context(), ssl); err != nil {
 		return nil, err
 	}
diff --git a/api/internal/handler/ssl/ssl_test.go b/api/internal/handler/ssl/ssl_test.go
new file mode 100644
index 0000000..9a8b341
--- /dev/null
+++ b/api/internal/handler/ssl/ssl_test.go
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ssl
+
+import (
+	"encoding/json"
+	"testing"
+	"time"
+
+	"github.com/shiningrush/droplet"
+	"github.com/stretchr/testify/assert"
+
+	"github.com/apisix/manager-api/internal/core/entity"
+	"github.com/apisix/manager-api/internal/core/storage"
+	"github.com/apisix/manager-api/internal/core/store"
+)
+
+func TestSSL(t *testing.T) {
+	// init
+	err := storage.InitETCDClient([]string{"127.0.0.1:2379"})
+	assert.Nil(t, err)
+	err = store.InitStores()
+	assert.Nil(t, err)
+
+	handler := &Handler{
+		sslStore: store.GetStore(store.HubKeySsl),
+	}
+	assert.NotNil(t, handler)
+
+	//create
+	ctx := droplet.NewContext()
+	ssl := &entity.SSL{}
+	reqBody := `{
+    "id": "1",
+	  "key": "-----BEGIN PRIVATE KEY-----\nMIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDGO0J9xrOcmvgh\npkqHIYHCw35FTfIT5uXOSzdF49M2ZAKBQwFG0ovYT8bc0glNLB+hpDhJPL531qSP\nl1ZOe0W1ofP1u0T5Zzc9Rub/kn7RMPq0BsSC6J3rF+rQEwh1PM8qUuD8DxZ7jaOL\niMNL6SyuZIPsS1kPPBtsioukdo666tbjNMixhQbI9Wpg55abdXRFh3i7Zu/9siF1\njCGcsskjOaUOY4sYQ3i5WU/HIIRhA82XuIL+Sxd32P8bKi2UT1sqFXRjAVR7KRWo\nIVvkmSLoZb9ucV6MsccDrRYBf6rLbI1tFj9l2rY6GTFlT+6z7K/ZI60DGi/hsBfl\nDeEQ5WuxAgMBAAECggEAVHQQyucpxHGdfzCKlfGnh+Oj20Du/p2jkHUp [...]
+	  "cert": "-----BEGIN CERTIFICATE-----\nMIIEVzCCAr+gAwIBAgIQITiNM7xmudhg3pK85KDwLDANBgkqhkiG9w0BAQsFADB/\nMR4wHAYDVQQKExVta2NlcnQgZGV2ZWxvcG1lbnQgQ0ExKjAoBgNVBAsMIWp1bnh1\nY2hlbkBqdW54dWRlQWlyIChqdW54dSBjaGVuKTExMC8GA1UEAwwobWtjZXJ0IGp1\nbnh1Y2hlbkBqdW54dWRlQWlyIChqdW54dSBjaGVuKTAeFw0xOTA2MDEwMDAwMDBa\nFw0zMDA3MDgwNzQ4MDJaMFUxJzAlBgNVBAoTHm1rY2VydCBkZXZlbG9wbWVudCBj\nZXJ0aWZpY2F0ZTEqMCgGA1UECwwhanVueHVjaGVuQGp1bnh1ZGVBaXIgKGp1bnh1\nIGNoZW4pMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxjt [...]
+  }`
+	json.Unmarshal([]byte(reqBody), ssl)
+	ctx.SetInput(ssl)
+	_, err = handler.Create(ctx)
+	assert.Nil(t, err)
+
+	// give the store a moment to sync the new SSL object before reading it back
+	time.Sleep(100 * time.Millisecond)
+
+	//get
+	input := &GetInput{}
+	input.ID = "1"
+	ctx.SetInput(input)
+	ret, err := handler.Get(ctx)
+	stored := ret.(*entity.SSL)
+	assert.Nil(t, err)
+	assert.Equal(t, stored.ID, ssl.ID)
+
+	//update
+	ssl2 := &UpdateInput{}
+	ssl2.ID = "1"
+	reqBody = `{
+    "id": "1",
+	  "key": "-----BEGIN PRIVATE KEY-----\nMIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDGO0J9xrOcmvgh\npkqHIYHCw35FTfIT5uXOSzdF49M2ZAKBQwFG0ovYT8bc0glNLB+hpDhJPL531qSP\nl1ZOe0W1ofP1u0T5Zzc9Rub/kn7RMPq0BsSC6J3rF+rQEwh1PM8qUuD8DxZ7jaOL\niMNL6SyuZIPsS1kPPBtsioukdo666tbjNMixhQbI9Wpg55abdXRFh3i7Zu/9siF1\njCGcsskjOaUOY4sYQ3i5WU/HIIRhA82XuIL+Sxd32P8bKi2UT1sqFXRjAVR7KRWo\nIVvkmSLoZb9ucV6MsccDrRYBf6rLbI1tFj9l2rY6GTFlT+6z7K/ZI60DGi/hsBfl\nDeEQ5WuxAgMBAAECggEAVHQQyucpxHGdfzCKlfGnh+Oj20Du/p2jkHUp [...]
+	  "cert": "-----BEGIN CERTIFICATE-----\nMIIEVzCCAr+gAwIBAgIQITiNM7xmudhg3pK85KDwLDANBgkqhkiG9w0BAQsFADB/\nMR4wHAYDVQQKExVta2NlcnQgZGV2ZWxvcG1lbnQgQ0ExKjAoBgNVBAsMIWp1bnh1\nY2hlbkBqdW54dWRlQWlyIChqdW54dSBjaGVuKTExMC8GA1UEAwwobWtjZXJ0IGp1\nbnh1Y2hlbkBqdW54dWRlQWlyIChqdW54dSBjaGVuKTAeFw0xOTA2MDEwMDAwMDBa\nFw0zMDA3MDgwNzQ4MDJaMFUxJzAlBgNVBAoTHm1rY2VydCBkZXZlbG9wbWVudCBj\nZXJ0aWZpY2F0ZTEqMCgGA1UECwwhanVueHVjaGVuQGp1bnh1ZGVBaXIgKGp1bnh1\nIGNoZW4pMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxjt [...]
+  }`
+	json.Unmarshal([]byte(reqBody), ssl2)
+	ctx.SetInput(ssl2)
+	_, err = handler.Update(ctx)
+	assert.Nil(t, err)
+
+	//list
+	listInput := &ListInput{}
+	reqBody = `{"pageSize": 1, "page": 1}`
+	json.Unmarshal([]byte(reqBody), listInput)
+	ctx.SetInput(listInput)
+	retPage, err := handler.List(ctx)
+	assert.Nil(t, err)
+	dataPage := retPage.(*store.ListOutput)
+	assert.Equal(t, len(dataPage.Rows), 1)
+
+	//delete test data
+	inputDel := &BatchDelete{}
+	reqBody = `{"ids": "1"}`
+	json.Unmarshal([]byte(reqBody), inputDel)
+	ctx.SetInput(inputDel)
+	_, err = handler.BatchDelete(ctx)
+	assert.Nil(t, err)
+
+}
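
The SSL test fixtures above embed a PEM key and certificate (truncated here by the mail archive). When building similar fixtures, the Go standard library can confirm locally that a key and certificate actually belong together before they are posted to the handler. This is only a local sanity check under assumed file names, not the validation logic the ssl handler itself runs:

    package main

    import (
        "crypto/tls"
        "fmt"
        "os"
    )

    func main() {
        // Hypothetical fixture paths; replace with your own PEM files.
        cert, err := os.ReadFile("test.crt")
        if err != nil {
            panic(err)
        }
        key, err := os.ReadFile("test.key")
        if err != nil {
            panic(err)
        }

        // X509KeyPair fails if the public and private keys do not match
        // or if either PEM block cannot be parsed.
        if _, err := tls.X509KeyPair(cert, key); err != nil {
            fmt.Println("key/cert pair is invalid:", err)
            os.Exit(1)
        }
        fmt.Println("key/cert pair is valid")
    }
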
diff --git a/api/internal/handler/upstream/upstream.go b/api/internal/handler/upstream/upstream.go
index 3b24ade..3727eb7 100644
--- a/api/internal/handler/upstream/upstream.go
+++ b/api/internal/handler/upstream/upstream.go
@@ -23,7 +23,6 @@ import (
 	"github.com/api7/go-jsonpatch"
 	"github.com/gin-gonic/gin"
 	"github.com/shiningrush/droplet"
-	"github.com/shiningrush/droplet/data"
 	"github.com/shiningrush/droplet/wrapper"
 	wgin "github.com/shiningrush/droplet/wrapper/gin"
 
@@ -77,7 +76,7 @@ func (h *Handler) Get(c droplet.Context) (interface{}, error) {
 
 type ListInput struct {
 	ID string `auto_read:"id,query"`
-	data.Pager
+	store.Pagination
 }
 
 func (h *Handler) List(c droplet.Context) (interface{}, error) {
@@ -168,8 +167,7 @@ func (h *Handler) Patch(c droplet.Context) (interface{}, error) {
 		}
 	}
 
-	err = patch.Apply(&stored)
-	if err != nil {
+	if err := patch.Apply(&stored); err != nil {
 		return nil, err
 	}
 
diff --git a/api/internal/handler/upstream/upstream_test.go b/api/internal/handler/upstream/upstream_test.go
new file mode 100644
index 0000000..b58e053
--- /dev/null
+++ b/api/internal/handler/upstream/upstream_test.go
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package upstream
+
+import (
+	"encoding/json"
+	"testing"
+	"time"
+
+	"github.com/shiningrush/droplet"
+	"github.com/stretchr/testify/assert"
+
+	"github.com/apisix/manager-api/internal/core/entity"
+	"github.com/apisix/manager-api/internal/core/storage"
+	"github.com/apisix/manager-api/internal/core/store"
+)
+
+func TestUpstream(t *testing.T) {
+	// init
+	err := storage.InitETCDClient([]string{"127.0.0.1:2379"})
+	assert.Nil(t, err)
+	err = store.InitStores()
+	assert.Nil(t, err)
+
+	handler := &Handler{
+		upstreamStore: store.GetStore(store.HubKeyUpstream),
+	}
+	assert.NotNil(t, handler)
+
+	//create
+	ctx := droplet.NewContext()
+	upstream := &entity.Upstream{}
+	reqBody := `{
+    "id": "1",
+    "name": "upstream3",
+    "description": "upstream upstream",
+    "type": "roundrobin",
+    "nodes": [{
+              "host": "a.a.com",
+              "port": 80,
+              "weight": 1
+          }],
+    "timeout":{
+      "connect":15,
+          "send":15,
+          "read":15
+    },
+    "enable_websocket": true,
+      "hash_on": "header",
+      "key": "server_addr",
+      "checks": {
+          "active": {
+              "timeout": 5,
+              "http_path": "/status",
+              "host": "foo.com",
+              "healthy": {
+                  "interval": 2,
+                  "successes": 1
+              },
+              "unhealthy": {
+                  "interval": 1,
+                  "http_failures": 2
+              },
+              "req_headers": ["User-Agent: curl/7.29.0"]
+          },
+          "passive": {
+              "healthy": {
+                  "http_statuses": [200, 201],
+                  "successes": 3
+              },
+              "unhealthy": {
+                  "http_statuses": [500],
+                  "http_failures": 3,
+                  "tcp_failures": 3
+              }
+          }
+      }
+  }`
+	json.Unmarshal([]byte(reqBody), upstream)
+	ctx.SetInput(upstream)
+	_, err = handler.Create(ctx)
+	assert.Nil(t, err)
+
+	// give the store a moment to sync the new upstream before reading it back
+	time.Sleep(100 * time.Millisecond)
+
+	//get
+	input := &GetInput{}
+	input.ID = "1"
+	ctx.SetInput(input)
+	ret, err := handler.Get(ctx)
+	stored := ret.(*entity.Upstream)
+	assert.Nil(t, err)
+	assert.Equal(t, stored.ID, upstream.ID)
+
+	//update
+	upstream2 := &UpdateInput{}
+	upstream2.ID = "1"
+	reqBody = `{
+    "id": "aaa",
+    "name": "upstream3",
+    "description": "upstream upstream",
+    "type": "roundrobin",
+    "nodes": [{
+              "host": "a.a.com",
+              "port": 80,
+              "weight": 1
+          }],
+    "timeout":{
+      "connect":15,
+          "send":15,
+          "read":15
+    },
+    "enable_websocket": true,
+      "hash_on": "header",
+      "key": "server_addr",
+      "checks": {
+          "active": {
+              "timeout": 5,
+              "http_path": "/status",
+              "host": "foo.com",
+              "healthy": {
+                  "interval": 2,
+                  "successes": 1
+              },
+              "unhealthy": {
+                  "interval": 1,
+                  "http_failures": 2
+              },
+              "req_headers": ["User-Agent: curl/7.29.0"]
+          },
+          "passive": {
+              "healthy": {
+                  "http_statuses": [200, 201],
+                  "successes": 3
+              },
+              "unhealthy": {
+                  "http_statuses": [500],
+                  "http_failures": 3,
+                  "tcp_failures": 3
+              }
+          }
+      }
+  }`
+	json.Unmarshal([]byte(reqBody), upstream2)
+	ctx.SetInput(upstream2)
+	_, err = handler.Update(ctx)
+	assert.Nil(t, err)
+
+	//list
+	listInput := &ListInput{}
+	reqBody = `{"pageSize": 1, "page": 1}`
+	json.Unmarshal([]byte(reqBody), listInput)
+	ctx.SetInput(listInput)
+	retPage, err := handler.List(ctx)
+	assert.Nil(t, err)
+	dataPage := retPage.(*store.ListOutput)
+	assert.Equal(t, len(dataPage.Rows), 1)
+
+	//delete test data
+	inputDel := &BatchDelete{}
+	reqBody = `{"ids": "1"}`
+	json.Unmarshal([]byte(reqBody), inputDel)
+	ctx.SetInput(inputDel)
+	_, err = handler.BatchDelete(ctx)
+	assert.Nil(t, err)
+
+}
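
The upstream fixture enables both active and passive health checks. As a rough illustration of that shape (the structs below are a reading aid based on the test payload; the real entity is validated against the JSON schema synced from APISIX, not against these types), the checks block can be decoded like this:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Illustrative health-check types based on the test payload only.
    type ActiveCheck struct {
        Timeout    int      `json:"timeout"`
        HTTPPath   string   `json:"http_path"`
        Host       string   `json:"host"`
        ReqHeaders []string `json:"req_headers"`
    }

    type PassiveCheck struct {
        Healthy struct {
            HTTPStatuses []int `json:"http_statuses"`
            Successes    int   `json:"successes"`
        } `json:"healthy"`
        Unhealthy struct {
            HTTPStatuses []int `json:"http_statuses"`
            HTTPFailures int   `json:"http_failures"`
            TCPFailures  int   `json:"tcp_failures"`
        } `json:"unhealthy"`
    }

    type Checks struct {
        Active  ActiveCheck  `json:"active"`
        Passive PassiveCheck `json:"passive"`
    }

    func main() {
        raw := `{"active":{"timeout":5,"http_path":"/status","host":"foo.com",
                 "req_headers":["User-Agent: curl/7.29.0"]},
                 "passive":{"healthy":{"http_statuses":[200,201],"successes":3},
                 "unhealthy":{"http_statuses":[500],"http_failures":3,"tcp_failures":3}}}`

        var c Checks
        if err := json.Unmarshal([]byte(raw), &c); err != nil {
            panic(err)
        }
        fmt.Println(c.Active.HTTPPath, c.Passive.Unhealthy.HTTPFailures) // prints: /status 3
    }
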