You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2018/08/08 17:53:54 UTC

[GitHub] yuxiangw closed pull request #10779: An example of differentially private deep learning using mxnet gluon

yuxiangw closed pull request #10779: An example of differentially private deep learning using mxnet gluon
URL: https://github.com/apache/incubator-mxnet/pull/10779
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/example/private-learning/README.md b/example/private-learning/README.md
new file mode 100644
index 00000000000..752ea171168
--- /dev/null
+++ b/example/private-learning/README.md
@@ -0,0 +1,11 @@
+## Differentially private deep learning 
+
+This example illustrates how to use `mxnet`, `gluon` and `pydiffpriv` to do differentially private deep learning (DPDL).
+
+To use this example, you need to install pydiffpriv first using
+
+`pip install pydiffpriv --upgrade`
+
+If you do not have a GPU, change the line `ctx = mx.gpu()` in the notebook to `ctx = mx.cpu()`.
+
+
diff --git a/example/private-learning/dpdl_utils.py b/example/private-learning/dpdl_utils.py
new file mode 100644
index 00000000000..1378272161e
--- /dev/null
+++ b/example/private-learning/dpdl_utils.py
@@ -0,0 +1,98 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This utility module allows one to access the mxnet.gluon.ParameterDict
+# and its corresponding gradients as a list of ndarray
+
+# Functions to update, manipulate and reset this list of ndarrays back and forth
+# are included in this module
+
+# Author: Yu-Xiang Wang
+
+from mxnet import nd
+import mxnet as mx
+
+
+def initialize_grad(params,ctx=mx.cpu()):
+    """Return a list of zero ndarrays shaped like the entries of `params`.
+
+    One zero ndarray is created per parameter in the ParameterDict, each
+    allocated on `ctx`, so the returned list can accumulate gradients
+    alongside the model parameters.
+    """
+    a=[]
+    for param in params.values():
+        a.append(nd.zeros(param.shape).as_in_context(ctx))
+    return a
+
+
+def reset_grad(grads):
+    """Zero every ndarray in the list `grads`, in place."""
+    for grad in grads:
+        grad[:] = 0
+
+
+def accumuate_grad(grads, params, thresh):  # NOTE(review): "accumuate" is a typo for "accumulate"; name kept because the notebook calls it
+    """Accumulate the norm-clipped gradients of `params` into `grads`, in place.
+
+    The current gradient attached to `params` is rescaled so that its
+    Euclidean norm is at most `thresh` (the standard per-example clipping
+    step of differentially private SGD), then added to `grads`.
+    """
+    tmp=grad_norm_in_params(params)
+
+    # Scale down only when the gradient norm exceeds the threshold.
+    if tmp > thresh:
+        factor = thresh / tmp
+    else:
+        factor = 1.0
+
+    for grad, param in zip(grads, params.values()):
+        grad[:] += param.grad() * factor
+
+def accumulate_params(param_cumu, params, n):
+    """Fold the n-th parameter sample into the running average `param_cumu`.
+
+    Implements the incremental mean, in place:
+    avg_n = avg_{n-1} * (n-1)/n + x_n / n.
+    """
+    for param2,param in zip(param_cumu, params.values()):
+        param2[:] = param2 *(n-1)/n + param.data() /n
+
+def iir_filter(mm,gg,beta,order):  # accumulates the gradients / second moments as in Adam
+    """Exponential moving average, in place: m = beta*m + (1-beta)*g**order.
+
+    With order=1 this tracks the first moment of the gradients in `gg`;
+    with order=2 it tracks the second moment (as used by Adam).
+    """
+    for m,g in zip(mm,gg):
+        m[:] = beta*m + (1-beta)*(g**order)
+
+
+def extract_grad(params, grads):
+    """Copy the gradients attached to `params` into the list `grads`, in place."""
+    for param,grad in zip(params.values(), grads):
+        grad[:] = param.grad()
+
+def grad_norm_in_params(params):
+    """Return the Euclidean norm of the gradients attached to `params`.
+
+    Sums the squared entries of every parameter's gradient and returns
+    the square root, as a Python float.
+    """
+    a=0
+    for item in params.values():
+        a += nd.sum(item.grad() ** 2).asscalar()
+    return a ** 0.5
+
+
+def grad_norm(grads):
+    """Return the Euclidean norm of the ndarrays in the list `grads`.
+
+    Sums the squared entries of every ndarray and returns the square
+    root, as a Python float.
+    """
+    a=0
+    for item in grads:
+        a += nd.sum(item ** 2).asscalar()
+    return a ** 0.5
+
+
+def grad_rescale(grads, k):
+    """scale the gradient by a factor of k"""
+    y = grads.deepcopy()
+    for item in y:
+        item[:] = item * k
+    return y # return the parameters
+
+
+def grad_add(grads_batch):
+    """add up the list of gradients in lists"""
+    y = grads_batch[0].deepcopy()
+    for xx in grads_batch:
+        for item1,item2 in zip(xx,y):
+            item2 += item1
+    return y # return the parameters with a different gradient
diff --git a/example/private-learning/example_dpdl.ipynb b/example/private-learning/example_dpdl.ipynb
new file mode 100644
index 00000000000..fb4f44b481f
--- /dev/null
+++ b/example/private-learning/example_dpdl.ipynb
@@ -0,0 +1,996 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Train a deep learning model with differential privacy"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# import packages for DP\n",
+    "from pydiffpriv import cgfbank, dpacct\n",
+    "\n",
+    "# import packages needed for deep learning\n",
+    "import mxnet as mx\n",
+    "from mxnet import nd, autograd\n",
+    "from mxnet import gluon\n",
+    "import dpdl_utils\n",
+    "\n",
+    "ctx = mx.gpu()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Get data:  standard MNIST"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "mnist = mx.test_utils.get_mnist()\n",
+    "num_inputs = 784\n",
+    "num_outputs = 10\n",
+    "batch_size = 1 # this is set to get per-example gradient"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "train_data = mx.io.NDArrayIter(mnist[\"train_data\"], mnist[\"train_label\"],\n",
+    "                               batch_size, shuffle=True)\n",
+    "test_data = mx.io.NDArrayIter(mnist[\"test_data\"], mnist[\"test_label\"],\n",
+    "                              64, shuffle=True)\n",
+    "train_data2 = mx.io.NDArrayIter(mnist[\"train_data\"], mnist[\"train_label\"],\n",
+    "                               64, shuffle=True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Build a one hidden layer NN with Gluon"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "num_hidden = 1000\n",
+    "net = gluon.nn.HybridSequential()\n",
+    "with net.name_scope():\n",
+    "    net.add(gluon.nn.Dense(num_hidden, in_units=num_inputs,activation=\"relu\"))\n",
+    "    net.add(gluon.nn.Dense(num_outputs,in_units=num_hidden))\n",
+    "\n",
+    "# get and save the parameters\n",
+    "params = net.collect_params()\n",
+    "params.initialize(mx.init.Xavier(magnitude=2.24), ctx=ctx)\n",
+    "params.setattr('grad_req', 'write')\n",
+    "    \n",
+    "# define loss function\n",
+    "softmax_cross_entropy = gluon.loss.SoftmaxCrossEntropyLoss()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Use a new optimizer called privateSGD\n",
+    "Basically, we add Gaussian noise to the stochastic gradient."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# define the update rule\n",
+    "def privateSGD(x, g, lr, sigma,wd=0.0,ctx=mx.cpu()):\n",
+    "    for (param,grad) in zip(x.values(), g):\n",
+    "        v=param.data()\n",
+    "        v[:] = v - lr * (grad +wd*v+ sigma*nd.random_normal(shape = grad.shape).as_in_context(ctx))\n",
+    "# Utility function to evaluate error\n",
+    "\n",
+    "def evaluate_accuracy(data_iterator, net):\n",
+    "    acc = mx.metric.Accuracy()\n",
+    "    loss_fun = .0\n",
+    "    data_iterator.reset()\n",
+    "    for i, batch in enumerate(data_iterator):\n",
+    "        data = batch.data[0].as_in_context(ctx).reshape((-1, 784))\n",
+    "        label = batch.label[0].as_in_context(ctx)\n",
+    "        output = net(data)\n",
+    "        predictions = nd.argmax(output, axis=1)\n",
+    "        acc.update(preds=predictions, labels=label)\n",
+    "        loss = softmax_cross_entropy(output, label)\n",
+    "        loss_fun = loss_fun*i/(i+1) + nd.mean(loss).asscalar()/(i+1)\n",
+    "    return acc.get()[1], loss_fun"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Now let's try attaching a privacy accountant to this data set"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# declare a moment accountant from pydiffpriv\n",
+    "DPobject = dpacct.anaCGFAcct()\n",
+    "\n",
+    "# Specify privacy specific inputs\n",
+    "thresh = 4.0 # limit the norm of individual gradient\n",
+    "sigma = thresh\n",
+    "\n",
+    "delta = 1e-5\n",
+    "\n",
+    "func = lambda x: cgfbank.CGF_gaussian({'sigma': sigma/thresh}, x)\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## We now specify the parameters needed for learning"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# \n",
+    "epochs = 10\n",
+    "learning_rate = .1\n",
+    "\n",
+    "n = train_data.num_data\n",
+    "batchsz = 100 #\n",
+    "\n",
+    "count = 0\n",
+    "niter=0\n",
+    "moving_loss = 0\n",
+    "\n",
+    "grads = dpdl_utils.initialize_grad(params,ctx=ctx)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Let's start then!"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# declare a few place holder for logging\n",
+    "logs = {}\n",
+    "logs['eps'] = []\n",
+    "logs['loss'] = []\n",
+    "logs['MAloss'] = []\n",
+    "logs['train_acc'] = []\n",
+    "logs['test_acc'] = []"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/usr/local/lib/python2.7/dist-packages/scipy/optimize/_minimize.py:600: RuntimeWarning: Method 'bounded' does not support relative tolerance in x; defaulting to absolute tolerance.\n",
+      "  \"defaulting to absolute tolerance.\", RuntimeWarning)\n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[10] Loss: 2.06943768484. Privacy loss: eps = 0.971358107778, delta = 1e-05 \n",
+      "[20] Loss: 1.84368190734. Privacy loss: eps = 0.982252802702, delta = 1e-05 \n",
+      "[30] Loss: 1.57019112369. Privacy loss: eps = 0.993147497626, delta = 1e-05 \n",
+      "[40] Loss: 1.35744524226. Privacy loss: eps = 1.00404219255, delta = 1e-05 \n",
+      "[50] Loss: 1.15318951282. Privacy loss: eps = 1.01493688747, delta = 1e-05 \n",
+      "[60] Loss: 0.984449914272. Privacy loss: eps = 1.0258315824, delta = 1e-05 \n",
+      "[70] Loss: 0.910787531798. Privacy loss: eps = 1.03672627732, delta = 1e-05 \n",
+      "[80] Loss: 0.790613026683. Privacy loss: eps = 1.04762097225, delta = 1e-05 \n",
+      "[90] Loss: 0.729610366731. Privacy loss: eps = 1.05558451198, delta = 1e-05 \n",
+      "[100] Loss: 0.68380270946. Privacy loss: eps = 1.05671841883, delta = 1e-05 \n",
+      "[110] Loss: 0.626570992514. Privacy loss: eps = 1.05793876326, delta = 1e-05 \n",
+      "[120] Loss: 0.610082364481. Privacy loss: eps = 1.05928483483, delta = 1e-05 \n",
+      "[130] Loss: 0.591632013549. Privacy loss: eps = 1.05986112332, delta = 1e-05 \n",
+      "[140] Loss: 0.589817150998. Privacy loss: eps = 1.06106607868, delta = 1e-05 \n",
+      "[150] Loss: 0.568649335098. Privacy loss: eps = 1.06214004147, delta = 1e-05 \n",
+      "[160] Loss: 0.560678415022. Privacy loss: eps = 1.06296268762, delta = 1e-05 \n",
+      "[170] Loss: 0.510013231497. Privacy loss: eps = 1.06471616838, delta = 1e-05 \n",
+      "[180] Loss: 0.533987376838. Privacy loss: eps = 1.06522926248, delta = 1e-05 \n",
+      "[190] Loss: 0.475320562285. Privacy loss: eps = 1.06613796548, delta = 1e-05 \n",
+      "[200] Loss: 0.500565005432. Privacy loss: eps = 1.06744615878, delta = 1e-05 \n",
+      "[210] Loss: 0.502786411187. Privacy loss: eps = 1.06867691484, delta = 1e-05 \n",
+      "[220] Loss: 0.539595362648. Privacy loss: eps = 1.06874471978, delta = 1e-05 \n",
+      "[230] Loss: 0.493377678304. Privacy loss: eps = 1.06983388459, delta = 1e-05 \n",
+      "[240] Loss: 0.489991231642. Privacy loss: eps = 1.07144822359, delta = 1e-05 \n",
+      "[250] Loss: 0.513111550756. Privacy loss: eps = 1.0730441224, delta = 1e-05 \n",
+      "[260] Loss: 0.470535297862. Privacy loss: eps = 1.07408035, delta = 1e-05 \n",
+      "[270] Loss: 0.44762081367. Privacy loss: eps = 1.07394060927, delta = 1e-05 \n",
+      "[280] Loss: 0.446922983993. Privacy loss: eps = 1.07437725537, delta = 1e-05 \n",
+      "[290] Loss: 0.431479100306. Privacy loss: eps = 1.07541609197, delta = 1e-05 \n",
+      "[300] Loss: 0.430995691873. Privacy loss: eps = 1.07684758682, delta = 1e-05 \n",
+      "[310] Loss: 0.40416618629. Privacy loss: eps = 1.07825104228, delta = 1e-05 \n",
+      "[320] Loss: 0.393726859071. Privacy loss: eps = 1.07962895191, delta = 1e-05 \n",
+      "[330] Loss: 0.398488523765. Privacy loss: eps = 1.08098352215, delta = 1e-05 \n",
+      "[340] Loss: 0.416648330793. Privacy loss: eps = 1.08231671243, delta = 1e-05 \n",
+      "[350] Loss: 0.411217850224. Privacy loss: eps = 1.08363026882, delta = 1e-05 \n",
+      "[360] Loss: 0.440469309187. Privacy loss: eps = 1.08492575231, delta = 1e-05 \n",
+      "[370] Loss: 0.425072934575. Privacy loss: eps = 1.08540745197, delta = 1e-05 \n",
+      "[380] Loss: 0.397746497497. Privacy loss: eps = 1.0856792819, delta = 1e-05 \n",
+      "[390] Loss: 0.410262027337. Privacy loss: eps = 1.08671801809, delta = 1e-05 \n",
+      "[400] Loss: 0.413824302512. Privacy loss: eps = 1.08775675429, delta = 1e-05 \n",
+      "[410] Loss: 0.424365740075. Privacy loss: eps = 1.08879549048, delta = 1e-05 \n",
+      "[420] Loss: 0.448497539247. Privacy loss: eps = 1.08983422667, delta = 1e-05 \n",
+      "[430] Loss: 0.445240583511. Privacy loss: eps = 1.09087296286, delta = 1e-05 \n",
+      "[440] Loss: 0.439980964031. Privacy loss: eps = 1.09191169906, delta = 1e-05 \n",
+      "[450] Loss: 0.500597951575. Privacy loss: eps = 1.09295043525, delta = 1e-05 \n",
+      "[460] Loss: 0.450033899991. Privacy loss: eps = 1.09398917144, delta = 1e-05 \n",
+      "[470] Loss: 0.448611723875. Privacy loss: eps = 1.09435099753, delta = 1e-05 \n",
+      "[480] Loss: 0.440667823808. Privacy loss: eps = 1.09533968218, delta = 1e-05 \n",
+      "[490] Loss: 0.456676732033. Privacy loss: eps = 1.09632836682, delta = 1e-05 \n",
+      "[500] Loss: 0.434587862485. Privacy loss: eps = 1.09731705147, delta = 1e-05 \n",
+      "[510] Loss: 0.429920575675. Privacy loss: eps = 1.09830573612, delta = 1e-05 \n",
+      "[520] Loss: 0.437657698538. Privacy loss: eps = 1.09929442076, delta = 1e-05 \n",
+      "[530] Loss: 0.439406347625. Privacy loss: eps = 1.10028310541, delta = 1e-05 \n",
+      "[540] Loss: 0.470364095778. Privacy loss: eps = 1.10127179006, delta = 1e-05 \n",
+      "[550] Loss: 0.443624512549. Privacy loss: eps = 1.1022604747, delta = 1e-05 \n",
+      "[560] Loss: 0.492805233106. Privacy loss: eps = 1.10324915935, delta = 1e-05 \n",
+      "[570] Loss: 0.463925216622. Privacy loss: eps = 1.104237844, delta = 1e-05 \n",
+      "[580] Loss: 0.514945238281. Privacy loss: eps = 1.10522652864, delta = 1e-05 \n",
+      "[590] Loss: 0.46533568925. Privacy loss: eps = 1.10621521329, delta = 1e-05 \n",
+      "[600] Loss: 0.453799217353. Privacy loss: eps = 1.10720389794, delta = 1e-05 \n",
+      "Net: Epoch 0. Train Loss: 0.431228026954, Test Loss: 0.396569612157, Train_acc 0.880746934968, Test_acc 0.88863455414\n",
+      "[610] Loss: 0.401522509926. Privacy loss: eps = 1.10819258258, delta = 1e-05 \n",
+      "[620] Loss: 0.439949518357. Privacy loss: eps = 1.10918126723, delta = 1e-05 \n",
+      "[630] Loss: 0.439447330654. Privacy loss: eps = 1.11016995188, delta = 1e-05 \n",
+      "[640] Loss: 0.474663362432. Privacy loss: eps = 1.11115863652, delta = 1e-05 \n",
+      "[650] Loss: 0.407918588787. Privacy loss: eps = 1.11214732117, delta = 1e-05 \n",
+      "[660] Loss: 0.367338833. Privacy loss: eps = 1.11313600582, delta = 1e-05 \n",
+      "[670] Loss: 0.390222178197. Privacy loss: eps = 1.11412469046, delta = 1e-05 \n",
+      "[680] Loss: 0.421657112204. Privacy loss: eps = 1.11511337511, delta = 1e-05 \n",
+      "[690] Loss: 0.402939604989. Privacy loss: eps = 1.11610205976, delta = 1e-05 \n",
+      "[700] Loss: 0.439577195602. Privacy loss: eps = 1.1170907444, delta = 1e-05 \n",
+      "[710] Loss: 0.430496710969. Privacy loss: eps = 1.11807942905, delta = 1e-05 \n",
+      "[720] Loss: 0.459983484699. Privacy loss: eps = 1.1190681137, delta = 1e-05 \n",
+      "[730] Loss: 0.451340284462. Privacy loss: eps = 1.12005679834, delta = 1e-05 \n",
+      "[740] Loss: 0.417040587925. Privacy loss: eps = 1.12104548299, delta = 1e-05 \n",
+      "[750] Loss: 0.415409131591. Privacy loss: eps = 1.12203416764, delta = 1e-05 \n",
+      "[760] Loss: 0.4497556925. Privacy loss: eps = 1.12302285228, delta = 1e-05 \n",
+      "[770] Loss: 0.413815534886. Privacy loss: eps = 1.12401153693, delta = 1e-05 \n",
+      "[780] Loss: 0.423136311098. Privacy loss: eps = 1.12500022158, delta = 1e-05 \n",
+      "[790] Loss: 0.441771753681. Privacy loss: eps = 1.12598890622, delta = 1e-05 \n",
+      "[800] Loss: 0.432617555188. Privacy loss: eps = 1.12697759087, delta = 1e-05 \n",
+      "[810] Loss: 0.441119731886. Privacy loss: eps = 1.12796627552, delta = 1e-05 \n",
+      "[820] Loss: 0.406141945254. Privacy loss: eps = 1.12895496016, delta = 1e-05 \n",
+      "[830] Loss: 0.403918068141. Privacy loss: eps = 1.12994364481, delta = 1e-05 \n",
+      "[840] Loss: 0.458908673647. Privacy loss: eps = 1.13093232946, delta = 1e-05 \n",
+      "[850] Loss: 0.477745396851. Privacy loss: eps = 1.1319210141, delta = 1e-05 \n",
+      "[860] Loss: 0.393503091111. Privacy loss: eps = 1.13290969875, delta = 1e-05 \n",
+      "[870] Loss: 0.417113165043. Privacy loss: eps = 1.1338983834, delta = 1e-05 \n",
+      "[880] Loss: 0.4101525086. Privacy loss: eps = 1.13488706804, delta = 1e-05 \n",
+      "[890] Loss: 0.456332868588. Privacy loss: eps = 1.13587575269, delta = 1e-05 \n",
+      "[900] Loss: 0.450513659996. Privacy loss: eps = 1.13686443734, delta = 1e-05 \n",
+      "[910] Loss: 0.436887282029. Privacy loss: eps = 1.13785312198, delta = 1e-05 \n",
+      "[920] Loss: 0.400968878413. Privacy loss: eps = 1.13884180663, delta = 1e-05 \n",
+      "[930] Loss: 0.36592378368. Privacy loss: eps = 1.13983049128, delta = 1e-05 \n",
+      "[940] Loss: 0.441811171674. Privacy loss: eps = 1.14081917592, delta = 1e-05 \n",
+      "[950] Loss: 0.416963096079. Privacy loss: eps = 1.14180786057, delta = 1e-05 \n",
+      "[960] Loss: 0.425341565716. Privacy loss: eps = 1.14279654522, delta = 1e-05 \n",
+      "[970] Loss: 0.415913221378. Privacy loss: eps = 1.14378522986, delta = 1e-05 \n",
+      "[980] Loss: 0.381859271375. Privacy loss: eps = 1.14477391451, delta = 1e-05 \n",
+      "[990] Loss: 0.407644492042. Privacy loss: eps = 1.14576259916, delta = 1e-05 \n",
+      "[1000] Loss: 0.39315528008. Privacy loss: eps = 1.1467512838, delta = 1e-05 \n",
+      "[1010] Loss: 0.378491621232. Privacy loss: eps = 1.14773996845, delta = 1e-05 \n",
+      "[1020] Loss: 0.382205013901. Privacy loss: eps = 1.1487286531, delta = 1e-05 \n",
+      "[1030] Loss: 0.398064184315. Privacy loss: eps = 1.14971733774, delta = 1e-05 \n",
+      "[1040] Loss: 0.424013992286. Privacy loss: eps = 1.15070602239, delta = 1e-05 \n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[1050] Loss: 0.37405346199. Privacy loss: eps = 1.15169470704, delta = 1e-05 \n",
+      "[1060] Loss: 0.379020628075. Privacy loss: eps = 1.15268339168, delta = 1e-05 \n",
+      "[1070] Loss: 0.481569697532. Privacy loss: eps = 1.15367207633, delta = 1e-05 \n",
+      "[1080] Loss: 0.414652383963. Privacy loss: eps = 1.15466076098, delta = 1e-05 \n",
+      "[1090] Loss: 0.421357898483. Privacy loss: eps = 1.15564944562, delta = 1e-05 \n",
+      "[1100] Loss: 0.443314762182. Privacy loss: eps = 1.15663813027, delta = 1e-05 \n",
+      "[1110] Loss: 0.396182640875. Privacy loss: eps = 1.15762681492, delta = 1e-05 \n",
+      "[1120] Loss: 0.36759888737. Privacy loss: eps = 1.15861549956, delta = 1e-05 \n",
+      "[1130] Loss: 0.424910554504. Privacy loss: eps = 1.15960418421, delta = 1e-05 \n",
+      "[1140] Loss: 0.423095673268. Privacy loss: eps = 1.16059286886, delta = 1e-05 \n",
+      "[1150] Loss: 0.442024398655. Privacy loss: eps = 1.1615815535, delta = 1e-05 \n",
+      "[1160] Loss: 0.436329978302. Privacy loss: eps = 1.16257023815, delta = 1e-05 \n",
+      "[1170] Loss: 0.36249281248. Privacy loss: eps = 1.1635589228, delta = 1e-05 \n",
+      "[1180] Loss: 0.377976277585. Privacy loss: eps = 1.16454760744, delta = 1e-05 \n",
+      "[1190] Loss: 0.384578396717. Privacy loss: eps = 1.16553629209, delta = 1e-05 \n",
+      "[1200] Loss: 0.391271105851. Privacy loss: eps = 1.16652497674, delta = 1e-05 \n",
+      "Net: Epoch 1. Train Loss: 0.403571312377, Test Loss: 0.376558056374, Train_acc 0.892307436034, Test_acc 0.898288216561\n",
+      "[1210] Loss: 0.374481910543. Privacy loss: eps = 1.16751366138, delta = 1e-05 \n",
+      "[1220] Loss: 0.377295080495. Privacy loss: eps = 1.16850234603, delta = 1e-05 \n",
+      "[1230] Loss: 0.392906004105. Privacy loss: eps = 1.16949103068, delta = 1e-05 \n",
+      "[1240] Loss: 0.402826208312. Privacy loss: eps = 1.17047971532, delta = 1e-05 \n",
+      "[1250] Loss: 0.390231937314. Privacy loss: eps = 1.17146839997, delta = 1e-05 \n",
+      "[1260] Loss: 0.45121118638. Privacy loss: eps = 1.17245708462, delta = 1e-05 \n",
+      "[1270] Loss: 0.476477118855. Privacy loss: eps = 1.17344576926, delta = 1e-05 \n",
+      "[1280] Loss: 0.459138805115. Privacy loss: eps = 1.17443445391, delta = 1e-05 \n",
+      "[1290] Loss: 0.430366161946. Privacy loss: eps = 1.17542313856, delta = 1e-05 \n",
+      "[1300] Loss: 0.383085665355. Privacy loss: eps = 1.1764118232, delta = 1e-05 \n",
+      "[1310] Loss: 0.41271591503. Privacy loss: eps = 1.17740050785, delta = 1e-05 \n",
+      "[1320] Loss: 0.355848296317. Privacy loss: eps = 1.1783891925, delta = 1e-05 \n",
+      "[1330] Loss: 0.360290606742. Privacy loss: eps = 1.17937787714, delta = 1e-05 \n",
+      "[1340] Loss: 0.373469069196. Privacy loss: eps = 1.18036656179, delta = 1e-05 \n",
+      "[1350] Loss: 0.400550847206. Privacy loss: eps = 1.18135524644, delta = 1e-05 \n",
+      "[1360] Loss: 0.38328552058. Privacy loss: eps = 1.18234393108, delta = 1e-05 \n",
+      "[1370] Loss: 0.405612952807. Privacy loss: eps = 1.18333261573, delta = 1e-05 \n",
+      "[1380] Loss: 0.394957559153. Privacy loss: eps = 1.18432130038, delta = 1e-05 \n",
+      "[1390] Loss: 0.44163472749. Privacy loss: eps = 1.18530998502, delta = 1e-05 \n",
+      "[1400] Loss: 0.481922038078. Privacy loss: eps = 1.18629866967, delta = 1e-05 \n",
+      "[1410] Loss: 0.407323431589. Privacy loss: eps = 1.18728735432, delta = 1e-05 \n",
+      "[1420] Loss: 0.438033435644. Privacy loss: eps = 1.18827603896, delta = 1e-05 \n",
+      "[1430] Loss: 0.429398041409. Privacy loss: eps = 1.18926472361, delta = 1e-05 \n",
+      "[1440] Loss: 0.389194110528. Privacy loss: eps = 1.19025340826, delta = 1e-05 \n",
+      "[1450] Loss: 0.351017278217. Privacy loss: eps = 1.1912420929, delta = 1e-05 \n",
+      "[1460] Loss: 0.366874359558. Privacy loss: eps = 1.19223077755, delta = 1e-05 \n",
+      "[1470] Loss: 0.360052680735. Privacy loss: eps = 1.1932194622, delta = 1e-05 \n",
+      "[1480] Loss: 0.392302064896. Privacy loss: eps = 1.19420814684, delta = 1e-05 \n",
+      "[1490] Loss: 0.361739760962. Privacy loss: eps = 1.19519683149, delta = 1e-05 \n",
+      "[1500] Loss: 0.391094844236. Privacy loss: eps = 1.19618551614, delta = 1e-05 \n",
+      "[1510] Loss: 0.352081015263. Privacy loss: eps = 1.19717420078, delta = 1e-05 \n",
+      "[1520] Loss: 0.36378527082. Privacy loss: eps = 1.19816288543, delta = 1e-05 \n",
+      "[1530] Loss: 0.372512590751. Privacy loss: eps = 1.19915157008, delta = 1e-05 \n",
+      "[1540] Loss: 0.362664950008. Privacy loss: eps = 1.20014025472, delta = 1e-05 \n",
+      "[1550] Loss: 0.389086511458. Privacy loss: eps = 1.20112893937, delta = 1e-05 \n",
+      "[1560] Loss: 0.375088195025. Privacy loss: eps = 1.20211762402, delta = 1e-05 \n",
+      "[1570] Loss: 0.416500473962. Privacy loss: eps = 1.20310630866, delta = 1e-05 \n",
+      "[1580] Loss: 0.333525471279. Privacy loss: eps = 1.20409499331, delta = 1e-05 \n",
+      "[1590] Loss: 0.350306544967. Privacy loss: eps = 1.20508367796, delta = 1e-05 \n",
+      "[1600] Loss: 0.374002873453. Privacy loss: eps = 1.2060723626, delta = 1e-05 \n",
+      "[1610] Loss: 0.407660834647. Privacy loss: eps = 1.20706104725, delta = 1e-05 \n",
+      "[1620] Loss: 0.361907730476. Privacy loss: eps = 1.2080497319, delta = 1e-05 \n",
+      "[1630] Loss: 0.35330274856. Privacy loss: eps = 1.20903841654, delta = 1e-05 \n",
+      "[1640] Loss: 0.390312679763. Privacy loss: eps = 1.21002710119, delta = 1e-05 \n",
+      "[1650] Loss: 0.4073261256. Privacy loss: eps = 1.21101578584, delta = 1e-05 \n",
+      "[1660] Loss: 0.413463891154. Privacy loss: eps = 1.21200447048, delta = 1e-05 \n",
+      "[1670] Loss: 0.356219219931. Privacy loss: eps = 1.21299315513, delta = 1e-05 \n",
+      "[1680] Loss: 0.405560850598. Privacy loss: eps = 1.21398183978, delta = 1e-05 \n",
+      "[1690] Loss: 0.409493609305. Privacy loss: eps = 1.21497052442, delta = 1e-05 \n",
+      "[1700] Loss: 0.391578167574. Privacy loss: eps = 1.21595920907, delta = 1e-05 \n",
+      "[1710] Loss: 0.388605422573. Privacy loss: eps = 1.21694789372, delta = 1e-05 \n",
+      "[1720] Loss: 0.415724566375. Privacy loss: eps = 1.21793657836, delta = 1e-05 \n",
+      "[1730] Loss: 0.361701139549. Privacy loss: eps = 1.21892526301, delta = 1e-05 \n",
+      "[1740] Loss: 0.382185041154. Privacy loss: eps = 1.21991394766, delta = 1e-05 \n",
+      "[1750] Loss: 0.373045655717. Privacy loss: eps = 1.2209026323, delta = 1e-05 \n",
+      "[1760] Loss: 0.409268181164. Privacy loss: eps = 1.22189131695, delta = 1e-05 \n",
+      "[1770] Loss: 0.440172443146. Privacy loss: eps = 1.2228800016, delta = 1e-05 \n",
+      "[1780] Loss: 0.437376461146. Privacy loss: eps = 1.22386868624, delta = 1e-05 \n",
+      "[1790] Loss: 0.397955467735. Privacy loss: eps = 1.22485737089, delta = 1e-05 \n",
+      "[1800] Loss: 0.372872465941. Privacy loss: eps = 1.22584605554, delta = 1e-05 \n",
+      "Net: Epoch 2. Train Loss: 0.385458087394, Test Loss: 0.360604184828, Train_acc 0.898404184435, Test_acc 0.903861464968\n",
+      "[1810] Loss: 0.371263439689. Privacy loss: eps = 1.22683474018, delta = 1e-05 \n",
+      "[1820] Loss: 0.34732978032. Privacy loss: eps = 1.22782342483, delta = 1e-05 \n",
+      "[1830] Loss: 0.319434051717. Privacy loss: eps = 1.22881210948, delta = 1e-05 \n",
+      "[1840] Loss: 0.355759015872. Privacy loss: eps = 1.22980079412, delta = 1e-05 \n",
+      "[1850] Loss: 0.366003181863. Privacy loss: eps = 1.23078947877, delta = 1e-05 \n",
+      "[1860] Loss: 0.36173130665. Privacy loss: eps = 1.23177816342, delta = 1e-05 \n",
+      "[1870] Loss: 0.356638315535. Privacy loss: eps = 1.23276684806, delta = 1e-05 \n",
+      "[1880] Loss: 0.341243483035. Privacy loss: eps = 1.23375553271, delta = 1e-05 \n",
+      "[1890] Loss: 0.355952164677. Privacy loss: eps = 1.23474421736, delta = 1e-05 \n",
+      "[1900] Loss: 0.339160214528. Privacy loss: eps = 1.235732902, delta = 1e-05 \n",
+      "[1910] Loss: 0.382213951992. Privacy loss: eps = 1.23672158665, delta = 1e-05 \n",
+      "[1920] Loss: 0.387558392182. Privacy loss: eps = 1.2377102713, delta = 1e-05 \n",
+      "[1930] Loss: 0.391194002486. Privacy loss: eps = 1.23869895594, delta = 1e-05 \n",
+      "[1940] Loss: 0.35331510957. Privacy loss: eps = 1.23968764059, delta = 1e-05 \n",
+      "[1950] Loss: 0.338300289397. Privacy loss: eps = 1.24067632524, delta = 1e-05 \n",
+      "[1960] Loss: 0.404437957437. Privacy loss: eps = 1.24166500988, delta = 1e-05 \n",
+      "[1970] Loss: 0.350907407664. Privacy loss: eps = 1.24265369453, delta = 1e-05 \n",
+      "[1980] Loss: 0.392694346562. Privacy loss: eps = 1.24364237918, delta = 1e-05 \n",
+      "[1990] Loss: 0.363571750302. Privacy loss: eps = 1.24463106382, delta = 1e-05 \n",
+      "[2000] Loss: 0.390339702768. Privacy loss: eps = 1.24561974847, delta = 1e-05 \n",
+      "[2010] Loss: 0.390034063917. Privacy loss: eps = 1.24660843312, delta = 1e-05 \n",
+      "[2020] Loss: 0.434966411582. Privacy loss: eps = 1.24759711776, delta = 1e-05 \n",
+      "[2030] Loss: 0.481813393588. Privacy loss: eps = 1.24858580241, delta = 1e-05 \n",
+      "[2040] Loss: 0.44467230802. Privacy loss: eps = 1.24957448706, delta = 1e-05 \n",
+      "[2050] Loss: 0.392401446768. Privacy loss: eps = 1.2505631717, delta = 1e-05 \n",
+      "[2060] Loss: 0.390777678857. Privacy loss: eps = 1.25155185635, delta = 1e-05 \n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[2070] Loss: 0.424228718343. Privacy loss: eps = 1.252540541, delta = 1e-05 \n",
+      "[2080] Loss: 0.389119848138. Privacy loss: eps = 1.25352922564, delta = 1e-05 \n",
+      "[2090] Loss: 0.351585892951. Privacy loss: eps = 1.25451791029, delta = 1e-05 \n",
+      "[2100] Loss: 0.346605920987. Privacy loss: eps = 1.25550659494, delta = 1e-05 \n",
+      "[2110] Loss: 0.393413243511. Privacy loss: eps = 1.25649527958, delta = 1e-05 \n",
+      "[2120] Loss: 0.394907557643. Privacy loss: eps = 1.25748396423, delta = 1e-05 \n",
+      "[2130] Loss: 0.390599799428. Privacy loss: eps = 1.25847264888, delta = 1e-05 \n",
+      "[2140] Loss: 0.407522489963. Privacy loss: eps = 1.25946133352, delta = 1e-05 \n",
+      "[2150] Loss: 0.337147176275. Privacy loss: eps = 1.26045001817, delta = 1e-05 \n",
+      "[2160] Loss: 0.376419239425. Privacy loss: eps = 1.26143870282, delta = 1e-05 \n",
+      "[2170] Loss: 0.362870277057. Privacy loss: eps = 1.26242738746, delta = 1e-05 \n",
+      "[2180] Loss: 0.355825397455. Privacy loss: eps = 1.26341607211, delta = 1e-05 \n",
+      "[2190] Loss: 0.369064259498. Privacy loss: eps = 1.26440475676, delta = 1e-05 \n",
+      "[2200] Loss: 0.378300851542. Privacy loss: eps = 1.2653934414, delta = 1e-05 \n",
+      "[2210] Loss: 0.334987718053. Privacy loss: eps = 1.26638212605, delta = 1e-05 \n",
+      "[2220] Loss: 0.325658457336. Privacy loss: eps = 1.2673708107, delta = 1e-05 \n",
+      "[2230] Loss: 0.381122519389. Privacy loss: eps = 1.26835949534, delta = 1e-05 \n",
+      "[2240] Loss: 0.342874310529. Privacy loss: eps = 1.26934817999, delta = 1e-05 \n",
+      "[2250] Loss: 0.371026489213. Privacy loss: eps = 1.27033686464, delta = 1e-05 \n",
+      "[2260] Loss: 0.336489877274. Privacy loss: eps = 1.27132554928, delta = 1e-05 \n",
+      "[2270] Loss: 0.382197708024. Privacy loss: eps = 1.27231423393, delta = 1e-05 \n",
+      "[2280] Loss: 0.37747245843. Privacy loss: eps = 1.27330291858, delta = 1e-05 \n",
+      "[2290] Loss: 0.422247248422. Privacy loss: eps = 1.27429160322, delta = 1e-05 \n",
+      "[2300] Loss: 0.392457284167. Privacy loss: eps = 1.27528028787, delta = 1e-05 \n",
+      "[2310] Loss: 0.418931967582. Privacy loss: eps = 1.27626897252, delta = 1e-05 \n",
+      "[2320] Loss: 0.37229806844. Privacy loss: eps = 1.27725765716, delta = 1e-05 \n",
+      "[2330] Loss: 0.405206274125. Privacy loss: eps = 1.27824634181, delta = 1e-05 \n",
+      "[2340] Loss: 0.386001956894. Privacy loss: eps = 1.27923502646, delta = 1e-05 \n",
+      "[2350] Loss: 0.389106968117. Privacy loss: eps = 1.2802237111, delta = 1e-05 \n",
+      "[2360] Loss: 0.351467174584. Privacy loss: eps = 1.28121239575, delta = 1e-05 \n",
+      "[2370] Loss: 0.368898402605. Privacy loss: eps = 1.2822010804, delta = 1e-05 \n",
+      "[2380] Loss: 0.436961187148. Privacy loss: eps = 1.28318976504, delta = 1e-05 \n",
+      "[2390] Loss: 0.366419123878. Privacy loss: eps = 1.28417844969, delta = 1e-05 \n",
+      "[2400] Loss: 0.414025385656. Privacy loss: eps = 1.28516713434, delta = 1e-05 \n",
+      "Net: Epoch 3. Train Loss: 0.37564006621, Test Loss: 0.352902804175, Train_acc 0.902268789979, Test_acc 0.907643312102\n",
+      "[2410] Loss: 0.409183899993. Privacy loss: eps = 1.28615581898, delta = 1e-05 \n",
+      "[2420] Loss: 0.382582338748. Privacy loss: eps = 1.28714450363, delta = 1e-05 \n",
+      "[2430] Loss: 0.324877903946. Privacy loss: eps = 1.28813318828, delta = 1e-05 \n",
+      "[2440] Loss: 0.345747780484. Privacy loss: eps = 1.28912187292, delta = 1e-05 \n",
+      "[2450] Loss: 0.345733313812. Privacy loss: eps = 1.29011055757, delta = 1e-05 \n",
+      "[2460] Loss: 0.364648511997. Privacy loss: eps = 1.29109924222, delta = 1e-05 \n",
+      "[2470] Loss: 0.333569013296. Privacy loss: eps = 1.29208792686, delta = 1e-05 \n",
+      "[2480] Loss: 0.32505088258. Privacy loss: eps = 1.29307661151, delta = 1e-05 \n",
+      "[2490] Loss: 0.371531492187. Privacy loss: eps = 1.29406529616, delta = 1e-05 \n",
+      "[2500] Loss: 0.35504108455. Privacy loss: eps = 1.2950539808, delta = 1e-05 \n",
+      "[2510] Loss: 0.384314682195. Privacy loss: eps = 1.29604266545, delta = 1e-05 \n",
+      "[2520] Loss: 0.372389068745. Privacy loss: eps = 1.2970313501, delta = 1e-05 \n",
+      "[2530] Loss: 0.394038779603. Privacy loss: eps = 1.29802003474, delta = 1e-05 \n",
+      "[2540] Loss: 0.351502781908. Privacy loss: eps = 1.29900871939, delta = 1e-05 \n",
+      "[2550] Loss: 0.35448776447. Privacy loss: eps = 1.29999740404, delta = 1e-05 \n",
+      "[2560] Loss: 0.368667840807. Privacy loss: eps = 1.30098608868, delta = 1e-05 \n",
+      "[2570] Loss: 0.346025850547. Privacy loss: eps = 1.30197477333, delta = 1e-05 \n",
+      "[2580] Loss: 0.382095955225. Privacy loss: eps = 1.30296345798, delta = 1e-05 \n",
+      "[2590] Loss: 0.344620590085. Privacy loss: eps = 1.30395214262, delta = 1e-05 \n",
+      "[2600] Loss: 0.367502310805. Privacy loss: eps = 1.30494082727, delta = 1e-05 \n",
+      "[2610] Loss: 0.342186451909. Privacy loss: eps = 1.30592951192, delta = 1e-05 \n",
+      "[2620] Loss: 0.349061160031. Privacy loss: eps = 1.30691819656, delta = 1e-05 \n",
+      "[2630] Loss: 0.353143350891. Privacy loss: eps = 1.30790688121, delta = 1e-05 \n",
+      "[2640] Loss: 0.361136912779. Privacy loss: eps = 1.30889556586, delta = 1e-05 \n",
+      "[2650] Loss: 0.380224750584. Privacy loss: eps = 1.3098842505, delta = 1e-05 \n",
+      "[2660] Loss: 0.338446174317. Privacy loss: eps = 1.31087293515, delta = 1e-05 \n",
+      "[2670] Loss: 0.380091509456. Privacy loss: eps = 1.3118616198, delta = 1e-05 \n",
+      "[2680] Loss: 0.335954807903. Privacy loss: eps = 1.31285030444, delta = 1e-05 \n",
+      "[2690] Loss: 0.377595206171. Privacy loss: eps = 1.31383898909, delta = 1e-05 \n",
+      "[2700] Loss: 0.354720765186. Privacy loss: eps = 1.31482767374, delta = 1e-05 \n",
+      "[2710] Loss: 0.385121836978. Privacy loss: eps = 1.31581635838, delta = 1e-05 \n",
+      "[2720] Loss: 0.346931181964. Privacy loss: eps = 1.31680504303, delta = 1e-05 \n",
+      "[2730] Loss: 0.389782464888. Privacy loss: eps = 1.31779372768, delta = 1e-05 \n",
+      "[2740] Loss: 0.397977190346. Privacy loss: eps = 1.31878241232, delta = 1e-05 \n",
+      "[2750] Loss: 0.42109143106. Privacy loss: eps = 1.31977109697, delta = 1e-05 \n",
+      "[2760] Loss: 0.392637791581. Privacy loss: eps = 1.32075978162, delta = 1e-05 \n",
+      "[2770] Loss: 0.361395930843. Privacy loss: eps = 1.32174846626, delta = 1e-05 \n",
+      "[2780] Loss: 0.355595265131. Privacy loss: eps = 1.32273715091, delta = 1e-05 \n",
+      "[2790] Loss: 0.358215839677. Privacy loss: eps = 1.32372583556, delta = 1e-05 \n",
+      "[2800] Loss: 0.350260028854. Privacy loss: eps = 1.3247145202, delta = 1e-05 \n",
+      "[2810] Loss: 0.369739870564. Privacy loss: eps = 1.32570320485, delta = 1e-05 \n",
+      "[2820] Loss: 0.394385971869. Privacy loss: eps = 1.3266918895, delta = 1e-05 \n",
+      "[2830] Loss: 0.360954259506. Privacy loss: eps = 1.32768057414, delta = 1e-05 \n",
+      "[2840] Loss: 0.381705626233. Privacy loss: eps = 1.32866925879, delta = 1e-05 \n",
+      "[2850] Loss: 0.383254043828. Privacy loss: eps = 1.32965794344, delta = 1e-05 \n",
+      "[2860] Loss: 0.383805043242. Privacy loss: eps = 1.33064662808, delta = 1e-05 \n",
+      "[2870] Loss: 0.442024170339. Privacy loss: eps = 1.33163531273, delta = 1e-05 \n",
+      "[2880] Loss: 0.440039861839. Privacy loss: eps = 1.33262399738, delta = 1e-05 \n",
+      "[2890] Loss: 0.435762427822. Privacy loss: eps = 1.33361268202, delta = 1e-05 \n",
+      "[2900] Loss: 0.391888640809. Privacy loss: eps = 1.33460136667, delta = 1e-05 \n",
+      "[2910] Loss: 0.399494229078. Privacy loss: eps = 1.33559005132, delta = 1e-05 \n",
+      "[2920] Loss: 0.42235043215. Privacy loss: eps = 1.33657873596, delta = 1e-05 \n",
+      "[2930] Loss: 0.395291353201. Privacy loss: eps = 1.33756742061, delta = 1e-05 \n",
+      "[2940] Loss: 0.357470212832. Privacy loss: eps = 1.33855610526, delta = 1e-05 \n",
+      "[2950] Loss: 0.351311716293. Privacy loss: eps = 1.3395447899, delta = 1e-05 \n",
+      "[2960] Loss: 0.35447410786. Privacy loss: eps = 1.34053347455, delta = 1e-05 \n",
+      "[2970] Loss: 0.34779906563. Privacy loss: eps = 1.3415221592, delta = 1e-05 \n",
+      "[2980] Loss: 0.340037275209. Privacy loss: eps = 1.34251084384, delta = 1e-05 \n",
+      "[2990] Loss: 0.332420952594. Privacy loss: eps = 1.34349952849, delta = 1e-05 \n",
+      "[3000] Loss: 0.420008766754. Privacy loss: eps = 1.34448821314, delta = 1e-05 \n",
+      "Net: Epoch 4. Train Loss: 0.370066567576, Test Loss: 0.348057143556, Train_acc 0.904251066098, Test_acc 0.908937101911\n",
+      "[3010] Loss: 0.403816958144. Privacy loss: eps = 1.34547689778, delta = 1e-05 \n",
+      "[3020] Loss: 0.39550833296. Privacy loss: eps = 1.34646558243, delta = 1e-05 \n",
+      "[3030] Loss: 0.381241898349. Privacy loss: eps = 1.34745426708, delta = 1e-05 \n",
+      "[3040] Loss: 0.397334272378. Privacy loss: eps = 1.34844295172, delta = 1e-05 \n",
+      "[3050] Loss: 0.385828205297. Privacy loss: eps = 1.34943163637, delta = 1e-05 \n",
+      "[3060] Loss: 0.453826942572. Privacy loss: eps = 1.35042032102, delta = 1e-05 \n",
+      "[3070] Loss: 0.43399659916. Privacy loss: eps = 1.35140900566, delta = 1e-05 \n",
+      "[3080] Loss: 0.384976718804. Privacy loss: eps = 1.35239769031, delta = 1e-05 \n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[3090] Loss: 0.358012833118. Privacy loss: eps = 1.35338637496, delta = 1e-05 \n",
+      "[3100] Loss: 0.329110279499. Privacy loss: eps = 1.3543750596, delta = 1e-05 \n",
+      "[3110] Loss: 0.331509713913. Privacy loss: eps = 1.35536374425, delta = 1e-05 \n",
+      "[3120] Loss: 0.346915179263. Privacy loss: eps = 1.3563524289, delta = 1e-05 \n",
+      "[3130] Loss: 0.328192356816. Privacy loss: eps = 1.35734111354, delta = 1e-05 \n",
+      "[3140] Loss: 0.384450558088. Privacy loss: eps = 1.35832979819, delta = 1e-05 \n",
+      "[3150] Loss: 0.392175222054. Privacy loss: eps = 1.35931848284, delta = 1e-05 \n",
+      "[3160] Loss: 0.372153162434. Privacy loss: eps = 1.36030716748, delta = 1e-05 \n",
+      "[3170] Loss: 0.382289697727. Privacy loss: eps = 1.36129585213, delta = 1e-05 \n",
+      "[3180] Loss: 0.423836290567. Privacy loss: eps = 1.36228453678, delta = 1e-05 \n",
+      "[3190] Loss: 0.355214057394. Privacy loss: eps = 1.36327322142, delta = 1e-05 \n",
+      "[3200] Loss: 0.372583125594. Privacy loss: eps = 1.36426190607, delta = 1e-05 \n",
+      "[3210] Loss: 0.354220493212. Privacy loss: eps = 1.36525059072, delta = 1e-05 \n",
+      "[3220] Loss: 0.338762711578. Privacy loss: eps = 1.36623927536, delta = 1e-05 \n",
+      "[3230] Loss: 0.399512396753. Privacy loss: eps = 1.36722796001, delta = 1e-05 \n",
+      "[3240] Loss: 0.361107631772. Privacy loss: eps = 1.36821664466, delta = 1e-05 \n",
+      "[3250] Loss: 0.37856826829. Privacy loss: eps = 1.3692053293, delta = 1e-05 \n",
+      "[3260] Loss: 0.355336035549. Privacy loss: eps = 1.37019401395, delta = 1e-05 \n",
+      "[3270] Loss: 0.389498649242. Privacy loss: eps = 1.3711826986, delta = 1e-05 \n",
+      "[3280] Loss: 0.349800276567. Privacy loss: eps = 1.37217138324, delta = 1e-05 \n",
+      "[3290] Loss: 0.350807025734. Privacy loss: eps = 1.37316006789, delta = 1e-05 \n",
+      "[3300] Loss: 0.328804608997. Privacy loss: eps = 1.37414875254, delta = 1e-05 \n",
+      "[3310] Loss: 0.334677571451. Privacy loss: eps = 1.37513743718, delta = 1e-05 \n",
+      "[3320] Loss: 0.320328915566. Privacy loss: eps = 1.37612612183, delta = 1e-05 \n",
+      "[3330] Loss: 0.319517964336. Privacy loss: eps = 1.37711480648, delta = 1e-05 \n",
+      "[3340] Loss: 0.324593909251. Privacy loss: eps = 1.37810349112, delta = 1e-05 \n",
+      "[3350] Loss: 0.372867311762. Privacy loss: eps = 1.37909217577, delta = 1e-05 \n",
+      "[3360] Loss: 0.287746236389. Privacy loss: eps = 1.38008086042, delta = 1e-05 \n",
+      "[3370] Loss: 0.333100385619. Privacy loss: eps = 1.38106954506, delta = 1e-05 \n",
+      "[3380] Loss: 0.338849115666. Privacy loss: eps = 1.38205822971, delta = 1e-05 \n",
+      "[3390] Loss: 0.360284011068. Privacy loss: eps = 1.38304691436, delta = 1e-05 \n",
+      "[3400] Loss: 0.410862944726. Privacy loss: eps = 1.384035599, delta = 1e-05 \n",
+      "[3410] Loss: 0.370608682203. Privacy loss: eps = 1.38502428365, delta = 1e-05 \n",
+      "[3420] Loss: 0.33121647342. Privacy loss: eps = 1.3860129683, delta = 1e-05 \n",
+      "[3430] Loss: 0.399519774349. Privacy loss: eps = 1.38700165294, delta = 1e-05 \n",
+      "[3440] Loss: 0.364604312167. Privacy loss: eps = 1.38799033759, delta = 1e-05 \n",
+      "[3450] Loss: 0.34440603685. Privacy loss: eps = 1.38897902224, delta = 1e-05 \n",
+      "[3460] Loss: 0.328536938389. Privacy loss: eps = 1.38996770688, delta = 1e-05 \n",
+      "[3470] Loss: 0.350356167651. Privacy loss: eps = 1.39095639153, delta = 1e-05 \n",
+      "[3480] Loss: 0.378442173978. Privacy loss: eps = 1.39194507618, delta = 1e-05 \n",
+      "[3490] Loss: 0.38459339946. Privacy loss: eps = 1.39293376082, delta = 1e-05 \n",
+      "[3500] Loss: 0.379565398218. Privacy loss: eps = 1.39392244547, delta = 1e-05 \n",
+      "[3510] Loss: 0.376230376361. Privacy loss: eps = 1.39491113012, delta = 1e-05 \n",
+      "[3520] Loss: 0.370351906226. Privacy loss: eps = 1.39589981476, delta = 1e-05 \n",
+      "[3530] Loss: 0.407977427052. Privacy loss: eps = 1.39688849941, delta = 1e-05 \n",
+      "[3540] Loss: 0.429301852131. Privacy loss: eps = 1.39787718406, delta = 1e-05 \n",
+      "[3550] Loss: 0.417012504259. Privacy loss: eps = 1.3988658687, delta = 1e-05 \n",
+      "[3560] Loss: 0.435587500586. Privacy loss: eps = 1.39985455335, delta = 1e-05 \n",
+      "[3570] Loss: 0.368463514354. Privacy loss: eps = 1.400843238, delta = 1e-05 \n",
+      "[3580] Loss: 0.389770709414. Privacy loss: eps = 1.40183192264, delta = 1e-05 \n",
+      "[3590] Loss: 0.355092192167. Privacy loss: eps = 1.40282060729, delta = 1e-05 \n",
+      "[3600] Loss: 0.306434844954. Privacy loss: eps = 1.40380929194, delta = 1e-05 \n",
+      "Net: Epoch 5. Train Loss: 0.368140432412, Test Loss: 0.346689494956, Train_acc 0.904850746269, Test_acc 0.909633757962\n",
+      "[3610] Loss: 0.321662727201. Privacy loss: eps = 1.40479797658, delta = 1e-05 \n",
+      "[3620] Loss: 0.341535739312. Privacy loss: eps = 1.40578666123, delta = 1e-05 \n",
+      "[3630] Loss: 0.363561341316. Privacy loss: eps = 1.40677534588, delta = 1e-05 \n",
+      "[3640] Loss: 0.351917502618. Privacy loss: eps = 1.40776403052, delta = 1e-05 \n",
+      "[3650] Loss: 0.336870937361. Privacy loss: eps = 1.40875271517, delta = 1e-05 \n",
+      "[3660] Loss: 0.362210082333. Privacy loss: eps = 1.40974139982, delta = 1e-05 \n",
+      "[3670] Loss: 0.410470695114. Privacy loss: eps = 1.41073008446, delta = 1e-05 \n",
+      "[3680] Loss: 0.422485719207. Privacy loss: eps = 1.41171876911, delta = 1e-05 \n",
+      "[3690] Loss: 0.394709873329. Privacy loss: eps = 1.41270745376, delta = 1e-05 \n",
+      "[3700] Loss: 0.335457146811. Privacy loss: eps = 1.4136961384, delta = 1e-05 \n",
+      "[3710] Loss: 0.331361907363. Privacy loss: eps = 1.41468482305, delta = 1e-05 \n",
+      "[3720] Loss: 0.342553345281. Privacy loss: eps = 1.4156735077, delta = 1e-05 \n",
+      "[3730] Loss: 0.332426272564. Privacy loss: eps = 1.41666219234, delta = 1e-05 \n",
+      "[3740] Loss: 0.379844292156. Privacy loss: eps = 1.41765087699, delta = 1e-05 \n",
+      "[3750] Loss: 0.341205371937. Privacy loss: eps = 1.41863956164, delta = 1e-05 \n",
+      "[3760] Loss: 0.345304684733. Privacy loss: eps = 1.41962824628, delta = 1e-05 \n",
+      "[3770] Loss: 0.314587914023. Privacy loss: eps = 1.42061693093, delta = 1e-05 \n",
+      "[3780] Loss: 0.32478858533. Privacy loss: eps = 1.42160561558, delta = 1e-05 \n",
+      "[3790] Loss: 0.363549468729. Privacy loss: eps = 1.42259430022, delta = 1e-05 \n",
+      "[3800] Loss: 0.354955948314. Privacy loss: eps = 1.42358298487, delta = 1e-05 \n",
+      "[3810] Loss: 0.385716187621. Privacy loss: eps = 1.42457166952, delta = 1e-05 \n",
+      "[3820] Loss: 0.410217715531. Privacy loss: eps = 1.42556035416, delta = 1e-05 \n",
+      "[3830] Loss: 0.411703209201. Privacy loss: eps = 1.42654903881, delta = 1e-05 \n",
+      "[3840] Loss: 0.374408382684. Privacy loss: eps = 1.42753772346, delta = 1e-05 \n",
+      "[3850] Loss: 0.425103093023. Privacy loss: eps = 1.4285264081, delta = 1e-05 \n",
+      "[3860] Loss: 0.382756865127. Privacy loss: eps = 1.42951509275, delta = 1e-05 \n",
+      "[3870] Loss: 0.398858773415. Privacy loss: eps = 1.4305037774, delta = 1e-05 \n",
+      "[3880] Loss: 0.365076757764. Privacy loss: eps = 1.43149246204, delta = 1e-05 \n",
+      "[3890] Loss: 0.389868085234. Privacy loss: eps = 1.43248114669, delta = 1e-05 \n",
+      "[3900] Loss: 0.374049981559. Privacy loss: eps = 1.43346983134, delta = 1e-05 \n",
+      "[3910] Loss: 0.367443948637. Privacy loss: eps = 1.43445851598, delta = 1e-05 \n",
+      "[3920] Loss: 0.399809331251. Privacy loss: eps = 1.43544720063, delta = 1e-05 \n",
+      "[3930] Loss: 0.396607904472. Privacy loss: eps = 1.43643588528, delta = 1e-05 \n",
+      "[3940] Loss: 0.374441797566. Privacy loss: eps = 1.43742456992, delta = 1e-05 \n",
+      "[3950] Loss: 0.392873584971. Privacy loss: eps = 1.43841325457, delta = 1e-05 \n",
+      "[3960] Loss: 0.364936479963. Privacy loss: eps = 1.43940193922, delta = 1e-05 \n",
+      "[3970] Loss: 0.3503573536. Privacy loss: eps = 1.44039062386, delta = 1e-05 \n",
+      "[3980] Loss: 0.437211452478. Privacy loss: eps = 1.44137930851, delta = 1e-05 \n",
+      "[3990] Loss: 0.357215414058. Privacy loss: eps = 1.44236799316, delta = 1e-05 \n",
+      "[4000] Loss: 0.386548789453. Privacy loss: eps = 1.4433566778, delta = 1e-05 \n",
+      "[4010] Loss: 0.380743849214. Privacy loss: eps = 1.44434536245, delta = 1e-05 \n",
+      "[4020] Loss: 0.343592806394. Privacy loss: eps = 1.4453340471, delta = 1e-05 \n",
+      "[4030] Loss: 0.339513796461. Privacy loss: eps = 1.44632273174, delta = 1e-05 \n",
+      "[4040] Loss: 0.321553173164. Privacy loss: eps = 1.44731141639, delta = 1e-05 \n",
+      "[4050] Loss: 0.352456029867. Privacy loss: eps = 1.44830010104, delta = 1e-05 \n",
+      "[4060] Loss: 0.399652347771. Privacy loss: eps = 1.44928878568, delta = 1e-05 \n",
+      "[4070] Loss: 0.359910785639. Privacy loss: eps = 1.45027747033, delta = 1e-05 \n",
+      "[4080] Loss: 0.322913783596. Privacy loss: eps = 1.45126615498, delta = 1e-05 \n",
+      "[4090] Loss: 0.334567525516. Privacy loss: eps = 1.45225483962, delta = 1e-05 \n",
+      "[4100] Loss: 0.359853402976. Privacy loss: eps = 1.45324352427, delta = 1e-05 \n",
+      "[4110] Loss: 0.384481102364. Privacy loss: eps = 1.45423220892, delta = 1e-05 \n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[4120] Loss: 0.363258926838. Privacy loss: eps = 1.45522089356, delta = 1e-05 \n",
+      "[4130] Loss: 0.332406921289. Privacy loss: eps = 1.45620957821, delta = 1e-05 \n",
+      "[4140] Loss: 0.327428272996. Privacy loss: eps = 1.45719826286, delta = 1e-05 \n",
+      "[4150] Loss: 0.34129160146. Privacy loss: eps = 1.4581869475, delta = 1e-05 \n",
+      "[4160] Loss: 0.375347294563. Privacy loss: eps = 1.45917563215, delta = 1e-05 \n",
+      "[4170] Loss: 0.383305984371. Privacy loss: eps = 1.4601643168, delta = 1e-05 \n",
+      "[4180] Loss: 0.36659249567. Privacy loss: eps = 1.46115300144, delta = 1e-05 \n",
+      "[4190] Loss: 0.349989981714. Privacy loss: eps = 1.46214168609, delta = 1e-05 \n",
+      "[4200] Loss: 0.341840614378. Privacy loss: eps = 1.46313037074, delta = 1e-05 \n",
+      "Net: Epoch 6. Train Loss: 0.3668172636, Test Loss: 0.345251970088, Train_acc 0.904917377399, Test_acc 0.910628980892\n",
+      "[4210] Loss: 0.326740471362. Privacy loss: eps = 1.46411905538, delta = 1e-05 \n",
+      "[4220] Loss: 0.378850225781. Privacy loss: eps = 1.46510774003, delta = 1e-05 \n",
+      "[4230] Loss: 0.392054889237. Privacy loss: eps = 1.46609642468, delta = 1e-05 \n",
+      "[4240] Loss: 0.378197960209. Privacy loss: eps = 1.46708510932, delta = 1e-05 \n",
+      "[4250] Loss: 0.415471476808. Privacy loss: eps = 1.46807379397, delta = 1e-05 \n",
+      "[4260] Loss: 0.359728944395. Privacy loss: eps = 1.46906247862, delta = 1e-05 \n",
+      "[4270] Loss: 0.413121298679. Privacy loss: eps = 1.47005116326, delta = 1e-05 \n",
+      "[4280] Loss: 0.366295063783. Privacy loss: eps = 1.47103984791, delta = 1e-05 \n",
+      "[4290] Loss: 0.343395469449. Privacy loss: eps = 1.47202853256, delta = 1e-05 \n",
+      "[4300] Loss: 0.34019217704. Privacy loss: eps = 1.4730172172, delta = 1e-05 \n",
+      "[4310] Loss: 0.413919379574. Privacy loss: eps = 1.47400590185, delta = 1e-05 \n",
+      "[4320] Loss: 0.391385935549. Privacy loss: eps = 1.4749945865, delta = 1e-05 \n",
+      "[4330] Loss: 0.430797332848. Privacy loss: eps = 1.47598327114, delta = 1e-05 \n",
+      "[4340] Loss: 0.391108252034. Privacy loss: eps = 1.47697195579, delta = 1e-05 \n",
+      "[4350] Loss: 0.387292476869. Privacy loss: eps = 1.47796064044, delta = 1e-05 \n",
+      "[4360] Loss: 0.398362159447. Privacy loss: eps = 1.47894932508, delta = 1e-05 \n",
+      "[4370] Loss: 0.383932410454. Privacy loss: eps = 1.47993800973, delta = 1e-05 \n",
+      "[4380] Loss: 0.358315124677. Privacy loss: eps = 1.48092669438, delta = 1e-05 \n",
+      "[4390] Loss: 0.35632003896. Privacy loss: eps = 1.48191537902, delta = 1e-05 \n",
+      "[4400] Loss: 0.358826886658. Privacy loss: eps = 1.48290406367, delta = 1e-05 \n",
+      "[4410] Loss: 0.341015795619. Privacy loss: eps = 1.48389274832, delta = 1e-05 \n",
+      "[4420] Loss: 0.360973102376. Privacy loss: eps = 1.48488143296, delta = 1e-05 \n",
+      "[4430] Loss: 0.392859660101. Privacy loss: eps = 1.48587011761, delta = 1e-05 \n",
+      "[4440] Loss: 0.351674661869. Privacy loss: eps = 1.48685880226, delta = 1e-05 \n",
+      "[4450] Loss: 0.349548466984. Privacy loss: eps = 1.4878474869, delta = 1e-05 \n",
+      "[4460] Loss: 0.355712704321. Privacy loss: eps = 1.48883617155, delta = 1e-05 \n",
+      "[4470] Loss: 0.311400994893. Privacy loss: eps = 1.4898248562, delta = 1e-05 \n",
+      "[4480] Loss: 0.299184389645. Privacy loss: eps = 1.49081354084, delta = 1e-05 \n",
+      "[4490] Loss: 0.295286606199. Privacy loss: eps = 1.49180222549, delta = 1e-05 \n",
+      "[4500] Loss: 0.348383249292. Privacy loss: eps = 1.49279091014, delta = 1e-05 \n",
+      "[4510] Loss: 0.374079997393. Privacy loss: eps = 1.49377959478, delta = 1e-05 \n",
+      "[4520] Loss: 0.379813461597. Privacy loss: eps = 1.49476827943, delta = 1e-05 \n",
+      "[4530] Loss: 0.369156195437. Privacy loss: eps = 1.49575696408, delta = 1e-05 \n",
+      "[4540] Loss: 0.366000188798. Privacy loss: eps = 1.49674564872, delta = 1e-05 \n",
+      "[4550] Loss: 0.380821440305. Privacy loss: eps = 1.49773433337, delta = 1e-05 \n",
+      "[4560] Loss: 0.340241748319. Privacy loss: eps = 1.49872301802, delta = 1e-05 \n",
+      "[4570] Loss: 0.335130207378. Privacy loss: eps = 1.49971170266, delta = 1e-05 \n",
+      "[4580] Loss: 0.364266788263. Privacy loss: eps = 1.50070038731, delta = 1e-05 \n",
+      "[4590] Loss: 0.361980156184. Privacy loss: eps = 1.50168907196, delta = 1e-05 \n",
+      "[4600] Loss: 0.382067936399. Privacy loss: eps = 1.5026777566, delta = 1e-05 \n",
+      "[4610] Loss: 0.347786606671. Privacy loss: eps = 1.50366644125, delta = 1e-05 \n",
+      "[4620] Loss: 0.355983153151. Privacy loss: eps = 1.5046551259, delta = 1e-05 \n",
+      "[4630] Loss: 0.394646294112. Privacy loss: eps = 1.50564381054, delta = 1e-05 \n",
+      "[4640] Loss: 0.391034242003. Privacy loss: eps = 1.50663249519, delta = 1e-05 \n",
+      "[4650] Loss: 0.360514323841. Privacy loss: eps = 1.50762117984, delta = 1e-05 \n",
+      "[4660] Loss: 0.375640615206. Privacy loss: eps = 1.50860986448, delta = 1e-05 \n",
+      "[4670] Loss: 0.386972487975. Privacy loss: eps = 1.50959854913, delta = 1e-05 \n",
+      "[4680] Loss: 0.401396381394. Privacy loss: eps = 1.51058723378, delta = 1e-05 \n",
+      "[4690] Loss: 0.385708108552. Privacy loss: eps = 1.51157591842, delta = 1e-05 \n",
+      "[4700] Loss: 0.348716046593. Privacy loss: eps = 1.51256460307, delta = 1e-05 \n",
+      "[4710] Loss: 0.337426263742. Privacy loss: eps = 1.51355328772, delta = 1e-05 \n",
+      "[4720] Loss: 0.374782862471. Privacy loss: eps = 1.51454197236, delta = 1e-05 \n",
+      "[4730] Loss: 0.391592909312. Privacy loss: eps = 1.51553065701, delta = 1e-05 \n",
+      "[4740] Loss: 0.36742553244. Privacy loss: eps = 1.51651934166, delta = 1e-05 \n",
+      "[4750] Loss: 0.373420858238. Privacy loss: eps = 1.5175080263, delta = 1e-05 \n",
+      "[4760] Loss: 0.372490644857. Privacy loss: eps = 1.51849671095, delta = 1e-05 \n",
+      "[4770] Loss: 0.325608534264. Privacy loss: eps = 1.5194853956, delta = 1e-05 \n",
+      "[4780] Loss: 0.313100910192. Privacy loss: eps = 1.52047408024, delta = 1e-05 \n",
+      "[4790] Loss: 0.317344026779. Privacy loss: eps = 1.52146276489, delta = 1e-05 \n",
+      "[4800] Loss: 0.358145687164. Privacy loss: eps = 1.52245144954, delta = 1e-05 \n",
+      "Net: Epoch 7. Train Loss: 0.366065295175, Test Loss: 0.344677650174, Train_acc 0.905300506397, Test_acc 0.910927547771\n",
+      "[4810] Loss: 0.430976053663. Privacy loss: eps = 1.52344013418, delta = 1e-05 \n",
+      "[4820] Loss: 0.425745117592. Privacy loss: eps = 1.52442881883, delta = 1e-05 \n",
+      "[4830] Loss: 0.384824740809. Privacy loss: eps = 1.52541750348, delta = 1e-05 \n",
+      "[4840] Loss: 0.393423781734. Privacy loss: eps = 1.52640618812, delta = 1e-05 \n",
+      "[4850] Loss: 0.352806229223. Privacy loss: eps = 1.52739487277, delta = 1e-05 \n",
+      "[4860] Loss: 0.395702176307. Privacy loss: eps = 1.52838355742, delta = 1e-05 \n",
+      "[4870] Loss: 0.339716467387. Privacy loss: eps = 1.52937224206, delta = 1e-05 \n",
+      "[4880] Loss: 0.370948656051. Privacy loss: eps = 1.53036092671, delta = 1e-05 \n",
+      "[4890] Loss: 0.411298779237. Privacy loss: eps = 1.53134961136, delta = 1e-05 \n",
+      "[4900] Loss: 0.409187947713. Privacy loss: eps = 1.532338296, delta = 1e-05 \n",
+      "[4910] Loss: 0.388190212395. Privacy loss: eps = 1.53332698065, delta = 1e-05 \n",
+      "[4920] Loss: 0.375121723931. Privacy loss: eps = 1.5343156653, delta = 1e-05 \n",
+      "[4930] Loss: 0.329491303779. Privacy loss: eps = 1.53530434994, delta = 1e-05 \n",
+      "[4940] Loss: 0.344218813121. Privacy loss: eps = 1.53629303459, delta = 1e-05 \n",
+      "[4950] Loss: 0.360126147458. Privacy loss: eps = 1.53728171924, delta = 1e-05 \n",
+      "[4960] Loss: 0.373091533814. Privacy loss: eps = 1.53827040388, delta = 1e-05 \n",
+      "[4970] Loss: 0.346885883175. Privacy loss: eps = 1.53925908853, delta = 1e-05 \n",
+      "[4980] Loss: 0.369351748702. Privacy loss: eps = 1.54024777318, delta = 1e-05 \n",
+      "[4990] Loss: 0.367744804626. Privacy loss: eps = 1.54123645782, delta = 1e-05 \n",
+      "[5000] Loss: 0.419109261632. Privacy loss: eps = 1.54222514247, delta = 1e-05 \n",
+      "[5010] Loss: 0.402295655948. Privacy loss: eps = 1.54321382712, delta = 1e-05 \n",
+      "[5020] Loss: 0.396284335304. Privacy loss: eps = 1.54420251176, delta = 1e-05 \n",
+      "[5030] Loss: 0.374505997201. Privacy loss: eps = 1.54519119641, delta = 1e-05 \n",
+      "[5040] Loss: 0.332923566666. Privacy loss: eps = 1.54617988106, delta = 1e-05 \n",
+      "[5050] Loss: 0.344339939549. Privacy loss: eps = 1.5471685657, delta = 1e-05 \n",
+      "[5060] Loss: 0.364151926307. Privacy loss: eps = 1.54815725035, delta = 1e-05 \n",
+      "[5070] Loss: 0.407518289854. Privacy loss: eps = 1.549145935, delta = 1e-05 \n",
+      "[5080] Loss: 0.402759867213. Privacy loss: eps = 1.55013461964, delta = 1e-05 \n",
+      "[5090] Loss: 0.359293714467. Privacy loss: eps = 1.55112330429, delta = 1e-05 \n",
+      "[5100] Loss: 0.372563512754. Privacy loss: eps = 1.55211198894, delta = 1e-05 \n",
+      "[5110] Loss: 0.377625801957. Privacy loss: eps = 1.55310067358, delta = 1e-05 \n",
+      "[5120] Loss: 0.304342237357. Privacy loss: eps = 1.55408935823, delta = 1e-05 \n"
+     ]
+    },
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "[5130] Loss: 0.337282706653. Privacy loss: eps = 1.55507804288, delta = 1e-05 \n",
+      "[5140] Loss: 0.349742157456. Privacy loss: eps = 1.55606672752, delta = 1e-05 \n",
+      "[5150] Loss: 0.357104731334. Privacy loss: eps = 1.55705541217, delta = 1e-05 \n",
+      "[5160] Loss: 0.356343934101. Privacy loss: eps = 1.55804409682, delta = 1e-05 \n",
+      "[5170] Loss: 0.369056621237. Privacy loss: eps = 1.55903278146, delta = 1e-05 \n",
+      "[5180] Loss: 0.306943625618. Privacy loss: eps = 1.56002146611, delta = 1e-05 \n",
+      "[5190] Loss: 0.328274388393. Privacy loss: eps = 1.56101015076, delta = 1e-05 \n",
+      "[5200] Loss: 0.34766876036. Privacy loss: eps = 1.5619988354, delta = 1e-05 \n",
+      "[5210] Loss: 0.31121254543. Privacy loss: eps = 1.56298752005, delta = 1e-05 \n",
+      "[5220] Loss: 0.346063648681. Privacy loss: eps = 1.5639762047, delta = 1e-05 \n",
+      "[5230] Loss: 0.35203270802. Privacy loss: eps = 1.56496488934, delta = 1e-05 \n",
+      "[5240] Loss: 0.343166467782. Privacy loss: eps = 1.56595357399, delta = 1e-05 \n",
+      "[5250] Loss: 0.408203401002. Privacy loss: eps = 1.56694225864, delta = 1e-05 \n",
+      "[5260] Loss: 0.399661408871. Privacy loss: eps = 1.56793094328, delta = 1e-05 \n",
+      "[5270] Loss: 0.355236074307. Privacy loss: eps = 1.56891962793, delta = 1e-05 \n",
+      "[5280] Loss: 0.365445665455. Privacy loss: eps = 1.56990831258, delta = 1e-05 \n",
+      "[5290] Loss: 0.331479661421. Privacy loss: eps = 1.57089699722, delta = 1e-05 \n",
+      "[5300] Loss: 0.330531253116. Privacy loss: eps = 1.57188568187, delta = 1e-05 \n",
+      "[5310] Loss: 0.352265972595. Privacy loss: eps = 1.57287436652, delta = 1e-05 \n",
+      "[5320] Loss: 0.349854657009. Privacy loss: eps = 1.57386305116, delta = 1e-05 \n",
+      "[5330] Loss: 0.359140819454. Privacy loss: eps = 1.57485173581, delta = 1e-05 \n",
+      "[5340] Loss: 0.331497755413. Privacy loss: eps = 1.57584042046, delta = 1e-05 \n",
+      "[5350] Loss: 0.39790298789. Privacy loss: eps = 1.5768291051, delta = 1e-05 \n",
+      "[5360] Loss: 0.363463606921. Privacy loss: eps = 1.57781778975, delta = 1e-05 \n",
+      "[5370] Loss: 0.357193452388. Privacy loss: eps = 1.5788064744, delta = 1e-05 \n",
+      "[5380] Loss: 0.352469533237. Privacy loss: eps = 1.57979515904, delta = 1e-05 \n",
+      "[5390] Loss: 0.357331987618. Privacy loss: eps = 1.58078384369, delta = 1e-05 \n",
+      "[5400] Loss: 0.396139577075. Privacy loss: eps = 1.58177252834, delta = 1e-05 \n",
+      "Net: Epoch 8. Train Loss: 0.365700747838, Test Loss: 0.344305759284, Train_acc 0.905383795309, Test_acc 0.910628980892\n",
+      "[5410] Loss: 0.369872177303. Privacy loss: eps = 1.58276121298, delta = 1e-05 \n",
+      "[5420] Loss: 0.323984273689. Privacy loss: eps = 1.58374989763, delta = 1e-05 \n",
+      "[5430] Loss: 0.385663283794. Privacy loss: eps = 1.58473858228, delta = 1e-05 \n",
+      "[5440] Loss: 0.381332324465. Privacy loss: eps = 1.58572726692, delta = 1e-05 \n",
+      "[5450] Loss: 0.341043498075. Privacy loss: eps = 1.58671595157, delta = 1e-05 \n",
+      "[5460] Loss: 0.357028726533. Privacy loss: eps = 1.58770463622, delta = 1e-05 \n",
+      "[5470] Loss: 0.384536606767. Privacy loss: eps = 1.58869332086, delta = 1e-05 \n",
+      "[5480] Loss: 0.360967463607. Privacy loss: eps = 1.58968200551, delta = 1e-05 \n",
+      "[5490] Loss: 0.378419624802. Privacy loss: eps = 1.59067069016, delta = 1e-05 \n",
+      "[5500] Loss: 0.327784580781. Privacy loss: eps = 1.5916593748, delta = 1e-05 \n",
+      "[5510] Loss: 0.342155586845. Privacy loss: eps = 1.59264805945, delta = 1e-05 \n",
+      "[5520] Loss: 0.407532770828. Privacy loss: eps = 1.5936367441, delta = 1e-05 \n",
+      "[5530] Loss: 0.368336005773. Privacy loss: eps = 1.59462542874, delta = 1e-05 \n",
+      "[5540] Loss: 0.341355583164. Privacy loss: eps = 1.59561411339, delta = 1e-05 \n",
+      "[5550] Loss: 0.363600602917. Privacy loss: eps = 1.59660279804, delta = 1e-05 \n",
+      "[5560] Loss: 0.327762419727. Privacy loss: eps = 1.59759148268, delta = 1e-05 \n",
+      "[5570] Loss: 0.349045364791. Privacy loss: eps = 1.59858016733, delta = 1e-05 \n",
+      "[5580] Loss: 0.336348212516. Privacy loss: eps = 1.59956885198, delta = 1e-05 \n",
+      "[5590] Loss: 0.326067098472. Privacy loss: eps = 1.60055753662, delta = 1e-05 \n",
+      "[5600] Loss: 0.365685792088. Privacy loss: eps = 1.60154622127, delta = 1e-05 \n",
+      "[5610] Loss: 0.372803893626. Privacy loss: eps = 1.60253490592, delta = 1e-05 \n",
+      "[5620] Loss: 0.342617507528. Privacy loss: eps = 1.60352359056, delta = 1e-05 \n",
+      "[5630] Loss: 0.33995144983. Privacy loss: eps = 1.60451227521, delta = 1e-05 \n",
+      "[5640] Loss: 0.349218021288. Privacy loss: eps = 1.60550095986, delta = 1e-05 \n",
+      "[5650] Loss: 0.37335014808. Privacy loss: eps = 1.6064896445, delta = 1e-05 \n",
+      "[5660] Loss: 0.362334559059. Privacy loss: eps = 1.60747832915, delta = 1e-05 \n",
+      "[5670] Loss: 0.401524945074. Privacy loss: eps = 1.6084670138, delta = 1e-05 \n",
+      "[5680] Loss: 0.440670887394. Privacy loss: eps = 1.60945569844, delta = 1e-05 \n",
+      "[5690] Loss: 0.392936434423. Privacy loss: eps = 1.61044438309, delta = 1e-05 \n",
+      "[5700] Loss: 0.389849443065. Privacy loss: eps = 1.61143306774, delta = 1e-05 \n",
+      "[5710] Loss: 0.356315845473. Privacy loss: eps = 1.61242175238, delta = 1e-05 \n",
+      "[5720] Loss: 0.349339601251. Privacy loss: eps = 1.61341043703, delta = 1e-05 \n",
+      "[5730] Loss: 0.36347762001. Privacy loss: eps = 1.61439912168, delta = 1e-05 \n",
+      "[5740] Loss: 0.400490356977. Privacy loss: eps = 1.61538780632, delta = 1e-05 \n",
+      "[5750] Loss: 0.392635021698. Privacy loss: eps = 1.61637649097, delta = 1e-05 \n",
+      "[5760] Loss: 0.369572663426. Privacy loss: eps = 1.61736517562, delta = 1e-05 \n",
+      "[5770] Loss: 0.343977955664. Privacy loss: eps = 1.61835386026, delta = 1e-05 \n",
+      "[5780] Loss: 0.37960619063. Privacy loss: eps = 1.61934254491, delta = 1e-05 \n",
+      "[5790] Loss: 0.367284306614. Privacy loss: eps = 1.62033122956, delta = 1e-05 \n",
+      "[5800] Loss: 0.348647647014. Privacy loss: eps = 1.6213199142, delta = 1e-05 \n",
+      "[5810] Loss: 0.360165617557. Privacy loss: eps = 1.62230859885, delta = 1e-05 \n",
+      "[5820] Loss: 0.379052174658. Privacy loss: eps = 1.6232972835, delta = 1e-05 \n",
+      "[5830] Loss: 0.389903800553. Privacy loss: eps = 1.62428596814, delta = 1e-05 \n",
+      "[5840] Loss: 0.371721627107. Privacy loss: eps = 1.62527465279, delta = 1e-05 \n",
+      "[5850] Loss: 0.395574647035. Privacy loss: eps = 1.62626333744, delta = 1e-05 \n",
+      "[5860] Loss: 0.412040792357. Privacy loss: eps = 1.62725202208, delta = 1e-05 \n",
+      "[5870] Loss: 0.37480268992. Privacy loss: eps = 1.62824070673, delta = 1e-05 \n",
+      "[5880] Loss: 0.391682601633. Privacy loss: eps = 1.62922939138, delta = 1e-05 \n",
+      "[5890] Loss: 0.377434325881. Privacy loss: eps = 1.63021807602, delta = 1e-05 \n",
+      "[5900] Loss: 0.3575602516. Privacy loss: eps = 1.63120676067, delta = 1e-05 \n",
+      "[5910] Loss: 0.347679944099. Privacy loss: eps = 1.63219544532, delta = 1e-05 \n",
+      "[5920] Loss: 0.360656394091. Privacy loss: eps = 1.63318412996, delta = 1e-05 \n",
+      "[5930] Loss: 0.333699994212. Privacy loss: eps = 1.63417281461, delta = 1e-05 \n",
+      "[5940] Loss: 0.352564128233. Privacy loss: eps = 1.63516149926, delta = 1e-05 \n",
+      "[5950] Loss: 0.34606085913. Privacy loss: eps = 1.6361501839, delta = 1e-05 \n",
+      "[5960] Loss: 0.362537191607. Privacy loss: eps = 1.63713886855, delta = 1e-05 \n",
+      "[5970] Loss: 0.36343034356. Privacy loss: eps = 1.6381275532, delta = 1e-05 \n",
+      "[5980] Loss: 0.337318845262. Privacy loss: eps = 1.63911623784, delta = 1e-05 \n",
+      "[5990] Loss: 0.324438747088. Privacy loss: eps = 1.64010492249, delta = 1e-05 \n",
+      "[6000] Loss: 0.352022448561. Privacy loss: eps = 1.64109360714, delta = 1e-05 \n",
+      "Net: Epoch 9. Train Loss: 0.365525248327, Test Loss: 0.344171396249, Train_acc 0.905417110874, Test_acc 0.910728503185\n"
+     ]
+    }
+   ],
+   "source": [
+    "for e in range(epochs):\n",
+    "    # train_data.reset()  # Reset does not shuffle yet\n",
+    "    train_data = mx.io.NDArrayIter(mnist[\"train_data\"], mnist[\"train_label\"],\n",
+    "                                   batch_size, shuffle=True)\n",
+    "    for i, batch in enumerate(train_data):\n",
+    "        data = batch.data[0].as_in_context(ctx).reshape((-1, 784))\n",
+    "        label = batch.label[0].as_in_context(ctx)\n",
+    "        with autograd.record():\n",
+    "            output = net(data)\n",
+    "            loss = softmax_cross_entropy(output, label)\n",
+    "        loss.backward()\n",
+    "\n",
+    "        # calculate a moving average estimate of the loss\n",
+    "        count += 1\n",
+    "        moving_loss = .999 * moving_loss + .001 * nd.mean(loss).asscalar()\n",
+    "        est_loss = moving_loss / (1 - 0.999 ** count)\n",
+    "\n",
+    "        # Add up the clipped individual gradient\n",
+    "        dpdl_utils.accumuate_grad(grads, params, thresh)\n",
+    "\n",
+    "        #print(i)\n",
+    "        if not (i + 1) % batchsz:  # update the parameters when we collect enough data\n",
+    "\n",
+    "            privateSGD(params, grads, learning_rate/batchsz,sigma,wd=0.1,ctx=ctx)\n",
+    "\n",
+    "            # Keep track of the privacy loss\n",
+    "            DPobject.compose_subsampled_mechanism(func,1.0*batchsz/n)\n",
+    "  \n",
+    "            \n",
+    "            dpdl_utils.reset_grad(grads)\n",
+    "\n",
+    "        if count % (10*batchsz) == 0:\n",
+    "            print(\"[%s] Loss: %s. Privacy loss: eps = %s, delta = %s \" % (((count+1)/batchsz),est_loss,DPobject.get_eps(delta),delta))\n",
+    "            logs['MAloss'].append(est_loss)\n",
+    "        ##########################\n",
+    "        #  Keep a moving average of the losses\n",
+    "        ##########################\n",
+    "\n",
+    "        if count % 60000 == 0:\n",
+    "            test_accuracy, loss_test = evaluate_accuracy(test_data, net)\n",
+    "            train_accuracy, loss_train = evaluate_accuracy(train_data2, net)\n",
+    "\n",
+    "            print(\"Net: Epoch %s. Train Loss: %s, Test Loss: %s, Train_acc %s, Test_acc %s\" %\n",
+    "                 (e, loss_train, loss_test,train_accuracy, test_accuracy))\n",
+    "            \n",
+    "            logs['eps'].append(DPobject.get_eps(delta))\n",
+    "            logs['loss'].append(loss_train)\n",
+    "            logs['train_acc'].append(train_accuracy)\n",
+    "            logs['test_acc'].append(test_accuracy)\n",
+    "            \n",
+    "            learning_rate = learning_rate/2\n",
+    "        "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "## Plot some figures!"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAxUAAAIICAYAAAAYIsP7AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAAMTQAADE0B0s6tTgAAIABJREFUeJzs3Xl8VPW9//H3bNn3zCBLNkgmKLIEZGcmt1rUuiEiUC1q6+69Fay0l3ot12p7a73XXlyQXmtt7e1tqRarlKr9+aioJUEEWS2gkIQlBAUz2ReS2c7vjxlGwhpIYJLwej4ePjIz58x3PpNEct5zPuf7NRmGYQgAAAAAzpA52gUAAAAA6N0IFQAAAAC6hFABAAAAoEsIFQAAAAC6hFABAAAAoEsIFQAAAAC6hFABAAAAoEsIFQAAAAC6hFABAAAAoEsIFQAAAAC6xBrtAg6LjY2Vw+GIdhkAAAAAJFVXV6u9vb1T+/aYUOFwOFRVVRXtMgAAAABIysrK6vS+tD8BAAAA6BJCBQAAAIAuIVQAAAAA6JIec03FyQSDQRmGEe0y0EOZTCaZzeRjAACAaOnRocLr9aqyslI+ny/apaCHs9lsysnJUUxMTLRLAQAAOO/06FBRWVmp5ORkZWZmymQyRbsc9FCGYaimpkaVlZUqKCiIdjkAAADnnR4bKoLBoHw+nzIzM2W19tgy0UNkZmaqtrZWwWCQVigAAIBzrMcefR2+hoIzFOiMw78nXHsDAABw7vXYUAEAAACgdyBUnIaLL75YX3zxxQm3b926VXl5eacc5/3331dRUZEkqb6+Xk888UR3lXhKH330kSZPnqyEhARNnz79jMd59NFH5XA4VFRUpKKiIs2ZM6cbqwQAAEBvQqg4Ddddd51WrFjRrWOe61AxYMAAPf3003rqqae6PNacOXO0efNmbd68Wb///e+7oToAAAD0Rr3qCui7/vcj7a1pPStj52Ym6MVvjjvpPjfccIN+/OMf66677oo89uijj+r3v/+9UlJSdNVVV3XY/+2339aPf/xjHTp0SBaLRf/5n/+pSy+9tMM+9913n5qamlRUVCSr1ar169dr0aJF+sMf/iCfzyebzaZnn31WkyZN6pb3mZWVpaysLG3fvv2YbT6fT//+7/+ud999V16vV4WFhfrFL36h9PT0bnltAAAA9E2cqTgN48eP1yeffKLm5mZJ0ptvvqlly5Zpw4YNWr9+vfbs2RPZd9euXXr00Uf11ltvacOGDVq6dKm+8Y1vqL29vcOYzz//vJKTk7V582atX79eknTrrbfqo48+0ubNm7V48WLdfvvtx61nx44dkfajo/870XNO5sknn1RiYqLWrVunzZs3a8SIEVq4cOEJ91+2bJlGjRqlyy67TO+9995pvx4AAAD6hl51puJUZxLONpPJpCuvvFJ//etfNWvWLK1cuVKzZ89WSkqKJOnee+9VaWmpJOn//b//p/LychUXF0eebzabVVlZecrX2bRpk37yk5+opqZGVqtVO3bs0KFDhxQfH99hv6FDh2rz5s3d9v6WL1+uhoYG/elPf5IUWnzwRNeI3HffffrBD34gm82m1atX64YbbtBHH32k3NzcbqsHAAAAvUOvChU9wfTp0/Wb3/xGs2bNOmbbkdPfGoahyy+/XEuXLj1mv/37959wfK/XqxkzZui9997TuHHj1NjYqNTUVLW3tx8TKnbs2KGvf/3rxx1n9OjReumllzr7tiI1L168WFdccUWHx+vr6/WVr3xFkjR48GC9/vrr6t+/f2T7lClTNHr0aK1fv55QAQAAcB4iVJymSy+9VPfcc498Pp+mTp2qBQsWaP78+UpKStILL7wQ2e/KK6/UY489po8//lgjR46UJK1bt07jx4/vMF5KSooOHTokr9ermJgYtbW1yev1KicnR5K0ePHiE9bS3Wcqpk+frqeeekoul0sJCQlqbW3V7t27dfHFFx/zOlVVVcrKypIklZWVRdqlAAAAcP4hVJwmm82mKVOm6N1339XVV1+tdevWacyYMcdcqF1QUKClS5fq3nv
vVWtrq7xer0aPHn3MmYuMjAzddtttGjlypJKSkrR+/Xr9x3/8h8aPHy+73a6bbrqpW+vfsWOHvvrVr6q1tVWHDh1SVlaWHn74Yf3Lv/yLvv/976u9vV0TJkyInHX5/ve/r4svvviYcX7wgx9ow4YNslqtslgsWrJkiQoLC7u1VgAAAPQOJqOHLEGclZWlqqqqyP1AIKCdO3eqsLBQFoslipUd69VXX9XKlSv1P//zP9EuBWE9+fcFAACgNzr6+PxkOFNxBmbOnKmZM2dGuwwAAAD0QYZhdLhWtzcgVAAAAABRdMgb0Lo9tSrZWa2SMo9mj8vWna7B0S7rtBAqAAAAgHMoGDS0/fNGlZZ7VFJWrY9218kbCEqSHMmx6iFXJ5wWQgUAAABwlh1oaFNJWehMxOpyj2pavJKkWKtZE/MzVey0y+W0a+gFyb2u9UkiVAAAAADdrtXr19pdtSopC52NKPuiObLt4oEpmjk2S8VOhy7JTVecrfdPMkOoAAAAALooGDS09bOGSIjYsLdOvkCojemClFjNvCRLbqddUwrssifFRrna7keoOA1FRUWSQqte79ixI7LY29ChQ/XKK6+c1li333677rjjDrnd7pPut2TJEnm9Xj344INnVvRx3HLLLZo4caLuv//+bhsTAADgfLO//pBKj2hpqmv1SZLibRZNKbDL7XSo2GlXQb+kXtnSdDoIFafh8KrSe/bsUVFR0UlXs/b7/bJaT/ztfemllzr1mt/+9rdPr0gAAACcFc3tfn1YUaPSco9WlVVrV3WLJMlkkoYPTNXN40PXRVySm65Ya+9vaTodhIpu8s4772j+/PkaM2aMNm/erEceeUQtLS1avHixfD6fDMPQ448/rquvvlqS5HK59NBDD+naa6/VLbfcouTkZO3YsUNVVVUaNWqUli5dKpvNpoULF6qtrU0/+9nP9OKLL2rZsmVKT0/Xtm3bFB8frz/+8Y/Ky8uTJC1cuFAvv/yy0tPTdcUVV+iVV15ReXn5SetuamrS3LlztX79ehmGoZtvvlkLFy6UJD322GP6wx/+oLi4OEnSG2+8oYyMDH3zm9/Utm3bZLValZWVpbfeeuvsfWMBAACiJBA09I/9DaGpXss92ri3Tv5gqKVpQGqcZo/Nktvp0JQCuzISY6JcbXT1rlCx9CapbvfZGTt9sPSNl7s0xNatW/Xzn/9cLpdLkuTxeHTLLbfIZDJp165dmjx5svbt2yebzXbMc7ds2aKVK1cqJiZGU6ZM0fLlyzVr1qxj9lu7dq22bNmi3Nxcfe9739OTTz6pJUuW6M9//rP+8pe/aPPmzUpMTNRtt93WqZofffRRGYahf/zjH2pubtaUKVN00UUXqbi4WM8++6z279+vuLg4tba2ymKxaMWKFWptbdX27dslSbW1tV34jgEAAPQs+2pbVVLmUWl5tVaX16jhUKilKSHGon8qdMjlDLU15TsS+3xL0+noXaGihyssLIwECknatWuX5syZo/3798tqtaq2tlZ79+5VQUHBMc+dMWOG4uPjJUnjxo1TRUXFcV/D5XIpNzdXkjRp0iT98pe/lCStXLlSs2fPVlJSkiTpzjvv1Jo1a05Z8zvvvKMlS5bIZDIpOTlZt956q/72t7/p+uuvV25urm699VZdfvnluuaaazRo0CCNHj1a8+fP17e//W390z/9U+TMCwAAQG/U1ObTmoqacJDwaLfny5amkYNS5XY65HbaNTonXTFWc5Sr7bl6V6jo4pmEs+3wAf1hs2fP1tNPP63p06dLklJSUtTW1nbc5x5uMZIki8Uiv9/fpf3ONDkffp7VatW6dev0wQcf6P3339eECRP0xz/+UZMnT9Ynn3yilStX6m9/+5sWLFigLVu2KDU19YxeDwAA4FzyB4LaUtWg0vAsTZv21SsQbmkalBavm8dny1Xg0OT8TKWf5y1Np6N3hYpepr6+XoMHh5ZY/81vfqOmpqaz9lqXXXaZHnvsMX3nO99RQkK
Cfv3rX3fqeVOnTtWLL76oKVOmqKWlRb/73e/0yCOPqLGxUS0tLSouLlZxcbE+/vhjbd68WdnZ2crIyND111+vq666Sq+//rr2799PqAAAAD1WZU2rVpVVq6SsWh9U1KipLfShbFKsVZcO7afiwlBLU15mAi1NZ6hToWLevHlasWKF9u7dq02bNkWmVj3aP/7xD82dO1cHDx6UJP3kJz/RjBkzuq/aXuaZZ57R9ddfr4yMDE2dOlWDBg06a681ffp0rVu3TkVFRUpNTVVxcbHS0tJO+bxHH31Uc+fO1YgRIyIXas+YMUN79+7V17/+dbW0hE4BXnjhhbr11lv197//XT/4wQ8khWa4uvPOOzVs2LCz9r4AAABOV8Mhn9ZUeMJrRnhUWdsqSTKbpFHZaZGWpqLsNNkstDR1B5NhGMapdlq1apWGDBkil8ul5cuXHzdUtLa2avjw4frtb38rl8ulQCCg2tpaORyOThWSlZWlqqqqyP1AIKCdO3eqsLBQFsv5NSXXmWpqalJycrIMw9ADDzwgwzC0ePHiaJd1TvD7AgDA+csXCGrLvnqtCrc0bdlXr3BHk7Iz4iPrRUwaYldqwrET5uD4jj4+P5lOnakoLi4+5T5Lly7VxIkTIxcqWyyWTgcKdI85c+Zo3759amtr04gRI/T8889HuyQAAIBuZxiG9tS0qiS88Nyaiho1t4dampJjrbp82AVyhYNEbmZilKs9P3TbNRXbt29XbGysrr32WlVVVWnkyJH67//+7xMGi0WLFmnRokWR+83Nzd1VynlrxYoV0S4BAADgrKhv9eqDihqVlFVr1U6P9tcfkiRZzCYVZafJHZ7qdVRWqqy0NJ1z3RYq/H6/3nnnHX344YcaOHCgHn74Yf3zP/+zXn311ePuP3/+fM2fPz9yPysrq7tKAQAAQC/n9Qe1qbIudF1EuUcfV9XrcNN+XmaCbp2YK5fTrkn5mUqJo6Up2rotVOTk5OjSSy+NXIx8yy236Morr+yu4QEAANCHGYahiuoWlR5uadpVo1ZvQJKUEmfV1y7uH7nAOjsjIcrV4mjdFipmz56tX/3qV2psbFRKSoreeustjRo1qruGBwAAQB9T2+LV6vLQxdWlZR591hBaz8tqNmlMTrrcTrtcTrtGZqXJYmaq156sU6Hi3nvv1ZtvvqkDBw7oyiuvVHJyssrLy3XXXXdp2rRpmjZtmnJycvTwww9r8uTJMpvNGjRokF544YWzXT8AAAB6iXZ/QBv21oUXnvNo62cNkZamIfZEfXNSrtxOhybmZyopluXUepNOTSl7LjClLLqC3xcAAHoewzBU/kVzZKrXtbtqdcgXamlKS7BpSoFd7oLQ2YisdFqaeppun1IWIYfX5/B6vdqxY4dGjBghSRo6dKheeeWV0xrr9ttv1x133CG3233S/ZYsWSKv16sHH3zwzIoGAAA4hzzN7eGWplCQONjYLkmyWUItTcWFoesiLh6YSktTH8KZijOwZ88eFRUVqb6+/oT7+P1+Wa1ktiMFAoGz9rPsyb8vAAD0ZW2+UEvTqvB1Eds+a4xsK+iXJFeBXW6nXROHZCqRlqZepc+eqZi7cq72Ne07K2NnJ2dr8VfPfPXpd955R/Pnz9eYMWO0efNmPfLII2ppadHixYvl8/lkGIYef/xxXX311ZIkl8ulhx56SNdee61uueUWJScna8eOHaqqqtKoUaO0dOlS2Ww2LVy4UG1tbfrZz36mF198UcuWLVN6erq2bdum+Ph4/fGPf1ReXp4kaeHChXr55ZeVnp6uK664Qq+88orKy8uPqfX//u//TlhXVVWV5s2bp7KyMplMJt1444364Q9/qLq6On33u9/VunXrZLFYNGHCBL3wwgsd6pOkp59+Wlu3btWLL76oF198US+//HLkGpxf//rXeu+997Rs2TL5/X7ZbDY999xzGj9+vKTQWicPPPCADh48KEmaO3euRowYoTvvvFPbtm2L1D9+/Hj95Cc/0eW
XX37GPy8AAHBmDMPQjoNNKtkZmup13e4atfmCkqSMxBhdN2pgeM0Iuwakxke5WpwrvSpU9HRbt27Vz3/+88iq4h6PR7fccotMJpN27dqlyZMna9++fbLZjp1LecuWLVq5cqViYmI0ZcoULV++XLNmzTpmv7Vr12rLli3Kzc3V9773PT355JNasmSJ/vznP+svf/mLNm/erMTERN12220nrPOqq646YV0333yzpk2bptdee02SVF1dLUmaN2+e0tLS9PHHH8tsNkceP5UPP/xQmzZtktPplCTl5eVpwYIFkqTS0lLdcccd2rp1q7xer6677jo98cQTkfft8Xhkt9uVlJSkd999V5dddpk++ugjNTY2aurUqZ16fQAA0HVfNLWFWprCQaK6KdTSFGMxa2xeulxOu4qdDg0bkCIzLU3npV4VKrpyJuFcKCwsjAQKSdq1a5fmzJmj/fv3y2q1qra2Vnv37lVBQcExz50xY4bi40Npfty4caqoqDjua7hcLuXm5kqSJk2apF/+8peSpJUrV2r27NlKSkqSJN15551as2bNccc4UV0Oh0Nr167Ve++9F9n38Irob7zxhrZs2SKz2dzh8VNxuVyRQCFJ69ev109/+lPV1tbKarVq+/bt8nq9+uSTT2QYRocgZbfbJUkPPPCAnnvuOV122WVasmSJvv3tb8tk4h8sAADOljZfQOt216q03KNVO6v16YGmyLbCC5I0LXw2YvzgDCXE9KrDSZwl/BZ0o8MH9IfNnj1bTz/9tKZPny5JSklJUVtb23GfGxcXF7ltsVjk9/u7tN/JDrpPp65TjWe1WhUIBCL3jx7nyO9JW1ubZs6cqZKSEo0ZM0a1tbXKzMyU1+s96WvPmjVLDz30kDZt2qS33npLzz777En3BwAApycYNPTpgSaVhBeeW7enVl5/qKXJnhSj6UUD5XI65Cqwq39q3ClGw/nIHO0C+rL6+noNHjxYkvSb3/xGTU1Np3jGmbvsssv06quvqqWlRYZh6Ne//vVp15WamqpJkybpqaeeiux7uM3puuuu05NPPqlgMNjh8YKCAq1fv17BYFAtLS2RtqnjaW1tld/vV3Z2tiRp8eIvzzwNGzZMFotFy5Ytizzm8XgkSTabTffcc4+mTZumWbNmKSUlpfPfGAAAcFwHG9v06oYqfeflTRr/+Du6+tkS/fSvn2rdnlqNz8vQv111od6c59K6h6fq6ZtGa+YlWQQKnBBnKs6iZ555Rtdff70yMjI0depUDRo06Ky91vTp07Vu3ToVFRUpNTVVxcXFSktLO+26li5dqvvvv1/Dhw+XxWLRjTfeqEceeUTPPPOMHnzwQY0YMUI2m00TJ07U888/r1mzZulPf/qTLrzwQmVnZ2vMmDEnPHuSkZGhRx99VOPGjVNmZqZuuummyDabzaYVK1Zo7ty5+tGPfiSz2ax58+bpzjvvlCTdddddeuSRR3T//fd343cNAIDzxyFvQGt316ikzKPSMo92HPzyw84L+ydrxpgsuQpCLU1xNmZSxOlhStk+pKmpScnJyTIMQw888IAMw+hwNqA3e/nll/XSSy/p7bffPu52fl8AAOgoGDS0/fPGyHoR6/fUyRsIdRw4kmPlLrDLXWjXlAK7+iVzBgLH6rNTyuLk5syZo3379qmtrU0jRozQ888/H+2SusXUqVO1e/duLV++PNqlAADQo33ecCgcIjxaXe5RbUvousVYq1kT8zMjQWLoBclMeoJuRajoQ1asWBHtEs6Kd955J9olAADQI7W0+yMtTSVlHpV/0RzZNmxAimaNzVKx06FLctNpacJZRagAAADoJQJBQ9s+a4i0NG3YWydfINTJfkFKrGZekiW3M9TSZE+KjXK1OJ8QKgAAAHqw/fWHVLKzWiXloZam+lafJCneZtGUArvcTofcTruc/ZJoaULUECoAAAB6kOZ2vz6sqAmtGVHu0a7qFkmSySQNH5iqm8fb5XbadUluumKttDShZyBUAAA
ARFEgaOjjqnqVhq+L2FhZJ38w1NI0IDVOs8dmye10aEqBXRmJMVGuFjg+QsVpKCoqkiR5vV7t2LFDI0aMkCQNHTpUr7zyymmP99prrykrK0vjx4/v1joBAEDPtq+2NXJdxAcVNWo4FGppSoix6J8KHXI5Q21N+Y5EWprQKxAqTsPmzZslSXv27FFRUVHk/pl67bXXNHHixB4VKoLBoEwmE/+AAQDQjRrbfFpTURM+G1GtPTWtkkItTSMHpUauixidk64YqznK1QKnj1DRjf73f/9XP//5z+Xz+ZScnKwlS5Zo+PDhWr16tebOnatgMCi/36958+apf//+euutt/T+++/rxRdf1AMPPKDbb7+9w3j/9V//pWXLlsnv98tms+m5556LBJDt27frgQce0MGDByVJc+fO1d13362qqirNmzdPZWVlMplMuvHGG/XDH/5Qt9xyiyZOnBhZkfo73/mO7Ha7Fi5cqIULF2rHjh1qaGjQvn379P777+uJJ55QaWmpfD6f0tLS9Mtf/lJOp1OStHr1ai1YsEBNTaGVOB9//HE1Nzfrt7/9rd566y1Jkt/v1+DBg7Vy5UoVFhaek+8/AAA9hT8Q1JaqhtB1EWUebd5Xr0C4pWlQWrxuHp8tV4FDk/MzlU5LE/qAXhUq9v3zv8i7r/KsjB2TnaPs//n5GT//73//u1599VWVlJQoJiZG7733nubMmaMtW7bo8ccf17/9279p1qxZkqS6ujqlp6fr6quv7nCgf7Tbb79dCxYskCSVlpbqjjvu0NatW+X1enXdddfpiSeeiIzp8XgkSTfffLOmTZum1157TZJUXV3dqfrXrFmjjRs3ql+/fpKkhx9+WA6HQ5L0u9/9Tg8++KDeeOMNeTwe3XDDDVq+fLkmT56sYDCo+vp6paSkaMGCBaqoqFB+fr5ef/11DRs2jEABADhv7K1p0aoyj0rDLU1NbX5JUlKsVZcO7afiwlBLU15mAh0B6HN6Vajoyf785z9r06ZNHVqZqqur5fV6ddlll+mxxx7Tp59+qq9+9auaPHlyp8Zcv369fvrTn6q2tlZWq1Xbt2+X1+vVJ598IsMwIoFCkux2uxoaGrR27Vq99957kccPB4NTufbaayOBQpLefvttPffcc2publYwGFRjY6Ok0FmK4cOHR96D2WxWRkaGJOm+++7TkiVLtGjRIi1ZskTf/e53O/XaAAD0Rg2HfFpT4QkHCY8qa0MtTWaTNCo7Lbx6tUNF2WmyWWhpQt/Wq0JFV84knG2GYeiOO+7Qj370o2O2ffe739X06dO1cuVKLViwQGPGjNGzzz570vHa2to0c+ZMlZSUaMyYMaqtrVVmZqa8Xu8Z1We1WhUIBDqMf6SkpKTI7V27dumBBx7Q+vXrNXjwYG3cuFFXXHHFKV/jnnvu0ciRI3XTTTepsrJS11xzzRnVCgBAT+QLBLV5X33kAust++oV7mhSdka8vjEhR8VOuyYNsSs1wRbdYoFzrFeFip7s+uuv17e+9S3dc889ysrKUjAY1MaNGzV27Fjt2LFDQ4cOVX5+vgYOHBgJHikpKWpoaDjueK2trfL7/crOzpYkLV68OLJt2LBhslgsWrZsWYf2J7vdrkmTJumpp57Sv/7rv0oKnS1xOBwqKCjQ2rVrI/v+9a9/1d13333c125oaFBsbKz69+8vwzC0ZMmSyDaXy6V77rlHH3zwQYf2p4yMDNntdl199dW68cYbNW/ePJnNfCoDAOi9DMPQnppWlZRVa9VOjz7cVaPm9lBLU3KsVVMvukDuQoeKnXblZiZGuVoguggV3eQrX/mKHn/8cU2bNk2BQEBer1fTpk3T2LFj9fTTT2vVqlWKiYmR1WrVz372M0nSbbfdpjvuuEPLli075kLtjIwMPfrooxo3bpwyMzN10003RbbZbDatWLFCc+fO1Y9+9COZzWbNmzdPd955p5YuXar7779fw4cPl8Vi0Y033qhHHnlE9913n2bOnKlhw4Y
pPz9fkyZNOuF7GT16tGbMmKFhw4bJbrdr2rRpkW2ZmZl6/fXX9b3vfU/Nzc0ym816/PHHdfXVV0uS7r77bi1dulR33nlnd3+LAQA46+pbvVpdXqPS8lCQ2F9/SJJkMZs0OjstMtXrqKxUWWlpAiJMhmEY0S5CkrKyslRVVRW5HwgEtHPnThUWFspiYbXI3uKJJ57Q7t279Ytf/OKcvi6/LwCAM+H1B7Wpsi7S0vTx/gYdPjLKy0yQ2xlaM2JSfqZS4mhpwvnl6OPzk+FMBbpFIBDQyJEjZTab9fbbb0e7HAAAjsswDFVUt6g0PNXrml01avWGrjlMibPqaxf3j6wZkZ2REOVqgd6DUIFuYbFYtG3btmiXAQDAMWpbvFpdHjoTUVrm0WcNoclKrGaTxuSky+20y+W0a2RWmixmpnoFzgShAgAA9Cnt/oA27K0Lr17t0dbPvmxpGuJI1Dcn5crtdGhifqaSYjkUArpDj/0/6fCiMD3kkg/0cId/T1hMCADOP4ZhqPyLZq0KXxexdletDvlCLU1pCTZdPWKAip12uZwODUqLj3K1QN/UY0OF2WyWzWZTTU2NMjMzOVjECRmGoZqaGtlsNqaxBYDzhKe5PdzSFAoSBxvbJUk2i0mX5KZHrou4eGAqLU3AOdBjQ4Uk5eTkqLKyUrW1tdEuBT2czWZTTk5OtMsAAJwlbb5QS9Oq8HUR2z5rjGwr6JcUPhvh0PjBGUqkpQk453r0/3UxMTEqKChQMBikDQonZDKZOEMBAH2MYRjacbBJJTs9Kin3aN3uGrX5gpKkjMQYTRs1MLxmhF0DUmlpAqKtR4eKwzhgBACg7/uiqS3U0hQOEtVNoZamGItZY/O+bGkaNiBFZlqagB6lV4QKAADQ97T5Alq3u1al5R6t2lmtTw80RbYNvSBZ14fPRkwYnKn4GBY2BXoyQgUAADgngkFDnx5oUkl44bl1e2rl9YdamuxJMZpeNDCygvUFKXFRrhbA6SBUAACAs+ZgY5tKyjwqLatWablHnmavJCnGataEwRlyFdjldjrvMFHqAAAgAElEQVR0Yf9kWpqAXoxQAQAAus0hb0Brd9eEg4RHOw5+2dJ00YAUzRiTJbfTrnF5GYqz0dIE9BWECgAAcMaCQUPbP2+MrBexfk+dvIFQS5MjOVYzxgyS22nXlAK7+iXT0gT0VYQKAABwWj5vOBQOER6tLveotiXU0hRnM2tSfqbczlBLU+EFSSxeC5wnCBUAAOCkWtr9kZamkjKPyr9ojmy7eGCKZo/Nlttp1yW56bQ0AecpQgUAAOggEDS07bOGSEvThr118gVCi9D2T4nTrEuy5Aq3NNmTYqNcLYCegFABAAC0v/6QSnZWq6Q81NJU3+qTJMXbLJEZmtxOuwr60dIE4FiECgAAzkPN7X59WFETWjOi3KNd1S2SJJNJGjEoVXMm2OUqcGhMbppirbQ0ATg5QgUAAOeBQNDQx1X1kaleN1bWyR8MtTQNTI3T18dmy11o1+R8uzISY6JcLYDehlABAEAfta+2NXJdxOpyjxrb/JKkxBiLvjLUEWprKnRoiD2RliYAXUKoAACgj2hs82lNRY1Kw0FiT02rJMlskkZkpak4PNVrUXaaYqzmKFcLoC8hVAAA0Ev5A0FtCbc0lZR5tHlfvQLhlqZBafG6eXyO3E67JudnKi2BliYAZw+hAgCAXmRvTYtWlXlUWlatDypq1BRuaUqKteqyC/up2GmXy+lQXmYCLU0AzhlCBQAAPVhDq08fVHhUUh5qadpXe0hSqKWpKDtNLqdDxU67RmWnyWahpQlAdBAqAADoQXyBoDbvq4+sGbFlX73CHU3KzUzQnAk5cjsdmpSfqdR4W3SLBYAwQgUAAFFkGIZ2e1pUWu7Rqp0efbirRs3toZam5DirrhjWXy6nXW6nXbmZiVGuFgCOj1ABAMA5Vt/q1ery8MJzZR7trw+1NFnMJo3OTpPb6ZDLadeorFRZaWkC0AsQKgA
AOMu8/qA2VtZFpnr9eH+DjHBL02B7om6blCtXgV0T8zOVEkdLE4Deh1ABAEA3MwxDFdUtkTMRH+6qUas3IElKjbfpquH9Q2cjCuzKzkiIcrUA0HWECgAAukFti1el5aGpXkvKPPq8oU2SZDWbNCY3PTLV64hBqbKYmeoVQN9CqAAA4Ay0+wPasLcuvPBctbZ91hhpacp3JOpbk/Pkdto1YUimkmL5cwugb+NfOQAAOsEwDJV90axVO6tVWu7R2l21OuQLtTSlJ9h0zYgBcofPRgxKi49ytQBwbhEqAAA4AU9zu1aHp3otLa/WwcZ2SZLNYtIluelyOx0qdjp08cAUmWlpAnAeI1QAABDW5gto/Z46lZRXq2SnR9s/b4xsc/ZL0jUjBsrttGv84Awl0tIEABH8iwgAOG8ZhqFPDzSptMyjVWXVWre7Vu3+oCQpMzFG00YNDLc02TUglZYmADiRToWKefPmacWKFdq7d682bdqkoqKiE+5rGIa++tWvauPGjaqvr++2QgEA6A5fNLWptMwTWjOi3KPqplBLU4zFrHGD0yNTvQ4bQEsTAHRWp0LFzJkztWDBArlcrlPu+9RTTyk/P18bN27scnEAAHRVmy+gdbtrI2tGfHqgKbJt6AXJun7UQLkLHRqfl6H4GEsUKwWA3qtToaK4uLhTg23btk3Lly/XSy+9pGXLlnWpMAAAzkQwaOiTA40qCZ+NWLenVt5wS5M9KVY3jB4kV0GopemClLgoVwsAfUO3XVPh8/l0991361e/+pUsFj7pAQCcOwcb2yLrRZSWeVTT4pUkxVrNmjA4Q64Cu4oLHbqwf7JMJlqaAKC7dVuoeOyxxzRjxgxddNFF2rNnzyn3X7RokRYtWhS539zc3F2lAAD6uFavX2t316okPNXrzoNf/g25aECKZl6SJZfTrnF5GYqz8UEXAJxtJsM4vP7nqeXl5Wn58uXHvVDb7XarsrJSJpNJfr9fn332mXJycvTRRx/J4XCccuysrCxVVVWdXvUAgPNCMGho22eNkaleN+ytkzcQamnqlxwrl9OuYqdDUwrsciTHRrlaAOgbTuf4vNvOVJSUlERu79mzR0VFRZ06YwEAwPF8Vn8oMkPT6nKPasMtTXE2syblZ8rttMvtdKjwgiRamgAgyjoVKu699169+eabOnDggK688kolJyervLxcd911l6ZNm6Zp06ad7ToBAH1cS7tfa3fXaNXO0LURFdUtkW0XD0zR7LHZKnbaNSY3nZYmAOhhTqv96Wyi/QkAzi+BoKGt+xsiU71urKyTLxD6k9Q/JS6y6NyUArvsSbQ0AcC5FpX2JwAATqWqrjXU0lTm0eoKj+pbfZKkeJtFroJQO5PbaVdBP1qaAKA3IVQAAM6apjafPtxVG5nqdZcn1NJkMkkjBqXqG+NDQWJMbppirbQ0AUBvRagAAHQbfyCoj/c3hM9GVGtTZb38wVBL08DUOH19bLbchXZNzrcrIzEmytUCALoLoQIA0CX7alu1KnwmYnW5R41tfklSYoxFXxnqCLU1FTo0xJ5ISxMA9FGECgDAaWls82lNRU3kAuu9Na2SJLNJGpGVpuLwVK9F2WmKsZqjXC0A4FwgVAAATsofCGpLVb1W7fSotNyjzfvqFQi3NA1Ki9fN47Pldjo0OT9TaQm0NAHA+YhQAQDowDAM7a1pVUm5RyU7q7WmokZN7aGWpqRYqy67sJ+KnXa5nA7lZSbQ0gQAIFQAAKSGVp8+qPBoVZlHpeXV2ld7SFKopakoO00up0PFTrtGZafJZqGlCQDQEaECAM5DvkBQmyrrVVpWrVVlHn1cVa9wR5NyMhI0Z0KO3E67JuXblRpvi26xAIAej1ABAOcBwzC029OikvDCcx/uqlFzuKUpOc6qK4b1l8tpl9tpV25mYpSrBQD0NoQKAOij6lq8Wl3hiaxgvb8+1NJkMZs0OjtNbqdDLqddo7JSZaWlCQDQBYQKAOgjvP6gNlbWRVav/nh
/g4xwS9Nge6JunZgrt9OuifmZSomjpQkA0H0IFQDQSxmGoYrq5g4tTa3egCQpNd6mq4b3D52NKLArOyMhytUCAPoyQgUA9CK1LV6VlntUGl547vOGNkmS1WzSmNx0ucOrV48YlCqLmaleAQDnBqECAHqwdn9AG/bUhdaMKKvWts8aIy1NQxyJ+tbkPLkKQi1NSbH8kw4AiA7+AgFAD2IYhsq+aNaqndUqLfdo7a5aHfKFWprSEmy6ZsQAucMLzw1Ki49ytQAAhBAqACDKPM3tWl3u0aqdoYXnDja2S5JsFpMuyU2X2+mQ22nXxQNpaQIA9EyECgA4x9p8Aa3fE5qlqaTMo+2fN0a2Ofsl6eoRA1TsdGj84Awl0tIEAOgF+GsFAGeZYRj69ECTSss8WlVWrXW7a9XuD0qSMhJjNG3UwHBLk10DUmlpAgD0PoQKADgLvmhqU2lZeOG5co+qm0ItTTEWs8YNTperINTSNGxAisy0NAEAejlCBQB0g0PegNbtqY1M9frpgabItqEXJOv6UQPlLnRofF6G4mMsUawUAIDuR6gAgDMQDBr65ECjSsJnI9btqZU33NJkT4rR9KKBoYXnnHZdkBIX5WoBADi7CBUA0EkHG9vCq1dXq7TMo5oWryQp1mrWhMEZoesiChy6sH8yLU0AgPMKoQIATqDV69fa3bUqCU/1uvNgc2TbRQNSdOMlWXI77RqXl6E4Gy1NAIDzF6ECAMKCQUPbPmtUSXm1SnZ6tGFvnbyBUEuTIzlWM8YMkttp15QCu/ol09IEAMBhhAoA57XP6g9Fpnr9oKJGteGWpjibWZPyM+V22uV2OlR4QZJMJlqaAAA4HkIFgPNKS7tfa3fXaNXO0LURFdUtkW0XD0zR7LHZcjvtuiQ3nZYmAAA6iVABoE8LBA1t3d8QWb16Y2WdfAFDktQ/JU4zw9dFTCmwy54UG+VqAQDonQgVAPqcqrrW0KJzZR6trvCovtUnSYq3WeQqCLUzuZ12FfSjpQkAgO5AqADQ6zW1+fThrtrIVK+7PKGWJpNJGjEoVd8YHwoSY3LTFGulpQkAgO5GqADQ6/gDQX28vyF8NqJamyrr5Q+GWpoGpsbp62Oz5S60a3K+XRmJMVGuFgCAvo9QAaBX2FfbqlXhMxGryz1qbPNLkhJjLPrKUEeoranQoSH2RFqaAAA4xwgVAHqkxjafPiivUWl56ALrvTWtkiSzSRqRlaZip12uArtG56QrxmqOcrUAAJzfCBUAegR/IKgtVfVatdOj0nKPNu+rVyDc0jQoLV43j8+W2+nQ5PxMpSXQ0gQAQE9CqAAQFYZhaG9Nq0rKPSrZWa01FTVqag+1NCXFWnXZhf1CZyOcDuVlJtDSBABAD0aoAHDONLT69EGFR6vKPCotr9a+2kOSQi1NRdlpcoWnei3KTpPNQksTAAC9BaECwFnjCwS1qbJepWXVWlXm0cdV9Qp3NCknI0FzJuTI7bRrUr5dqfG26BYLAADOGKECQLcxDEO7PS0qCS889+GuGjWHW5qS46y6Ylh/uZx2uZ125WYmRrlaAADQXQgVALqkrsWr1RWeyArW++tDLU0Ws0mjs9Pkdjrkcto1KitVVlqaAADokwgVAE6L1x/Uxsq6yOrVH+9vkBFuaRpsT9Rtk3LlKrBrYn6mUuJoaQIA4HxAqABwUoZhqKK6uUNLU6s3IElKjbfpquH9Q2cjCuzKzkiIcrUAACAaCBUAjlHb4lVpeKrX0nKPPm9okyRZzSaNyU2PTPU6YlCqLGamegUA4HxHqACgdn9AG/bUhdaMKKvWts8aIy1N+Y5EfWtyntxOuyYMyVRSLP9sAACAjjg6AM5DhmGo7ItmrQqfiVi7q1aHfKGWpvQEm64ZMUDF4QusB6bFR7laAADQ0xEqgPOEp7ldq8s9WrUztPDcwcZ2SZLNYtLY3Ay5nHYVOx26eGCKzLQ0AQCA00CoAPqoNl9A6/eEZmkqKfNo++eNkW3Ofkm6ZsT
AcEtThhJi+KcAAACcOY4kgD7CMAx9eqBJpWUerSqr1rrdtWr3ByVJmYkxur5ooFwFdrmdDvVPjYtytQAAoC8hVAC92BdNbZFF50rLPapuCrU0xVjNGpeXHpnqddgAWpoAAMDZQ6gAepFD3oDW7alVabil6dMDTZFtF/ZP1vSigXI5HRqfl6H4GEsUKwUAAOcTQgXQgwWDhj450Bg6E1Hm0bo9tfKGW5rsSbG6YfQguZ12uQrs6pdCSxMAAIgOQgXQwxxsbAuvXl2t0jKPalq8kqRYq1kTBmdEpnq9sH+yTCZamgAAQPQRKoAoa/X6tXZ3rUrCU73uPNgc2TZsQIpmXpIlt9OhsXnpirPR0gQAAHoeQgVwjgWDhrZ91qiS8mqV7PRow946eQOhlqZ+ybG6cUyW3E67phTY5UiOjXK1AAAAp0aoAM6Bz+oPRaZ6/aCiRrXhlqY4m1mT8jPldtpVXOiQs18SLU0AAKDXIVQAZ0FLu19rd9do1c7QtREV1S2RbcMHpejr47LlLrDrkrx0xVppaQIAAL0boQLoBoGgoa37GyKrV2+srJMvYEiS+qfEadYlWXIXOjQlP1OZSbQ0AQCAvoVQAZyhqrrWyMJzqys8qm/1SZISYixyOx1yO+1yO+3Kd9DSBAAA+jZCBdBJTW0+fbirNjLV6y5PqKXJZJJGDkrVnAl2uZ0OjclJV4zVHOVqAQAAzh1CBXAC/kBQH+9vCJ+NqNamynr5g6GWpkFp8bppXLbcTocm52cqPTEmytUCAABED6ECOEJlTWtkqtcPKjxqbPNLkhJjLPrK0H6RlqbB9kRamgAAAMI6FSrmzZunFStWaO/evdq0aZOKioqO2efdd9/VQw89pObmZplMJl1zzTV64oknZDbTBoKeq+GQT2sqalRaHrrAem9NqyTJbJJGZqWp2GmXy+nQ6Jw02Sz8LgMAABxPp0LFzJkztWDBArlcrhPuk56erpdffllDhgxRW1ubpk6dqt/+9rf61re+1V21Al3mDwS1eV+9SsItTVuqGhQItzRlpcfr5vE5KnbaNTnfrtQEW5SrBQAA6B06FSqKi4tPuc/o0aMjt+Pi4lRUVKQ9e/accWFAdzAMQ3trWiNTva6pqFFTe6ilKTnWqq9e2E/uQofcBXblZibQ0gQAAHAGzso1FQcOHNCrr76qN95442wMD5xUQ6tPqys8kbMRVXWHJEkWs0lF2WlyFdhVXGjXqKw0WWlpAgAA6LJuDxWNjY267rrrtGDBAo0dO/aE+y1atEiLFi2K3G9ubu7uUnCe8AWC2lRZHzkb8XFVvcIdTcrNTNAtE3Pkdjo0KT9TKXG0NAEAAHS3bg0VTU1N+trXvqbrr79e8+fPP+m+8+fP77BPVlZWd5aCPswwDO3ytESmel1TUaMWb0CSlBJn1ZUX95fLaZe7wKGczIQoVwsAAND3dVuoaG5u1te+9jV97Wtf08KFC7trWECSVNfiDbU07fSotNyj/fWhliar2aTROWlyOx1yOe0aOSiVliYAAIBzrFOh4t5779Wbb76pAwcO6Morr1RycrLKy8t11113adq0aZo2bZqeeeYZrVu3Ti0tLXrttdckSbNmzdIPfvCDs/oG0Dd5/UFt2FsXmer1H/sbZIRbmobYE3XbpFy5nQ5NHJKhZFqaAAAAospkGIcP1aIrKytLVVVV0S4DUWIYhiqqm7VqZ6ilae3uWrWGW5pS421yFYQWnXM57cpKp6UJAADgbDud43NW1EbU1DS3q7TcE742wqMDjW2SQi1Nl+Smh1evdmj4oFRZzEz1CgAA0FMRKnDOtPsD2rCnTqvCF1hv+6wxsi3fkahvTc6T22nXhCGZSorlVxMAAKC34MgNZ41hGNp5sDky1eva3TVq8wUlSekJNl03aqDcBaGWpoFp8VGuFgAAAGeKUIFuVd3UrtXlHq0qq1ZpmUdfNLVLkmIs5lBLU2FoqteLB6bITEsTAABAn0CoQJe0+QL6aE9tePV
qjz75/MuWpsILknTtyIFyF9o1YXCGEmL4dQMAAOiLOMrDaTEMQ5983hSZ6nXd7lq1+0MtTZmJMbq+aGBozYgCu/qnxkW5WgAAAJwLhAqc0heNbSopCy06V1Lmkac53NJkNWt8XkZo9WqnXRf1p6UJAADgfESowDEOeQNau7smMtXrjoNNkW0X9k/WDaMHyuV0aHxehuJjLFGsFAAAAD0BoQIKBg1t/7wxfDaiWh/trpM3EGppsifF6obRg0ILzxXY1S+FliYAAAB0RKg4Tx1oaItM9bq63KOaFq8kKdZq1oQhGSp2OuRy2nVh/2SZTLQ0AQAA4MQIFeeJVq9fa3fVRqZ6LfuiObJt2IAUzbwkS26nQ2Pz0hVno6UJAAAAnUeo6KOCQUNbP2sIT/VarQ176+QLGJKkfsmxunFMlooL7Zqcb5cjOTbK1QIAAKA3I1T0IfvrD6n0iJamulafJCnOZtaUgtA1EcWFDjn7JdHSBAAAgG5DqOjFmtv9+rCiRqXhFax3VbdEtg0flKKbxufIXWDXJXnpirXS0gQAAICzg1DRiwSChv6xv0ElO6tVUu7Rxr118gdDLU0DUuM065IsuQsdmpKfqcwkWpoAAABwbhAqerh9ta3hReeqtbq8Rg2HQi1NCTEWFRc65A4vPJfvoKUJAAAA0UGo6GGa2nxaU1ETWcF6tyfU0mQySSMHpYZXr3ZoTE66YqzmKFcLAAAAECqizh8I6uP9DSrZGTobsWlfvQLhlqZBafG6aVy2XE67puTblZ4YE+VqAQAAgGMRKqKgsqY1sl7E6gqPmtr8kqTEGIsuHdov0tI02J5ISxMAAAB6PELFOdBw6HBLU2i618raVkmS2SSNzEpTsdMul9Oh0TlpslloaQIAAEDvQqg4C3yBoLbsq9eqMo9Ky6q1eV+9wh1NykqP183jc1TsDC08l5pgi26xAAAAQBcRKrqBYRjaU9Oq0rJqrSrz6MOKGjW1h1qakmOtmnrRBXIXOuQusCs3M4GWJgAAAPQphIozVN/q1QfhWZpKyqpVVXdIkmQxm1SUnRZevdquUVlpstLSBAAAgD6MUNFJvkBQmyrrVRI+G/GPqi9bmnIzE3TLxBy5ChyalJ+p1HhamgAAAHD+IFScgGEY2uVpUcnOapWWe7SmokYt3oAkKSXOqiuG9Ze70C53gUM5mQlRrhYAAACIHkLFEepavFpd4VHJztDCc/vrv2xpGpOTJrfTIZfTrpGDUmlpAgAAAMIIFWHPvFOmp1fulBFuaRpiT9Rtk3Lldjo0cUiGkuNoaQIAAACOh1ARNmxgiq4ePkAup12uAruyM2hpAgAAADqDUBF2+bALdPmwC6JdBgAAANDrcGEAAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC4hVAAAAADoEkIFAAAAgC7pVKiYN2+e8vLyZDKZtHnz5hPu96tf/UpOp1P5+fm6++675fP5uq1QAAAAAD1Tp0LFzJkzVVpaqtzc3BPus3v3bv37v/+7SkpKVF5eroMHD+qFF17otkIBAAAA9EydChXFxcXKyso66T6vvvqqpk2bpv79+8tkMum+++7TH/7wh24pEgAAAEDP1W3XVFRWVnY4k5GXl6fKysruGh4AAABADxW1C7UXLVqkrKysyH/Nzc3RKgUAAABAF3RbqMjJydHevXsj9/fs2aOcnJwT7j9//nxVVVVF/ktKSuquUgAAAACcQ90WKm688UatWLFCBw4ckGEYev7
553XTTTd11/AAAAAAeqhOhYp7771XWVlZqqqq0pVXXqmCggJJ0l133aUVK1ZIkoYMGaLHHntMU6ZMUUFBgRwOh+69996zVzkAAACAHsFkGIYR7SIkRUILAAAAgOg7neNzVtQGAAAA0CWECgAAAABdQqgAAAAA0CWECgAAAABdQqgAAAAA0CWECgAAAABdQqgAAAAA0CWECgAAAABdQqgAAAAA0CWECgAAAABdQqgAAAAA0CWECgAAAABdQqgAAAAA0CWECgAAAABdQqgAAAAA0CWECgAAAABdQqgAAAAA0CWECgAAAABdQqgAAAAA0CWECgAAAABdQqgAAAAA0CWECgAAAABdQqgAAAAA0CWECgAAAABdYo12AQAA9BSGYUhH/hcMyght6PCfETQkGcc+fvTzI49JMoJHPa7QGMHgsfvqiNc/+jHDkBEMhsc0jv+4jlPDUTUfr65I/ZFvyDE3jtpuHHHTOM52HXff4453vLE6M8ap6jlyjBPse6L3d8r3dDTT4a+mLx864vaRj3+58xGPd9j3yM0nGCNy+0TbjzPG6dRz5MOdeU/HrUedd7LvbWd2ME45wCn3MbphjFO/j1OPETM4T3FDh3ZioJ6DUAGgWxmGIfn9Mvx+GYGADJ9PCgRC9/0BKeCP3Db8R27zh24HDh/gBI86eDrO/cMHXIcPlo68Hx7jVPcjB4gnvR9+zWBQUvj+abzGSe9HxuzE/fDBpxE5CFTHA9jwXzLjuNtC9w0dtU1f3u6wTR2fb0QO4I4aN/KH0Tj2dY83to59/qnGPqbmY8Y+fs2RbYcfP/Lg/eh9g8Ez/I0HgO6Xcccdilvwr9Eu47QQKoBzxDCMjgfXfl/ooNvvDx2EH3lwHTkg94cOwsO3jUB4mz/8vMCXt0+2TQF/+PlHHsifYFtkvEDHcHDSbf7IfQ7OupnJJJnNoU/7TGbJbAp/GmgK3T68j0zhL0c8ZlJoP5NJRmS30L7G0ftGnnP49uHn6thPIo8Y2zi875HPPfL1TeHX6lCTOfLppXG8esNfOmyTcZx9dYL3YhzzuCEd8b07sj7zsa8Vqbvj97DD+zy8j456vMP36Igajtmmo8bXEc856nGz6cuf45Hfy/DP3zh6/CN+L4xTvqcjX1tHvJ9jP2yN/KzVcVvH20fci3zvjdBrHw5+h8cwjGM+Ef8yoh47XuS9G1+Occy+JlPH5xynRpl0bC0m05ch9qhP1g+P1+F9HvkejsN0nDMbR9Z1vO3H3DnyW3mC1wmdMQvvc2Q4j+xwgp/UCfc54rFT1GUyjvp5H2c8U4enHf89HONUZzZMJ9nBOPbnd0YvcorNRif26QzjJO8l5+IcXdD1lzinCBXoMyKfkPt8MrzeDl+DXq8Mr0+G7/DXY/fp+PXo/Y8z3uH7nRkv/Cl8j2azyWSxyGS1ymSxSFbrl7dtVpksVpliYmSKj5csFslqkWExf/nVEvoaNJskq0VBs0mGNXT/8ONBsxQM3w5Y1PGrWQqYFNon/OfHCN8OmkL3gzJkmKSgyYjsc/h2+PP8L7eZpICCof0NQ4HDY5iCCij06Xgg8tzQtoCCCpqCCoZfKyBD/7+9+41t66z7P/65zrGdxEnbdP2zhjru2LJCx7aWjv5+pZTdG1WlGwTZ1Eo8gIpZoqRU8AD1CbAHCCSIeICCENM9OjSFTRUTUtsbcaMhxJ9tDKSh9LcyujKxdixNsmV323Vdl7/28Tm/B8d2bMdJnF5OnYT3Szqyz7muc52vV6+9Pj7n2L78Qp9sri2rbK6PLy+3dzhWOLYf+MqaQNnAz43jy8+teya3bxDu68tX1vjKBn6h3iA/uZr2D87U2Qjg39Zsk7lq/veYKRHVy0yvp5pJq1vLQrCYpOIJ/d96FzFPhArMW+D7CiYm5E9O5ibN1UzWZ5+kl07+09WPW7atquspa8Fxwgl2NDrt0WlpkYlGpWhEQTSiwHUU5CbeQcRVkJtkB64j3zXyHQbqLkoAABnnSUR
BVEe+m59s5x6NlI1IvslNtl2jrBMo65hwsupKngnkOYE8R8oWPc8YX54TKOP48hQoY3xlHF8Zk1Xa+PKMr7TxlTZZpU02bA88ZeXL871wCTxl/WzueVqen1sPPPnB8joT4RhHjhwZY+QaV8aYcFt+0dTzfB/HODIych1Xpqi90E9GUeOqwcwwbtk+xhg5cnLjmWnjFY9bPpYkmaLZhzGmsG5kStbDD6FnaMvtW9xWeJ7/9Lp4vehYxddXl7eVj1dx/NnaisabVmOFY007dlnbfJjZPhGdaZ95Hut6aptvXdf1+ov+vEseK/1ZF72/Km5X6Z9b+Thz9i0bv9I4s9U55z4VXtOctVRoq0qVXecz5rz6zuO9c73/39Tq+Nd9jAWoux7HaW1sXdDxFwKhYpkKgkBBOi1/bEzB2Jj88fFwGRuXPzaqIL8+mm8bC/sW+kxtD8bGwm3j4fZgYuLGvAjXLZ2wx6LhpD0Wk2lskROdalNhEu/Kj0TCT8gjrrLRcOLuRZ1wYu4aea6UiZhwEh4xyjiB0m6gtOOHj8bXpOtr0vGVdrKacLKaMJ4mTFYTjqdx42lSGWWyGU1mJ5XOppX208pkx5X231U6m1bGz9yY/0bl8vP9CidFIiYi13EVcSJyTfgYMZHw0Qnbom5UTZGmkj75ffJ9C/vmF1Ohj+MWtkedaMk+5fsX1+I6bmHSPtOk2jHO/Cb/+Ul9hX3LJ7QAAOD6ECrqLPC8ypP9wkQ+P9EvntiPVpj857YXTf6trm2PRuXE43KamuQ0NcltbVW0rU0m3iQn3iynsVFBLFqYvPsRR9mIo2zEyI+48lzJc428SPiYcQNlXCnjBkqbQJlIoEnjK+36SjvhBH7C8TRpsoXJ+2SQyU3WpybvGT+Tm8S/Fz7mFz9t+QehcCI+yxVKESeimBNTzI0p5sQUdaNqcBvUEmsprBfa3aJ+TlRRNzqvyXe+X9REpweBOSbylSbs+Yk0AADAQiBUVCEIgqnJfu7T/WB8bNrkv+ST/nzb+Fg40S+Z/E+dPQjSFpNhY+Q0NcnE41MBYPVNcuLxcPLfVLQ93iTTFJffGFU6ZpSOuZqIBhrPLWORrEZcT+9FMnrPSWskmNBoZrSwjGRGNJp+T6PeWxpNj2rMG1M2qPIeAT+3zPPD+4iJhBP13OQ85oYT9MZIo1a5qxR1oqWT99ykvsFtCPsWT/KLQkB5/5gb7lOpf6HNiRYuNQEAAEApQkXOu//zP3r3v/+76HKgqSAQjI9bXatvYrGSiX5k7drCGQCnOS7TVBQA4k1TQaEpXM82RDUZk8YjgcZj0qjraSzia8RMajQ7ptH0qEa9UY2kRzTmjWkkPZILA8MayYxoLDNWePR8T5pQuFSpOdpcWNY1rdOmlZvUHGmemojnJuHTJua5EJCfmBdP1Es+0a/QP+bE5DrcgQYAALAUECpyvP/9X43//UzuU/24nJYWRdatK/2kPz/Rz/eJN4WBoCgA5PtnG6Iaj+YCQDChkaJP/Quf/Jdsu6qRzFAhAIxmRjU6MqqRd0bCIDBPTZEmtURb1Bxt1urG1UqsSKg52qyWaIvi0XihLb+0RFvUHGtWc6S58NgSa1FTpIlP6AEAADArE1T104ELL5FIaGhoqK41eL5XNtGfPvkv/uS/0hmC/OP1XOPf6DaGE/xYi+KRuFpiLSWT/OZY87QwUBIKcs/jkTif8gMAAMDKfObnnKnIefRvj+q/Xvqvee/X4DZMTexjLbq5+eaKE/2ZzhLktzVHmxVx+OMAAADA0sMsNqdjdYf+85b/nD0AlJ9BiDYr6kbrXToAAABQV4SKnL2b9mrvpr31LgMAAABYcrgDFwAAAIAVQgUAAAAAK4QKAAAAAFYIFQAAAACsECoAAAAAWCFUAAAAALBCqAAAAABghVABAAAAwAqhAgAAAIAVQgUAAAAAK4QKAAAAAFYIFQAAAACsECoAAAAAWCFUAAAAALBCqAAAAABghVABAAAAwAqhAgA
AAIAVQgUAAAAAK4QKAAAAAFYIFQAAAACsECoAAAAAWCFUAAAAALBSdag4d+6cdu3apc2bN2vHjh06e/bstD6+7+vIkSO64447dPfdd+v+++/X+fPna1owAAAAgMWl6lBx6NAhdXV16dVXX9XXv/51pVKpaX1+9atf6S9/+Yteeukl/f3vf9eePXv08MMP17JeAAAAAItMVaHi4sWLOnXqlA4cOCBJ2r9/vwYHB6edhTDGaHJyUhMTEwqCQNeuXVMikah91QAAAAAWjUg1nQYHB9XW1qZIJOxujFEymdTAwIA6OjoK/T7zmc/omWee0YYNG7RixQpt3LhRzz333MJUDgAAAGBRqOmN2qdOndLLL7+sN954Q2+++ab27NmjL3/5yxX79vT0KJFIFJaRkZFalgIAAADgBqkqVLS3t2t4eFie50mSgiDQwMCAkslkSb8nn3xSn/jEJ9Ta2irHcfTQQw/pmWeeqTjmkSNHNDQ0VFhaWlosXwoAAACAeqgqVKxfv17bt2/XsWPHJEknTpxQIpEoufRJkm699Vb98Y9/VDqdliT9+te/1p133lnjkgEAAAAsJlXdUyFJR48eVSqVUnd3t1auXKne3l5J0sGDB9XZ2anOzk595Stf0SuvvKKtW7cqGo1qw4YN+slPfrJgxQMAAACoPxMEQVDvIiQpkUhoaGio3mUAAAAA0Pzm5/yiNgAAAAArhAoAAAAAVggVAAAAAKwQKgAAAABYIVQAAAAAsEKoAAAAAGCFUAEAAADACqECAAAAgBVCBQAAAAArhAoAAAAAVggVAAAAAKwQKgAAAABYIVQAAAAAsEKoAAAAAGCFUAEAAADACqECAAAAgBVCBQAAAAArhAoAAAAAVggVAAAAAKwQKgAAAABYIVQAAAAAsEKoAAAAAGCFUAEAAADACqECAAAAgBVCBQAAAAArhAoAAAAAVggVAAAAAKwQKgAAAABYIVQAAAAAsEKoAAAAAGCFUAEAAADACqECAAAAgBVCBQAAAAArhAoAAAAAVggVAAAAAKwQKgAAAABYIVQAAAAAsEKoAAAAAGCFUAEAAADACqECAAAAgBVCBQAAAAArhAoAAAAAVggVAAAAAKwQKgAAAABYIVQAAAAAsEKoAAAAAGCFUAEAAADACqECAAAAgBVCBQAAAAArhAoAAAAAVggVAAAAAKwQKgAAAABYIVQAAAAAsEKoAAAAAGCFUAEAAADACqECAAAAgBVCBQAAAAArhAoAAAAAVggVAAAAAKwQKgAAAABYqTpUnDt3Trt27dLmzZu1Y8cOnT17tmK/M2fO6L777tOWLVu0ZcsWnTx5smbFAgAAAFh8ItV2PHTokLq6upRKpXT8+HGlUin19fWV9BkbG9MDDzygJ598Urt371Y2m9WVK1dqXjQAAACAxaOqMxUXL17UqVOndODAAUnS/v37NTg4qPPnz5f0+/nPf66dO3dq9+7dkiTXdbVu3boalwwAAABgMakqVAwODqqtrU2RSHhiwxijZDKpgYGBkn7/+Mc/1NDQoE9/+tPatm2bvvCFL+jSpUu1rxoAAADAolHTG7U9z9Pvf/97HT16VKdPn9bGjRt1+PDhin17enqUSCQKy8jISC1LAQAAAHCDVBUq2tvbNTw8LM/zJElBEGhgYEDJZLKkXzKZ1P3336+NGzfKGKMDBw7ohRdeqDjmkSNHNDQ0VFhaWlosXwoAAACAeqgqVKxfv17bt2/XsWPHJEknTpxQIpFQR0dHSb/Pfvaz6uvr07Vr1yRJTz/9tLZu3VrjkgEAAAAsJlV/+9PRo0eVSqXU3d2tlStXqre3V5J08OBBdXZ2qrOzU8lkUg8//LB27dolx3G0ceNGPfbYYwtWPAAAAID6M0EQBPUuQpISiYSGhobqXQYAAAAAzW9+zi9qAwAAALBCqAAAAABghVABAAAAwAqhAgAAAIAVQgUAAAAAK4QKAAAAAFYIFQAAAACsECoAAAAAWCFUAAAAALBCqAA
AAABghVABAAAAwAqhAgAAAIAVQgUAAAAAK4QKAAAAAFYIFQAAAACsECoAAAAAWCFUAAAAALBCqAAAAABghVABAAAAwAqhAgAAAIAVQgUAAAAAK4QKAAAAAFYIFQAAAACsECoAAAAAWCFUAAAAALBCqAAAAABghVABAAAAwAqhAgAAAIAVQgUAAAAAK4QKAAAAAFYIFQAAAACsECoAAAAAWCFUAAAAALBCqAAAAABghVABAAAAwAqhAgAAAIAVQgUAAAAAK4QKAAAAAFYIFQAAAACsECoAAAAAWCFUAAAAALBCqAAAAABghVABAAAAwAqhAgAAAIAVQgUAAAAAK4QKAAAAAFYIFQAAAACsECoAAAAAWCFUAAAAALBCqAAAAABghVABAAAAwAqhIu/dIWn4JWn8nXpXAgAAACwpkXoXsGi89JT0x++GzxtWSa1JafWm8LE1KbVumtrWsKK+tQIAAACLCKEi7/3/Id3nS1cHpKsXwuWfv5GC7PS+TatLQ0brpqn11nYp1nzj6wcAAADqhFCR1/5/wqVY1pOuvZELGQPh8s6FqeAx/JKkYPpYzeumn91oTUqtt0irElK08Ua8IgAAAOCGIFTMxo2EgWD1psrtXlp6d7Do7EZZ8Hjj/1Xer2VDUdAoCx6r2iU3unCvCQAAAKgxQoWNSExac1u4VJIZD28Af+dCUejIPb79mjT41+n7GEda8b6Z7+dY8b4w7AAAAACLBLPThRRtktbeHi6VpEfLzm4UBY+L/5Au/GX6Pk5EWrmx7H6OouCxok1y+FIvAAAA3DiEinqKNUvrt4RLJRPXZr60avglqf/56fu4sfC+jZluJG9ZLxmzsK8LAAAA/1YIFYtZ40ppw53hUi4IpImrRTeOF4WPdy6El1b965np+0UaK19Wlb+RPH4ToQMAAADzUnWoOHfunB566CFdvnxZq1at0s9+9jN96EMfqtg3CALt2bNHL774oq5evVqzYlHEmPCrbZtWS+/bNr09CKSxt8OgUSl49P9Z8n4/fb9o88z3c7Qmw+MBAAAARaoOFYcOHVJXV5dSqZSOHz+uVCqlvr6+in1/+MMf6rbbbtOLL75Ys0IxT8ZIzWvDZeM909t9Xxq9VHofR/HX5Z7/g+Rnpu+X/2HA1vbwUqp47hj5x/zz+JrwRnYAAAAseyYIggo/tFDq4sWL6ujo0JUrVxSJRBQEgdra2vTnP/9ZHR0dJX3Pnj2rw4cPq7e3V/fcc0/VZyoSiYSGhoau71Wg9nxfem+47AxHUfB4d6jyDwMWa1w1c+gofx5fSwgBAABYROYzP6/qTMXg4KDa2toUiYTdjTFKJpMaGBgoCRWZTEZf+tKX9Pjjj8t13VnH7OnpUU9PT2F9ZGSkqoJxgziOtGpjuGz66PR23w/v6Ri9HJ7xGLscPh97O1wfvZzb9rZ05XVp6NTcIaRhldS8ZubQ0bwm/GHB/LZIw8K8dgAAAMxLTW/U/s53vqN9+/Zpy5Yt6u/vn7XvkSNHdOTIkcJ6IpGoZSlYaI4T3tQdv0lat3nu/vkQUil0lIeSqxekN1+UfG/2MWMrysJHWegoDyf8kjkAAMCCqCpUtLe3a3h4WJ7nFS5/GhgYUDKZLOn33HPPaWBgQI888og8z9O1a9d0yy23qK+vT+vWrVuQF4AlojiEzPS7HcXy3241LXTkHotDydVB6c3TVYSQlgpnP2Z5Hm2qzWsHAABY5qoKFevXr9f27dt17NgxpVIpnThxQolEYtr9FM8/P/W7Cf39/dq2bducZyyAioq/3Wptx9z9g0CaeLfCmZCyMDJ2Wbr2Rvg7H5VuRC8WbZ77PpDibbF4bV47AADAElP15U9Hjx5VKpVSd3e3Vq5cqd7eXknSwYMH1dnZqc7OzgUrEpiTMVJTa7isuW3u/kEgTV6bHjpGL+WCSX7bJenasPTWGSmbnn3MaHzm0BFryS3x8EcPo83hY/ESaeLX0AEAwJJU1bc/3Qh8+xMWtSCQJt+
rHDpG364cSrKT8z9OtLlC8IiHgSQaLw0h09Zn6e/M/sUJAAAA5Wr+7U/Avz1jwl84b1xZ/ZmQ9EguZFwJA0l6VMqMhdvTY7n10fAxndueyW3PL+NXcs9HpMC//vojjbmQUXy2JL8+V3CpdIYlt92NXn9NAABg2SBUAAvBGKlhRbjcdKv9eEEgeZPVBZGS9hn6v/fWVP+57i2ZjROdRwipcNlXpf7RRsmNSU4k/O8IAAAWPUIFsBQYE062o42S1tR2bC+dCx5jU2dFpgWVKs+wjF0JfxwxMyZ5E5aFmTBcRBrCMyJu7jHSUPY83xYLf0DRjRXtV7yef168b6zCMaroS+ABAKAEoQL4dxfJTaKbVtd23Kw3FTbKQ8lswcWbDG+K9yalbCa8NyWbmdqezYWg8dzz4va5fmCxlioFmfKAUjGclIeXKsYp6VsUcpyIZJypxXFL18uXae0EIwBAbRAqACwMNyK5uftQbhQ/WyGQpMOzMdmipWJ7hfAyV7gpGXsyd9ZnPPx64/J95/odlbows4QOIxl3hvb8fpXai8ecb3t5EJpPu6kQmspfT9GxC6/dVFivtK3Cusl9W9ucffLPVUWf4nHmOP68ai7eNlPN+aBpKj8SQgHMglABYPlwXMlpWpw/XOj7pWGkPHR4Fc68zBVufC+8gb/S4mevr21aexCeAarYXtyWlbxMWZs/w75B0bZsaZsWxRcSYk4zBI9Z23LthW2V+s6xf8U+muUY86lRVe5fqU/RvoW28vHK/xPOd7/ybde73zzrWrAaytoq1jCfPjP1u57j3Yi65hjr1v+QtnymwliLF6ECAG4Ex5Gc/H0xmFEQzBFI5gosM7TnA0vx+ApK+5evF7bNss+0bUEVfYrHneX4Kg5fqqJPfpuq6BPMMK5f9BrKHvN/PoVtFdYr7Tfj/hXGmbHPfI4xS59Zxy4//gzjzPS6i9/DxdtKvrm/fFswrWn++83nePMcC/UTbSRUAABw3QqX2TjinyhgEQlqGWzK2iodZ1596jFWpd1qWNdiPOM+B/7GBgAAwOzMbJcwAeFHQQAAAABw3QgVAAAAAKwQKgAAAABYIVQAAAAAsEKoAAAAAGCFUAEAAADACqECAAAAgBVCBQAAAAArhAoAAAAAVggVAAAAAKwQKgAAAABYIVQAAAAAsEKoAAAAAGCFUAEAAADACqECAAAAgBVCBQAAAAArhAoAAAAAVggVAAAAAKwQKgAAAABYMUEQBPUuQpIaGhq0bt26utYwMjKilpaWutaA5Yn3FhYC7yssBN5XWCi8t5aeS5cuaXJysqq+iyZULAaJREJDQ0P1LgPLEO8tLATeV1gIvK+wUHhvLW9c/gQAAADACqECAAAAgBX329/+9rfrXcRi8tGPfrTeJWCZ4r2FhcD7CguB9xUWCu+t5Yt7KgAAAABY4fInAAAAAFYIFQAAAACsECpyzp07p127dmnz5s3asWOHzp49W++SsMRNTEzowQcf1ObNm7V161bt3btX58+fr3dZWEZ6e3tljNEvf/nLepeCZWJyclJf/epXdfvtt+uuu+7SgQMH6l0SloGnn35a27dv17Zt23TnnXfqiSeeqHdJWACRehewWBw6dEhdXV1KpVI6fvy4UqmU+vr66l0Wlriuri598pOflDFGjzzyiA4ePKhnn3223mVhGejv79dPf/pT7dy5s96lYBn5xje+IWOMXn31VRlj9NZbb9W7JCxxQRDowIEDevbZZ3X33Xerv79fH/zgB7Vv3z6tWLGi3uWhhjhTIenixYs6depU4ROZ/fv3a3BwkE+VYaWxsVGf+tSnZIyRJO3cuVP9/f31LQrLgu/7OnjwoH784x+roaGh3uVgmRgdHdXjjz+u733ve4W/tzZs2FDnqrAcGGN09epVSdK1a9e0Zs0a/u5ahggVkgYHB9XW1qZIJDxxY4xRMpnUwMBAnSvDcvKjH/1IDzzwQL3LwDLQ09Ojj33sY7rnnnvqXQqWkddee00
33XSTuru79ZGPfEQf//jH9Yc//KHeZWGJM8boF7/4hfbt26dNmzZp9+7deuKJJxSLxepdGmqMy5+AG6C7u1vnz5/nH2hYe/nll3XixAn96U9/qncpWGY8z9OFCxd0xx136Pvf/75Onz6tvXv36uzZs7r55pvrXR6WKM/z9N3vflcnT57Uvffeq76+PnV2durMmTNau3ZtvctDDXGmQlJ7e7uGh4fleZ6k8Pq/gYEBJZPJOleG5eAHP/iBTp48qd/85jeKx+P1LgdL3PPPP6/+/n7dfvvtuuWWW/TCCy+oq6tLjz76aL1LwxKXTCblOI4+//nPS5I+/OEP6/3vf7/OnDlT58qwlP3tb3/Tm2++qXvvvVeStGPHDiUSCZ0+fbrOlaHWCBWS1q9fr+3bt+vYsWOSpBMnTiiRSKijo6POlWGp6+np0VNPPaXf/e53am1trXc5WAYOHz6s4eFh9ff3q7+/Xzt37tRjjz2mw4cP17s0LHFr167Vnj179Nvf/laS9Prrr+v111/Xli1b6lwZlrL8B7evvPKKJOn8+fN67bXX9IEPfKDOlaHW+EXtnH/+859KpVJ6++23tXLlSvX29uquu+6qd1lYwoaGhtTe3q5bb7218A0XDQ0N+utf/1rnyrCc3Hffffra176mBx98sN6lYBn417/+pS9+8Yu6fPmyHMfRt771Le3fv7/eZWGJe+qpp9Td3S3HceT7vr75zW/qc5/7XL3LQo0RKgAAAABY4fInAAAAAFYIFQAAAACsECoAAAAAWCFUAAAAALBCqAAAAABghVABAAAAwAqhAgAAAIAVQgUAAAAAK4QKAAAAAFb+PzaYKIHpYnQ0AAAAAElFTkSuQmCC\n",
+      "text/plain": [
+       "<matplotlib.figure.Figure at 0x7f9c64f0ad10>"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "import matplotlib.pyplot as plt\n",
+    "%matplotlib inline \n",
+    "\n",
+    "plt.figure(num=1, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k')\n",
+    "plt.plot(range(epochs), logs['eps'])\n",
+    "plt.plot(range(epochs), logs['loss'])\n",
+    "plt.plot(range(epochs), logs['train_acc'])\n",
+    "plt.plot(range(epochs), logs['test_acc'])\n",
+    "\n",
+    "plt.legend(['\\delta = 1e-5', 'Training loss', 'Training accuracy','Test accuracy'], loc='best')\n",
+    "plt.show()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.12"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/example/private-learning/example_dpdl.py b/example/private-learning/example_dpdl.py
new file mode 100644
index 00000000000..fe814c8efac
--- /dev/null
+++ b/example/private-learning/example_dpdl.py
@@ -0,0 +1,212 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+# An example to train a deep learning model with differential privacy
+# Author: Yu-Xiang Wang
+
+
+# import packages for DP
+from pydiffpriv import cgfbank, dpacct
+
+# import packages needed for deep learning
+import mxnet as mx
+from mxnet import nd, autograd
+from mxnet import gluon
+import dpdl_utils
+
+# Computation context; switch to mx.gpu() if a GPU is available.
+ctx = mx.cpu()
+
+
+# ## Get data:  standard MNIST
+
+
+mnist = mx.test_utils.get_mnist()
+num_inputs = 784
+num_outputs = 10
+batch_size = 1 # this is set to get per-example gradient
+
+
+
+# Training iterator with batch size 1 so each gradient can be clipped
+# individually (required for differential privacy).
+train_data = mx.io.NDArrayIter(mnist["train_data"], mnist["train_label"],
+                               batch_size, shuffle=True)
+# Larger-batch iterators used only for evaluating accuracy/loss.
+test_data = mx.io.NDArrayIter(mnist["test_data"], mnist["test_label"],
+                              64, shuffle=True)
+train_data2 = mx.io.NDArrayIter(mnist["train_data"], mnist["train_label"],
+                               64, shuffle=True)
+
+
+# ## Build a one hidden layer NN with Gluon
+
+
+
+# Single-hidden-layer MLP: 784 -> 1000 (ReLU) -> 10.
+num_hidden = 1000
+net = gluon.nn.HybridSequential()
+with net.name_scope():
+    net.add(gluon.nn.Dense(num_hidden, in_units=num_inputs,activation="relu"))
+    net.add(gluon.nn.Dense(num_outputs,in_units=num_hidden))
+
+# get and save the parameters
+params = net.collect_params()
+params.initialize(mx.init.Xavier(magnitude=2.24), ctx=ctx)
+# 'write' overwrites the stored gradient on each backward pass; the
+# per-example accumulation is done manually via dpdl_utils below.
+params.setattr('grad_req', 'write')
+
+# define loss function
+softmax_cross_entropy = gluon.loss.SoftmaxCrossEntropyLoss()
+
+# ## Use a new optimizer called privateSGD
+# Basically, we add Gaussian noise to the stochastic gradient.
+
+
+# define the update rule
+def privateSGD(x, g, lr, sigma,wd=0.0,ctx=mx.cpu()):
+    for (param,grad) in zip(x.values(), g):
+        v=param.data()
+        v[:] = v - lr * (grad +wd*v+ sigma*nd.random_normal(shape = grad.shape).as_in_context(ctx))
+# Utility function to evaluate error
+
+def evaluate_accuracy(data_iterator, net):
+    acc = mx.metric.Accuracy()
+    loss_fun = .0
+    data_iterator.reset()
+    for i, batch in enumerate(data_iterator):
+        data = batch.data[0].as_in_context(ctx).reshape((-1, 784))
+        label = batch.label[0].as_in_context(ctx)
+        output = net(data)
+        predictions = nd.argmax(output, axis=1)
+        acc.update(preds=predictions, labels=label)
+        loss = softmax_cross_entropy(output, label)
+        loss_fun = loss_fun*i/(i+1) + nd.mean(loss).asscalar()/(i+1)
+    return acc.get()[1], loss_fun
+
+
+# ## Now let's try attaching a privacy accountant to this data set
+
+
+
+# declare a moment accountant from pydiffpriv
+DPobject = dpacct.anaCGFAcct()
+
+# Specify privacy specific inputs
+thresh = 4.0 # limit the norm of individual gradient
+sigma = thresh
+
+delta = 1e-5
+
+func = lambda x: cgfbank.CGF_gaussian({'sigma': sigma/thresh}, x)
+
+
+# ## We now specify the parameters needed for learning
+
+#
+epochs = 10
+learning_rate = .1
+
+n = train_data.num_data
+batchsz = 100 #
+
+count = 0
+niter=0
+moving_loss = 0
+
+grads = dpdl_utils.initialize_grad(params,ctx=ctx)
+
+
+# ## Let's start then!
+
+
+# declare a few place holder for logging
+logs = {}
+logs['eps'] = []
+logs['loss'] = []
+logs['MAloss'] = []
+logs['train_acc'] = []
+logs['test_acc'] = []
+
+
+
+
+for e in range(epochs):
+    # train_data.reset()  # Reset does not shuffle yet
+    train_data = mx.io.NDArrayIter(mnist["train_data"], mnist["train_label"],
+                                   batch_size, shuffle=True)
+    for i, batch in enumerate(train_data):
+        data = batch.data[0].as_in_context(ctx).reshape((-1, 784))
+        label = batch.label[0].as_in_context(ctx)
+        with autograd.record():
+            output = net(data)
+            loss = softmax_cross_entropy(output, label)
+        loss.backward()
+
+        # calculate an moving average estimate of the loss
+        count += 1
+        moving_loss = .999 * moving_loss + .001 * nd.mean(loss).asscalar()
+        est_loss = moving_loss / (1 - 0.999 ** count)
+
+        # Add up the clipped individual gradient
+        dpdl_utils.accumuate_grad(grads, params, thresh)
+
+        #print(i)
+        if not (i + 1) % batchsz:  # update the parameters when we collect enough data
+
+            privateSGD(params, grads, learning_rate/batchsz,sigma,wd=0.1,ctx=ctx)
+
+            # Keep track of the privacy loss
+            DPobject.compose_subsampled_mechanism(func,1.0*batchsz/n)
+
+
+            dpdl_utils.reset_grad(grads)
+
+        if count % (10*batchsz) is 0:
+            print("[%s] Loss: %s. Privacy loss: eps = %s, delta = %s " % (((count+1)/batchsz),est_loss,DPobject.get_eps(delta),delta))
+            logs['MAloss'].append(est_loss)
+        ##########################
+        #  Keep a moving average of the losses
+        ##########################
+
+        if count % 60000 is 0:
+            test_accuracy, loss_test = evaluate_accuracy(test_data, net)
+            train_accuracy, loss_train = evaluate_accuracy(train_data2, net)
+
+            print("Net: Epoch %s. Train Loss: %s, Test Loss: %s, Train_acc %s, Test_acc %s" %
+                 (e, loss_train, loss_test,train_accuracy, test_accuracy))
+
+            logs['eps'].append(DPobject.get_eps(delta))
+            logs['loss'].append(loss_train)
+            logs['train_acc'].append(train_accuracy)
+            logs['test_acc'].append(test_accuracy)
+
+            learning_rate = learning_rate/2
+
+
+
+## Plot some figures!
+
+
+import matplotlib.pyplot as plt
+get_ipython().magic(u'matplotlib inline')
+
+plt.figure(num=1, figsize=(12, 8), dpi=80, facecolor='w', edgecolor='k')
+plt.plot(range(epochs), logs['eps'])
+plt.plot(range(epochs), logs['loss'])
+plt.plot(range(epochs), logs['train_acc'])
+plt.plot(range(epochs), logs['test_acc'])
+
+plt.legend(['\delta = 1e-5', 'Training loss', 'Training accuracy','Test accuracy'], loc='best')
+plt.show()
+


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services