You are viewing a plain-text version of this content; the canonical link to the original message was removed in the HTML-to-text conversion.
Posted to commits@mxnet.apache.org by zh...@apache.org on 2017/12/30 06:47:17 UTC
[incubator-mxnet] branch master updated: fix bayesian-methods
example (#8958)
This is an automated email from the ASF dual-hosted git repository.
zhasheng pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git
The following commit(s) were added to refs/heads/master by this push:
new 7e1c227 fix bayesian-methods example (#8958)
7e1c227 is described below
commit 7e1c2275898201fe4c41db0249817808cf3397a3
Author: Yizhi Liu <ja...@gmail.com>
AuthorDate: Fri Dec 29 22:47:13 2017 -0800
fix bayesian-methods example (#8958)
* fix sgld optimizer and its related example
* fix bdk
* fix data_loader for example bdk_demo.py
* no ssl verify for bdk_demo download
---
example/bayesian-methods/algos.py | 3 +--
example/bayesian-methods/bdk.ipynb | 20 ++++++++++----------
example/bayesian-methods/data_loader.py | 5 +++--
example/bayesian-methods/sgld.ipynb | 8 ++++----
4 files changed, 18 insertions(+), 18 deletions(-)
diff --git a/example/bayesian-methods/algos.py b/example/bayesian-methods/algos.py
index e47a18f..f7b3620 100644
--- a/example/bayesian-methods/algos.py
+++ b/example/bayesian-methods/algos.py
@@ -140,8 +140,7 @@ def SGD(sym, data_inputs, X, Y, X_test, Y_test, total_iter_num,
optimizer = mx.optimizer.create('sgd', learning_rate=lr,
rescale_grad=X.shape[0] / minibatch_size,
lr_scheduler=lr_scheduler,
- wd=prior_precision,
- arg_names=params.keys())
+ wd=prior_precision)
updater = mx.optimizer.get_updater(optimizer)
start = time.time()
for i in range(total_iter_num):
diff --git a/example/bayesian-methods/bdk.ipynb b/example/bayesian-methods/bdk.ipynb
index 8c98651..46436c1 100644
--- a/example/bayesian-methods/bdk.ipynb
+++ b/example/bayesian-methods/bdk.ipynb
@@ -25,6 +25,7 @@
},
"outputs": [],
"source": [
+ "from __future__ import print_function\n",
"import mxnet as mx\n",
"import mxnet.ndarray as nd\n",
"import numpy\n",
@@ -40,10 +41,9 @@
" origin = (\n",
" 'https://github.com/sxjscience/mxnet/raw/master/example/bayesian-methods/mnist.npz'\n",
" )\n",
- " print 'Downloading data from %s to %s' % (origin, data_path)\n",
- " context = ssl._create_unverified_context()\n",
- " urllib.request.urlretrieve(origin, data_path, context=context)\n",
- " print 'Done!'\n",
+ " print('Downloading data from %s to %s' % (origin, data_path))\n",
+ " urllib.request.urlretrieve(origin, data_path)\n",
+ " print('Done!')\n",
" dat = numpy.load(data_path)\n",
" X = (dat['X'][:training_num] / 126.0).astype('float32')\n",
" Y = dat['Y'][:training_num]\n",
@@ -136,7 +136,7 @@
" teacher_updater = mx.optimizer.get_updater(teacher_optimizer)\n",
" student_updater = mx.optimizer.get_updater(student_optimizer)\n",
" start = time.time()\n",
- " for i in xrange(total_iter_num):\n",
+ " for i in range(total_iter_num):\n",
" # 1.1 Draw random minibatch\n",
" indices = numpy.random.randint(X.shape[0], size=minibatch_size)\n",
" X_batch = X[indices]\n",
@@ -170,7 +170,7 @@
"\n",
" if (i + 1) % 2000 == 0:\n",
" end = time.time()\n",
- " print \"Current Iter Num: %d\" % (i + 1), \"Time Spent: %f\" % (end - start)\n",
+ " print(\"Current Iter Num: %d\" % (i + 1), \"Time Spent: %f\" % (end - start))\n",
" test_correct, test_total, test_acc = \\\n",
" sample_test_acc(student_exe, X=X_test, Y=Y_test, label_num=10,\n",
" minibatch_size=minibatch_size)\n",
@@ -183,11 +183,11 @@
" teacher_train_correct, teacher_train_total, teacher_train_acc = \\\n",
" sample_test_acc(teacher_exe, X=X, Y=Y, label_num=10,\n",
" minibatch_size=minibatch_size)\n",
- " print \"Student: Test %d/%d=%f, Train %d/%d=%f\" % (test_correct, test_total, test_acc,\n",
- " train_correct, train_total, train_acc)\n",
- " print \"Teacher: Test %d/%d=%f, Train %d/%d=%f\" \\\n",
+ " print(\"Student: Test %d/%d=%f, Train %d/%d=%f\" % (test_correct, test_total, test_acc,\n",
+ " train_correct, train_total, train_acc))\n",
+ " print(\"Teacher: Test %d/%d=%f, Train %d/%d=%f\" \\\n",
" % (teacher_test_correct, teacher_test_total, teacher_test_acc,\n",
- " teacher_train_correct, teacher_train_total, teacher_train_acc)\n",
+ " teacher_train_correct, teacher_train_total, teacher_train_acc))\n",
" start = time.time()\n"
]
},
diff --git a/example/bayesian-methods/data_loader.py b/example/bayesian-methods/data_loader.py
index 2649eb5..92ca0cf 100644
--- a/example/bayesian-methods/data_loader.py
+++ b/example/bayesian-methods/data_loader.py
@@ -29,8 +29,9 @@ def load_mnist(training_num=50000):
'https://github.com/sxjscience/mxnet/raw/master/example/bayesian-methods/mnist.npz'
)
print('Downloading data from %s to %s' % (origin, data_path))
- context = ssl._create_unverified_context()
- urllib.request.urlretrieve(origin, data_path, context=context)
+ ctx = ssl._create_unverified_context()
+ with urllib.request.urlopen(origin, context=ctx) as u, open(data_path, 'wb') as f:
+ f.write(u.read())
print('Done!')
dat = numpy.load(data_path)
X = (dat['X'][:training_num] / 126.0).astype('float32')
diff --git a/example/bayesian-methods/sgld.ipynb b/example/bayesian-methods/sgld.ipynb
index 197dd1e..3c1d008 100644
--- a/example/bayesian-methods/sgld.ipynb
+++ b/example/bayesian-methods/sgld.ipynb
@@ -141,14 +141,14 @@
" lr_scheduler=lr_scheduler,\n",
" wd=0)\n",
"updater = mx.optimizer.get_updater(optimizer)\n",
- "theta = mx.random.normal(0, 1, (2,), mx.cpu())\n",
+ "theta = mx.random.normal(0, 1, (2,), ctx=mx.cpu())\n",
"grad = nd.empty((2,), mx.cpu())\n",
"samples = numpy.zeros((2, total_iter_num))\n",
"start = time.time()\n",
- "for i in xrange(total_iter_num):\n",
+ "for i in range(total_iter_num):\n",
" if (i+1)%100000 == 0:\n",
" end = time.time()\n",
- " print \"Iter:%d, Time spent: %f\" %(i + 1, end-start)\n",
+ " print(\"Iter:%d, Time spent: %f\" %(i + 1, end-start))\n",
" start = time.time()\n",
" ind = numpy.random.randint(0, X.shape[0])\n",
" synthetic_grad(X[ind], theta, sigma1, sigma2, sigmax, rescale_grad=\n",
@@ -244,4 +244,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
-}
\ No newline at end of file
+}
--
To stop receiving notification emails like this one, please contact
commits@mxnet.apache.org.