Posted to commits@singa.apache.org by zh...@apache.org on 2022/09/01 05:20:09 UTC
[singa] branch dev updated: Update inline comments for cifar_distributed_cnn resnet_cifar10
This is an automated email from the ASF dual-hosted git repository.
zhaojing pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/singa.git
The following commit(s) were added to refs/heads/dev by this push:
new b1a8f2e7 Update inline comments for cifar_distributed_cnn resnet_cifar10
new f1327525 Merge pull request #996 from KimballCai/dev
b1a8f2e7 is described below
commit b1a8f2e70c162d9d7587a787a3e86ee9d2375455
Author: qingpeng <qi...@u.nus.edu>
AuthorDate: Wed Aug 31 21:09:42 2022 +0800
Update inline comments for cifar_distributed_cnn resnet_cifar10
---
examples/cifar_distributed_cnn/autograd/resnet_cifar10.py | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/examples/cifar_distributed_cnn/autograd/resnet_cifar10.py b/examples/cifar_distributed_cnn/autograd/resnet_cifar10.py
index 0d6379b5..a8e6efd9 100644
--- a/examples/cifar_distributed_cnn/autograd/resnet_cifar10.py
+++ b/examples/cifar_distributed_cnn/autograd/resnet_cifar10.py
@@ -138,7 +138,7 @@ def reduce_variable(variable, dist_opt, reducer):
return output
-# Function to sychronize SINGA TENSOR initial model parameters
+# Function to synchronize SINGA TENSOR initial model parameters
def synchronize(tensor, dist_opt):
dist_opt.all_reduce(tensor.data)
dist_opt.wait()
@@ -159,7 +159,7 @@ def train_cifar10(DIST=False,
nccl_id=None,
partial_update=False):
- # Define the hypermeters for the train_cifar10
+ # Define the hyperparameters for the train_cifar10
sgd = opt.SGD(lr=0.005, momentum=0.9, weight_decay=1e-5)
max_epoch = 5
batch_size = 32
@@ -199,7 +199,7 @@ def train_cifar10(DIST=False,
idx = np.arange(train_x.shape[0], dtype=np.int32)
if DIST:
- #Sychronize the initial parameters
+ # Synchronize the initial parameters
autograd.training = True
x = np.random.randn(batch_size, 3, IMG_SIZE,
IMG_SIZE).astype(np.float32)
@@ -220,7 +220,7 @@ def train_cifar10(DIST=False,
if ((DIST == False) or (sgd.global_rank == 0)):
print('Starting Epoch %d:' % (epoch))
- #Training phase
+ # Training phase
autograd.training = True
train_correct = np.zeros(shape=[1], dtype=np.float32)
test_correct = np.zeros(shape=[1], dtype=np.float32)
@@ -258,11 +258,11 @@ def train_cifar10(DIST=False,
flush=True)
if partial_update:
- # Sychronize parameters before evaluation phase
+ # Synchronize parameters before evaluation phase
for p in param:
synchronize(p, sgd)
- #Evaulation phase
+ # Evaluation phase
autograd.training = False
for b in range(num_test_batch):
x = test_x[b * batch_size:(b + 1) * batch_size]
@@ -275,7 +275,7 @@ def train_cifar10(DIST=False,
to_categorical(y, num_classes))
if DIST:
- # Reduce the evaulation accuracy from multiple devices
+ # Reduce the evaluation accuracy from multiple devices
test_correct = reduce_variable(test_correct, sgd, reducer)
# Output the evaluation accuracy
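For context, the synchronize helper whose comment this commit corrects implements the usual collective pattern in distributed SINGA: all-reduce the raw device tensor, then block until the collective completes. Below is a minimal, hypothetical usage sketch, not part of the patch. It assumes a CUDA-enabled SINGA build and a default-constructed opt.DistOpt; the example script itself wires in an explicit nccl_id and related settings. The helper's parameter is renamed to t here to avoid shadowing the tensor module.

    from singa import device, opt, tensor

    dev = device.create_cuda_gpu()  # assumption: CUDA-enabled SINGA build
    sgd = opt.SGD(lr=0.005, momentum=0.9, weight_decay=1e-5)
    sgd = opt.DistOpt(sgd)  # assumption: default setup; the script passes nccl_id

    # The pattern shown in the diff context: all-reduce the underlying
    # device tensor, then wait for the collective to finish.
    def synchronize(t, dist_opt):
        dist_opt.all_reduce(t.data)
        dist_opt.wait()

    # Sync one stand-in parameter so every rank starts from identical
    # values; after the all-reduce each rank holds the same summed tensor
    # (the full script may additionally average by world size).
    p = tensor.Tensor((3, 3), dev)
    p.gaussian(0.0, 0.1)
    synchronize(p, sgd)

Runs of this kind are typically launched with one process per GPU, e.g. mpiexec -n 2 python resnet_cifar10.py, so the all-reduce spans all ranks before training begins.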