Posted to commits@singa.apache.org by wa...@apache.org on 2015/05/09 15:43:53 UTC

[2/2] incubator-singa git commit: remove compile warnings

remove compile warnings


Project: http://git-wip-us.apache.org/repos/asf/incubator-singa/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-singa/commit/654d733b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-singa/tree/654d733b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-singa/diff/654d733b

Branch: refs/heads/master
Commit: 654d733ba73a304001de2cc3d1f115e95e094b97
Parents: 831efef
Author: wang wei <wa...@comp.nus.edu.sg>
Authored: Sat May 9 21:42:53 2015 +0800
Committer: wang wei <wa...@comp.nus.edu.sg>
Committed: Sat May 9 21:42:53 2015 +0800

----------------------------------------------------------------------
 configure                      | 461 +++++++-----------------------------
 include/neuralnet/base_layer.h |  61 +++--
 include/neuralnet/layer.h      |  62 ++++-
 include/trainer/worker.h       |   8 -
 src/communication/socket.cc    |   4 +-
 src/neuralnet/layer.cc         |   4 +-
 src/trainer/server.cc          |   1 -
 src/trainer/trainer.cc         |   3 +-
 src/trainer/worker.cc          |  18 +-
 src/utils/cluster.cc           |   2 +-
 src/utils/updater.cc           |   2 +-
 11 files changed, 202 insertions(+), 424 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/654d733b/configure
----------------------------------------------------------------------
diff --git a/configure b/configure
index 773b3a5..1e06269 100755
--- a/configure
+++ b/configure
@@ -1,6 +1,6 @@
 #! /bin/sh
 # Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.59 for singa 0.1.
+# Generated by GNU Autoconf 2.69 for singa 0.1.
 #
 # Report bugs to <de...@singa.incubator.apache.org>.
 #
@@ -266,11 +266,11 @@ fi
     $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
     $as_echo "$0: be upgraded to zsh 4.3.4 or later."
   else
-    $as_echo "$0: Please tell bug-autoconf@gnu.org and singa@apache.com
-$0: about your system, including any error possibly output
-$0: before this message. Then install a modern shell, or
-$0: manually run the script under such a shell if you do
-$0: have one."
+    $as_echo "$0: Please tell bug-autoconf@gnu.org and
+$0: dev@singa.incubator.apache.org about your system,
+$0: including any error possibly output before this
+$0: message. Then install a modern shell, or manually run
+$0: the script under such a shell if you do have one."
   fi
   exit 1
 fi
@@ -737,6 +737,7 @@ PACKAGE_TARNAME='singa'
 PACKAGE_VERSION='0.1'
 PACKAGE_STRING='singa 0.1'
 PACKAGE_BUGREPORT='dev@singa.incubator.apache.org'
+PACKAGE_URL=''
 
 ac_unique_file="src/utils/common.cc"
 # Factoring default headers for most tests.
@@ -1622,7 +1623,7 @@ test -n "$ac_init_help" && exit $ac_status
 if $ac_init_version; then
   cat <<\_ACEOF
 singa configure 0.1
-generated by GNU Autoconf 2.59
+generated by GNU Autoconf 2.69
 
 Copyright (C) 2012 Free Software Foundation, Inc.
 This configure script is free software; the Free Software Foundation
@@ -1631,8 +1632,9 @@ _ACEOF
   exit
 fi
 
-It was created by singa $as_me 0.1, which was
-generated by GNU Autoconf 2.59.  Invocation command line was
+## ------------------------ ##
+## Autoconf initialization. ##
+## ------------------------ ##
 
 # ac_fn_cxx_try_compile LINENO
 # ----------------------------
@@ -1863,9 +1865,9 @@ $as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;}
 $as_echo "$as_me: WARNING: $2:     section \"Present But Cannot Be Compiled\"" >&2;}
     { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5
 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;}
-( $as_echo "## ------------------------------- ##
-## Report this to singa@apache.com ##
-## ------------------------------- ##"
+( $as_echo "## --------------------------------------------- ##
+## Report this to dev@singa.incubator.apache.org ##
+## --------------------------------------------- ##"
      ) | sed "s/^/$as_me: WARNING:     /" >&2
     ;;
 esac
@@ -2248,7 +2250,7 @@ cat >config.log <<_ACEOF
 This file contains any messages produced by compilers while
 running configure, to aid debugging if configure makes a mistake.
 
-It was created by singa $as_me 0.0.1, which was
+It was created by singa $as_me 0.1, which was
 generated by GNU Autoconf 2.69.  Invocation command line was
 
   $ $0 $@
@@ -5547,44 +5549,6 @@ if test $ac_cv_header_stdc = yes; then
 
 $as_echo "#define STDC_HEADERS 1" >>confdefs.h
 
-# So?  What about this header?
-case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in
-  yes:no: )
-    { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5
-echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5
-echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;}
-    ac_header_preproc=yes
-    ;;
-  no:yes:* )
-    { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5
-echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header:     check for missing prerequisite headers?" >&5
-echo "$as_me: WARNING: $ac_header:     check for missing prerequisite headers?" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5
-echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header:     section \"Present But Cannot Be Compiled\"" >&5
-echo "$as_me: WARNING: $ac_header:     section \"Present But Cannot Be Compiled\"" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5
-echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5
-echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;}
-    (
-      cat <<\_ASBOX
-## --------------------------------------------- ##
-## Report this to dev@singa.incubator.apache.org ##
-## --------------------------------------------- ##
-_ASBOX
-    ) |
-      sed "s/^/$as_me: WARNING:     /" >&2
-    ;;
-esac
-echo "$as_me:$LINENO: checking for $ac_header" >&5
-echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6
-if eval "test \"\${$as_ac_Header+set}\" = set"; then
-  echo $ECHO_N "(cached) $ECHO_C" >&6
-else
-  eval "$as_ac_Header=\$ac_header_preproc"
 fi
 
 # On IRIX 5.3, sys/types and inttypes.h are conflicting.
@@ -18703,141 +18667,67 @@ if test "$ac_res" != no; then :
 
 else
 
-  { { echo "$as_me:$LINENO: error: unable to find cblas_sgemm() function" >&5
-echo "$as_me: error: unable to find cblas_sgemm() function" >&2;}
-   { (exit 1); exit 1; }; }
+  as_fn_error $? "unable to find cblas_sgemm() function" "$LINENO" 5
 
 fi
 
-echo "$as_me:$LINENO: checking for library containing zmq_ctx_new" >&5
-echo $ECHO_N "checking for library containing zmq_ctx_new... $ECHO_C" >&6
-if test "${ac_cv_search_zmq_ctx_new+set}" = set; then
-  echo $ECHO_N "(cached) $ECHO_C" >&6
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing zmq_ctx_new" >&5
+$as_echo_n "checking for library containing zmq_ctx_new... " >&6; }
+if ${ac_cv_search_zmq_ctx_new+:} false; then :
+  $as_echo_n "(cached) " >&6
 else
   ac_func_search_save_LIBS=$LIBS
-ac_cv_search_zmq_ctx_new=no
-cat >conftest.$ac_ext <<_ACEOF
-/* confdefs.h.  */
-_ACEOF
-cat confdefs.h >>conftest.$ac_ext
-cat >>conftest.$ac_ext <<_ACEOF
+cat confdefs.h - <<_ACEOF >conftest.$ac_ext
 /* end confdefs.h.  */
 
-/* Override any gcc2 internal prototype to avoid an error.  */
+/* Override any GCC internal prototype to avoid an error.
+   Use char because int might match the return type of a GCC
+   builtin and then its argument prototype would still apply.  */
 #ifdef __cplusplus
 extern "C"
 #endif
-/* We use char because int might match the return type of a gcc2
-   builtin and then its argument prototype would still apply.  */
 char zmq_ctx_new ();
 int
 main ()
 {
-zmq_ctx_new ();
+return zmq_ctx_new ();
   ;
   return 0;
 }
 _ACEOF
-rm -f conftest.$ac_objext conftest$ac_exeext
-if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5
-  (eval $ac_link) 2>conftest.er1
-  ac_status=$?
-  grep -v '^ *+' conftest.er1 >conftest.err
-  rm -f conftest.er1
-  cat conftest.err >&5
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
-  { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
-  (eval $ac_try) 2>&5
-  ac_status=$?
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); }; } &&
-	 { ac_try='test -s conftest$ac_exeext'
-  { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
-  (eval $ac_try) 2>&5
-  ac_status=$?
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); }; }; then
-  ac_cv_search_zmq_ctx_new="none required"
-else
-  echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-fi
-rm -f conftest.err conftest.$ac_objext \
-      conftest$ac_exeext conftest.$ac_ext
-if test "$ac_cv_search_zmq_ctx_new" = no; then
-  for ac_lib in zmq; do
+for ac_lib in '' zmq; do
+  if test -z "$ac_lib"; then
+    ac_res="none required"
+  else
+    ac_res=-l$ac_lib
     LIBS="-l$ac_lib  $ac_func_search_save_LIBS"
-    cat >conftest.$ac_ext <<_ACEOF
-/* confdefs.h.  */
-_ACEOF
-cat confdefs.h >>conftest.$ac_ext
-cat >>conftest.$ac_ext <<_ACEOF
-/* end confdefs.h.  */
+  fi
+  if ac_fn_c_try_link "$LINENO"; then :
+  ac_cv_search_zmq_ctx_new=$ac_res
+fi
+rm -f core conftest.err conftest.$ac_objext \
+    conftest$ac_exeext
+  if ${ac_cv_search_zmq_ctx_new+:} false; then :
+  break
+fi
+done
+if ${ac_cv_search_zmq_ctx_new+:} false; then :
 
-/* Override any gcc2 internal prototype to avoid an error.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-/* We use char because int might match the return type of a gcc2
-   builtin and then its argument prototype would still apply.  */
-char zmq_ctx_new ();
-int
-main ()
-{
-zmq_ctx_new ();
-  ;
-  return 0;
-}
-_ACEOF
-rm -f conftest.$ac_objext conftest$ac_exeext
-if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5
-  (eval $ac_link) 2>conftest.er1
-  ac_status=$?
-  grep -v '^ *+' conftest.er1 >conftest.err
-  rm -f conftest.er1
-  cat conftest.err >&5
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
-  { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
-  (eval $ac_try) 2>&5
-  ac_status=$?
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); }; } &&
-	 { ac_try='test -s conftest$ac_exeext'
-  { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
-  (eval $ac_try) 2>&5
-  ac_status=$?
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); }; }; then
-  ac_cv_search_zmq_ctx_new="-l$ac_lib"
-break
 else
-  echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-fi
-rm -f conftest.err conftest.$ac_objext \
-      conftest$ac_exeext conftest.$ac_ext
-  done
+  ac_cv_search_zmq_ctx_new=no
 fi
+rm conftest.$ac_ext
 LIBS=$ac_func_search_save_LIBS
 fi
-echo "$as_me:$LINENO: result: $ac_cv_search_zmq_ctx_new" >&5
-echo "${ECHO_T}$ac_cv_search_zmq_ctx_new" >&6
-if test "$ac_cv_search_zmq_ctx_new" != no; then
-  test "$ac_cv_search_zmq_ctx_new" = "none required" || LIBS="$ac_cv_search_zmq_ctx_new $LIBS"
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_zmq_ctx_new" >&5
+$as_echo "$ac_cv_search_zmq_ctx_new" >&6; }
+ac_res=$ac_cv_search_zmq_ctx_new
+if test "$ac_res" != no; then :
+  test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
 
 else
 
-  { { echo "$as_me:$LINENO: error: unable to find zmq_ctx_new() function" >&5
-echo "$as_me: error: unable to find zmq_ctx_new() function" >&2;}
-   { (exit 1); exit 1; }; }
+  as_fn_error $? "unable to find zmq_ctx_new() function" "$LINENO" 5
 
 fi
 
@@ -18897,9 +18787,7 @@ if test "$ac_res" != no; then :
 
 else
 
-  { { echo "$as_me:$LINENO: error: unable to find zmsg_new() function" >&5
-echo "$as_me: error: unable to find zmsg_new() function" >&2;}
-   { (exit 1); exit 1; }; }
+  as_fn_error $? "unable to find zmsg_new() function" "$LINENO" 5
 
 fi
 
@@ -18942,9 +18830,7 @@ _ACEOF
 
 else
 
-  { { echo "$as_me:$LINENO: error: unable to find gflags library" >&5
-echo "$as_me: error: unable to find gflags library" >&2;}
-   { (exit 1); exit 1; }; }
+  as_fn_error $? "unable to find gflags library" "$LINENO" 5
 
 fi
 
@@ -18987,9 +18873,7 @@ _ACEOF
 
 else
 
-  { { echo "$as_me:$LINENO: error: unable to find glog library" >&5
-echo "$as_me: error: unable to find glog library" >&2;}
-   { (exit 1); exit 1; }; }
+  as_fn_error $? "unable to find glog library" "$LINENO" 5
 
 fi
 
@@ -19032,9 +18916,7 @@ _ACEOF
 
 else
 
-  { { echo "$as_me:$LINENO: error: unable to find protobuf library" >&5
-echo "$as_me: error: unable to find protobuf library" >&2;}
-   { (exit 1); exit 1; }; }
+  as_fn_error $? "unable to find protobuf library" "$LINENO" 5
 
 fi
 
@@ -19077,9 +18959,7 @@ _ACEOF
 
 else
 
-  { { echo "$as_me:$LINENO: error: unable to find opencv_imgproc lib" >&5
-echo "$as_me: error: unable to find opencv_imgproc lib" >&2;}
-   { (exit 1); exit 1; }; }
+  as_fn_error $? "unable to find opencv_imgproc lib" "$LINENO" 5
 
 fi
 
@@ -19122,9 +19002,7 @@ _ACEOF
 
 else
 
-  { { echo "$as_me:$LINENO: error: unable to find opencv_highgui lib" >&5
-echo "$as_me: error: unable to find opencv_highgui lib" >&2;}
-   { (exit 1); exit 1; }; }
+  as_fn_error $? "unable to find opencv_highgui lib" "$LINENO" 5
 
 fi
 
@@ -19167,9 +19045,7 @@ _ACEOF
 
 else
 
-  { { echo "$as_me:$LINENO: error: unable to find opencv_core lib" >&5
-echo "$as_me: error: unable to find opencv_core lib" >&2;}
-   { (exit 1); exit 1; }; }
+  as_fn_error $? "unable to find opencv_core lib" "$LINENO" 5
 
 fi
 
@@ -19229,9 +19105,7 @@ if test "$ac_res" != no; then :
 
 else
 
-  { { echo "$as_me:$LINENO: error: unable to find mdb_env_create() function" >&5
-echo "$as_me: error: unable to find mdb_env_create() function" >&2;}
-   { (exit 1); exit 1; }; }
+  as_fn_error $? "unable to find mdb_env_create() function" "$LINENO" 5
 
 fi
 
@@ -19350,146 +19224,10 @@ $as_echo "#define STDC_HEADERS 1" >>confdefs.h
 fi
 
 for ac_header in fcntl.h malloc.h stdlib.h
-do
-as_ac_Header=`echo "ac_cv_header_$ac_header" | $as_tr_sh`
-if eval "test \"\${$as_ac_Header+set}\" = set"; then
-  echo "$as_me:$LINENO: checking for $ac_header" >&5
-echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6
-if eval "test \"\${$as_ac_Header+set}\" = set"; then
-  echo $ECHO_N "(cached) $ECHO_C" >&6
-fi
-echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5
-echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6
-else
-  # Is the header compilable?
-echo "$as_me:$LINENO: checking $ac_header usability" >&5
-echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6
-cat >conftest.$ac_ext <<_ACEOF
-/* confdefs.h.  */
-_ACEOF
-cat confdefs.h >>conftest.$ac_ext
-cat >>conftest.$ac_ext <<_ACEOF
-/* end confdefs.h.  */
-$ac_includes_default
-#include <$ac_header>
-_ACEOF
-rm -f conftest.$ac_objext
-if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5
-  (eval $ac_compile) 2>conftest.er1
-  ac_status=$?
-  grep -v '^ *+' conftest.er1 >conftest.err
-  rm -f conftest.er1
-  cat conftest.err >&5
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
-  { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
-  (eval $ac_try) 2>&5
-  ac_status=$?
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); }; } &&
-	 { ac_try='test -s conftest.$ac_objext'
-  { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
-  (eval $ac_try) 2>&5
-  ac_status=$?
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); }; }; then
-  ac_header_compiler=yes
-else
-  echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-ac_header_compiler=no
-fi
-rm -f conftest.err conftest.$ac_objext conftest.$ac_ext
-echo "$as_me:$LINENO: result: $ac_header_compiler" >&5
-echo "${ECHO_T}$ac_header_compiler" >&6
-
-# Is the header present?
-echo "$as_me:$LINENO: checking $ac_header presence" >&5
-echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6
-cat >conftest.$ac_ext <<_ACEOF
-/* confdefs.h.  */
-_ACEOF
-cat confdefs.h >>conftest.$ac_ext
-cat >>conftest.$ac_ext <<_ACEOF
-/* end confdefs.h.  */
-#include <$ac_header>
-_ACEOF
-if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5
-  (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1
-  ac_status=$?
-  grep -v '^ *+' conftest.er1 >conftest.err
-  rm -f conftest.er1
-  cat conftest.err >&5
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); } >/dev/null; then
-  if test -s conftest.err; then
-    ac_cpp_err=$ac_c_preproc_warn_flag
-    ac_cpp_err=$ac_cpp_err$ac_c_werror_flag
-  else
-    ac_cpp_err=
-  fi
-else
-  ac_cpp_err=yes
-fi
-if test -z "$ac_cpp_err"; then
-  ac_header_preproc=yes
-else
-  echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-  ac_header_preproc=no
-fi
-rm -f conftest.err conftest.$ac_ext
-echo "$as_me:$LINENO: result: $ac_header_preproc" >&5
-echo "${ECHO_T}$ac_header_preproc" >&6
-
-# So?  What about this header?
-case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in
-  yes:no: )
-    { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5
-echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5
-echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;}
-    ac_header_preproc=yes
-    ;;
-  no:yes:* )
-    { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5
-echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header:     check for missing prerequisite headers?" >&5
-echo "$as_me: WARNING: $ac_header:     check for missing prerequisite headers?" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5
-echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header:     section \"Present But Cannot Be Compiled\"" >&5
-echo "$as_me: WARNING: $ac_header:     section \"Present But Cannot Be Compiled\"" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5
-echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5
-echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;}
-    (
-      cat <<\_ASBOX
-## --------------------------------------------- ##
-## Report this to dev@singa.incubator.apache.org ##
-## --------------------------------------------- ##
-_ASBOX
-    ) |
-      sed "s/^/$as_me: WARNING:     /" >&2
-    ;;
-esac
-echo "$as_me:$LINENO: checking for $ac_header" >&5
-echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6
-if eval "test \"\${$as_ac_Header+set}\" = set"; then
-  echo $ECHO_N "(cached) $ECHO_C" >&6
-else
-  eval "$as_ac_Header=\$ac_header_preproc"
-fi
-echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5
-echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6
-
-fi
-if test `eval echo '${'$as_ac_Header'}'` = yes; then
+do :
+  as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
+ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default"
+if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
   cat >>confdefs.h <<_ACEOF
 #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
 _ACEOF
@@ -19727,42 +19465,27 @@ _ACEOF
 
 fi
 
-# So?  What about this header?
-case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in
-  yes:no: )
-    { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5
-echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5
-echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;}
-    ac_header_preproc=yes
-    ;;
-  no:yes:* )
-    { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5
-echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header:     check for missing prerequisite headers?" >&5
-echo "$as_me: WARNING: $ac_header:     check for missing prerequisite headers?" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5
-echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header:     section \"Present But Cannot Be Compiled\"" >&5
-echo "$as_me: WARNING: $ac_header:     section \"Present But Cannot Be Compiled\"" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5
-echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;}
-    { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5
-echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;}
-    (
-      cat <<\_ASBOX
-## --------------------------------------------- ##
-## Report this to dev@singa.incubator.apache.org ##
-## --------------------------------------------- ##
-_ASBOX
-    ) |
-      sed "s/^/$as_me: WARNING:     /" >&2
-    ;;
-esac
-echo "$as_me:$LINENO: checking for $ac_header" >&5
-echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6
-if eval "test \"\${$as_ac_Header+set}\" = set"; then
-  echo $ECHO_N "(cached) $ECHO_C" >&6
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for working volatile" >&5
+$as_echo_n "checking for working volatile... " >&6; }
+if ${ac_cv_c_volatile+:} false; then :
+  $as_echo_n "(cached) " >&6
+else
+  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h.  */
+
+int
+main ()
+{
+
+volatile int x;
+int * volatile y = (int *) 0;
+return !x && !y;
+  ;
+  return 0;
+}
+_ACEOF
+if ac_fn_c_try_compile "$LINENO"; then :
+  ac_cv_c_volatile=yes
 else
   ac_cv_c_volatile=no
 fi
@@ -20483,18 +20206,10 @@ test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1
 cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
 # Save the log message, to keep $0 and so on meaningful, and to
 # report actual input values of CONFIG_FILES etc. instead of their
-# values after options handling.  Logging --version etc. is OK.
-exec 5>>config.log
-{
-  echo
-  sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX
-## Running $as_me. ##
-_ASBOX
-} >&5
-cat >&5 <<_CSEOF
-
+# values after options handling.
+ac_log="
 This file was extended by singa $as_me 0.1, which was
-generated by GNU Autoconf 2.59.  Invocation command line was
+generated by GNU Autoconf 2.69.  Invocation command line was
 
   CONFIG_FILES    = $CONFIG_FILES
   CONFIG_HEADERS  = $CONFIG_HEADERS
@@ -20553,15 +20268,15 @@ $config_headers
 Configuration commands:
 $config_commands
 
-Report bugs to <si...@apache.com>."
+Report bugs to <de...@singa.incubator.apache.org>."
 
 _ACEOF
 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
 ac_cs_version="\\
 singa config.status 0.1
-configured by $0, generated by GNU Autoconf 2.59,
-  with options \\"`echo "$ac_configure_args" | sed 's/[\\""\`\$]/\\\\&/g'`\\"
+configured by $0, generated by GNU Autoconf 2.69,
+  with options \\"\$ac_cs_config\\"
 
 Copyright (C) 2012 Free Software Foundation, Inc.
 This config.status script is free software; the Free Software Foundation

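Note: most of the configure churn above comes from regenerating the script with Autoconf 2.69 in place of the ancient 2.59; the visible hand-edits are the bug-report address (singa@apache.com becomes dev@singa.incubator.apache.org) and the version string in config.log (0.0.1 becomes 0.1).
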
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/654d733b/include/neuralnet/base_layer.h
----------------------------------------------------------------------
diff --git a/include/neuralnet/base_layer.h b/include/neuralnet/base_layer.h
index e4fc174..9b8545a 100644
--- a/include/neuralnet/base_layer.h
+++ b/include/neuralnet/base_layer.h
@@ -289,6 +289,10 @@ protected:
  */
 class BridgeSrcLayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers);
   virtual void SetupAfterPartition();
   virtual void SetupAfterPartition(const LayerProto& proto,
@@ -316,6 +320,10 @@ class BridgeSrcLayer: public Layer {
  */
 class BridgeDstLayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers);
   virtual void SetupAfterPartition();
   virtual void SetupAfterPartition(const LayerProto& proto,
@@ -342,6 +350,10 @@ class BridgeDstLayer: public Layer {
  */
 class ConcateLayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers);
   virtual void SetupAfterPartition();
   virtual void SetupAfterPartition(const LayerProto& proto,
@@ -360,6 +372,10 @@ class ConcateLayer: public Layer {
 
 class DataLayer: public Layer{
  public:
+  using Layer::Setup;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void ComputeFeature(bool training, const vector<SLayer>& srclayers)=0;
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers)=0;
   virtual bool is_datalayer() const {
@@ -415,12 +431,22 @@ class DataLayer: public Layer{
  */
 class PrefetchLayer : public Layer {
  public:
+  using Layer::Setup;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+  using Layer::SetupAfterPartition;
+
   virtual ~PrefetchLayer();
-  virtual void ComputeFeature(bool training, const vector<SLayer>& srclayers);
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers);
+  virtual void ComputeFeature(bool training, const vector<SLayer>& srclayers);
+  virtual void ComputeGradient(const vector<SLayer>& srclayers){};
+  virtual void SetupAfterPartition(const LayerProto& proto,
+      const vector<int> &shape,
+      const vector<SLayer>& srclayers){}
+
   virtual const Blob<float>& data(const Layer* from) const ;
   virtual Blob<float>* mutable_data(const Layer* layer) ;
-  virtual void ComputeGradient(const vector<SLayer>& srclayers){};
+
   virtual Blob<float>* mutable_grad(const Layer* layer){
     return nullptr;
   }
@@ -428,11 +454,6 @@ class PrefetchLayer : public Layer {
     CHECK(false)<<"Loss layer has not gradient blob";
     return grad_;
   }
-
-  virtual void SetupAfterPartition(const LayerProto& proto,
-      const vector<int> &shape,
-      const vector<SLayer>& srclayers){}
-
   virtual PartitionType partition_type () const {
     return kNone;
   }
@@ -449,20 +470,22 @@ class PrefetchLayer : public Layer {
  */
 class SliceLayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
+  virtual void ComputeFeature(bool training, const vector<shared_ptr<Layer>>& srclayers);
+  virtual void ComputeGradient(const vector<shared_ptr<Layer>>& srclayers);
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers);
   virtual void SetupAfterPartition();
   virtual void SetupAfterPartition(const LayerProto& proto,
       const vector<int> &shape,
       const vector<SLayer>& srclayers){}
 
-
   virtual const Blob<float>& data(const Layer* layer) const;
   virtual const Blob<float>& grad(const Layer* layer) const;
   virtual Blob<float>* mutable_data(const Layer* layer);
   virtual Blob<float>* mutable_grad(const Layer* layer);
-  virtual void ComputeFeature(bool training, const vector<shared_ptr<Layer>>& srclayers);
-  virtual void ComputeGradient(const vector<shared_ptr<Layer>>& srclayers);
-
  protected:
   int SliceID(const Layer* layer) const;
   vector<Blob<float>> datavec_, gradvec_;
@@ -474,6 +497,10 @@ class SliceLayer: public Layer {
  */
 class SplitLayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers);
   virtual void SetupAfterPartition();
   virtual void SetupAfterPartition(const LayerProto& proto,
@@ -489,12 +516,15 @@ class SplitLayer: public Layer {
  */
 class LossLayer: public Layer{
  public:
+  using Layer::Setup;
+  using Layer::SetupAfterPartition;
+
   virtual void Setup(const LayerProto& proto,
       const vector<SLayer>& srclayers)=0;
-
   virtual void SetupAfterPartition(const LayerProto& proto,
       const vector<int> &shape,
       const vector<SLayer>& srclayers)=0;
+
   virtual Blob<float>* mutable_grad(const Layer* layer){
     return nullptr;
   }
@@ -505,7 +535,6 @@ class LossLayer: public Layer{
   virtual bool is_losslayer() const {
     return true;
   }
-
   virtual const Blob<float>& metric() const {
     return metric_;
   }
@@ -518,6 +547,11 @@ class LossLayer: public Layer{
  */
 class ParserLayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::SetupAfterPartition;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void Setup(const LayerProto& proto,
       const vector<SLayer>& srclayers)=0;
   /**
@@ -545,7 +579,6 @@ class ParserLayer: public Layer {
     if(!has_setup_)
       Setup();
   }
-
   virtual void SetupAfterPartition(const LayerProto& proto,
       const vector<int> &shape,
       const vector<SLayer>& srclayers){}

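Note: the "using Layer::Setup;" (and companion) declarations added throughout this header silence -Woverloaded-virtual in GCC and Clang: when a subclass overrides one overload of a virtual member function, the base class's remaining overloads of that name are hidden unless re-exposed with a using-declaration. A minimal standalone sketch of the pattern (Base and Derived are illustrative names, not SINGA types):

    #include <string>

    struct Base {
      virtual ~Base() {}
      virtual void Setup() {}                     // overload 1
      virtual void Setup(const std::string&) {}   // overload 2
    };

    struct Derived : public Base {
      using Base::Setup;  // re-expose overload 1; without this line the
                          // override below hides it and the warning fires
      virtual void Setup(const std::string&) {}
    };

    int main() {
      Derived d;
      d.Setup();  // resolves only because of the using-declaration
      return 0;
    }
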
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/654d733b/include/neuralnet/layer.h
----------------------------------------------------------------------
diff --git a/include/neuralnet/layer.h b/include/neuralnet/layer.h
index 318f295..4a4c307 100644
--- a/include/neuralnet/layer.h
+++ b/include/neuralnet/layer.h
@@ -27,9 +27,13 @@ namespace singa {
  */
 class ConvolutionLayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::SetupAfterPartition;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void Setup(const LayerProto& proto,
       const vector<SLayer>& srclayers);
-
   /**
    * need to reset some properties (e.g., weight matrix) according to
    * shapes (after partition, e.g., partition is done against channel dimension)
@@ -57,6 +61,11 @@ class ConvolutionLayer: public Layer {
 
 class DropoutLayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::SetupAfterPartition;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void Setup(const LayerProto& proto,
       const vector<SLayer>& srclayers);
 
@@ -80,6 +89,11 @@ class DropoutLayer: public Layer {
   */
 class InnerProductLayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::SetupAfterPartition;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void Setup(const LayerProto& proto,
       const vector<SLayer>& srclayers);
 
@@ -112,11 +126,11 @@ class InnerProductLayer: public Layer {
 
 class LabelLayer: public ParserLayer {
  public:
+  using ParserLayer::Setup;
+
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers);
   virtual void ParseRecords(bool training, const vector<Record>& records,
       Blob<float>* blob);
-
-
 };
 
 class LRNLayer: public Layer {
@@ -128,6 +142,10 @@ class LRNLayer: public Layer {
  * a_i, the activation (after ReLU) of a neuron convolved with the i-th kernel.
  * b_i, the neuron after normalization, N is the total num of kernels
  */
+  using Layer::Setup;
+  using Layer::SetupAfterPartition;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
 
  public:
   virtual void Setup(const LayerProto& proto,
@@ -152,6 +170,8 @@ class LRNLayer: public Layer {
 
 class MnistImageLayer: public ParserLayer {
  public:
+  using Layer::Setup;
+
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers);
   virtual void ParseRecords(bool training, const vector<Record>& records,
       Blob<float>* blob);
@@ -169,14 +189,16 @@ class MnistImageLayer: public ParserLayer {
 
 class PoolingLayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::SetupAfterPartition;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void Setup(const LayerProto& proto,
       const vector<SLayer>& srclayers);
-
   virtual void SetupAfterPartition(const LayerProto& proto,
       const vector<int> &shape,
       const vector<SLayer>& srclayers);
-
-
   virtual void ComputeFeature(bool training, const vector<shared_ptr<Layer>>& srclayers);
   virtual void ComputeGradient(const vector<shared_ptr<Layer>>& srclayers);
  protected:
@@ -187,14 +209,18 @@ class PoolingLayer: public Layer {
 
 class ReLULayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::SetupAfterPartition;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
+
   virtual void Setup(const LayerProto& proto,
       const vector<SLayer>& srclayers);
-
   virtual void SetupAfterPartition(const LayerProto& proto,
       const vector<int> &shape,
       const vector<SLayer>& srclayers);
 
-
   virtual void ComputeFeature(bool training, const vector<shared_ptr<Layer>>& srclayers);
   virtual void ComputeGradient(const vector<shared_ptr<Layer>>& srclayers);
 };
@@ -205,6 +231,11 @@ class SoftmaxLossLayer: public LossLayer {
    * connected from the label layer and the last fc layer
    */
  public:
+  using Layer::Setup;
+  using Layer::SetupAfterPartition;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void Setup(const LayerProto& proto,
       const vector<SLayer>& srclayers);
 
@@ -237,6 +268,8 @@ class SoftmaxLossLayer: public LossLayer {
 
 class RGBImageLayer: public ParserLayer {
  public:
+  using Layer::Setup;
+
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers);
   virtual void ParseRecords(bool training, const vector<Record>& records,
       Blob<float>* blob);
@@ -250,6 +283,10 @@ class RGBImageLayer: public ParserLayer {
 
 class ShardDataLayer: public DataLayer{
  public:
+  using Layer::Setup;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void ComputeFeature(bool training, const vector<shared_ptr<Layer>>& srclayers);
   virtual void ComputeGradient(const vector<shared_ptr<Layer>>& srclayers){};
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers);
@@ -258,6 +295,10 @@ class ShardDataLayer: public DataLayer{
 };
 class LMDBDataLayer: public DataLayer{
  public:
+  using Layer::Setup;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void ComputeFeature(bool training, const vector<shared_ptr<Layer>>& srclayers);
   virtual void ComputeGradient(const vector<shared_ptr<Layer>>& srclayers){};
   virtual void Setup(const LayerProto& proto, const vector<SLayer>& srclayers);
@@ -279,6 +320,11 @@ class LMDBDataLayer: public DataLayer{
  */
 class TanhLayer: public Layer {
  public:
+  using Layer::Setup;
+  using Layer::SetupAfterPartition;
+  using Layer::ComputeFeature;
+  using Layer::ComputeGradient;
+
   virtual void Setup(const LayerProto& proto,
       const vector<SLayer>& srclayers);
 

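Note: the using-declarations added to the layer classes in this header are the same -Woverloaded-virtual fix sketched after the base_layer.h diff above.
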
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/654d733b/include/trainer/worker.h
----------------------------------------------------------------------
diff --git a/include/trainer/worker.h b/include/trainer/worker.h
index 609e7dc..0e9f356 100644
--- a/include/trainer/worker.h
+++ b/include/trainer/worker.h
@@ -169,14 +169,6 @@ class Worker {
   Poller layer_poller_, param_poller_;
 };
 
-class WorkerException: public std::exception{
- public:
-  const char* what() throw(){
-    return "Worker Exception";
-  }
-};
-
-
 class BPWorker: public Worker{
  public:
   ~BPWorker(){}

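Note: the deleted WorkerException::what() lacked a const qualifier, so it hid rather than overrode std::exception::what() (declared const char* what() const throw()), which is presumably the warning at issue here; the commit drops the class outright along with its only uses, the throw in BPWorker::Forward and the try/catch in Worker::Run (see src/trainer/worker.cc below). For reference, a correctly overriding version would have looked like this sketch, not like the deleted code:

    #include <exception>

    class WorkerException : public std::exception {
     public:
      const char* what() const throw() {  // const matches the base signature
        return "Worker Exception";
      }
    };
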
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/654d733b/src/communication/socket.cc
----------------------------------------------------------------------
diff --git a/src/communication/socket.cc b/src/communication/socket.cc
index 279d758..ef6174a 100644
--- a/src/communication/socket.cc
+++ b/src/communication/socket.cc
@@ -26,7 +26,7 @@ Dealer::Dealer(int id):id_(id){
 
 int Dealer::Connect(string endpoint){
   if(endpoint.length())
-    CHECK_EQ(zsock_connect(dealer_,endpoint.c_str()),0);
+    CHECK_EQ(zsock_connect(dealer_,"%s", endpoint.c_str()),0);
   return 1;
 }
 int Dealer::Send(Msg *msg){
@@ -57,7 +57,7 @@ Router::Router(int bufsize){
 }
 int Router::Bind(string endpoint){
   if(endpoint.length())
-    CHECK_EQ(zsock_bind(router_, endpoint.c_str()),0);
+    CHECK_EQ(zsock_bind(router_, "%s", endpoint.c_str()),0);
   return 1;
 }
 

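Note: czmq's zsock_connect() and zsock_bind() take a printf-style format string, so passing a runtime endpoint directly as the format trips -Wformat-security; routing it through "%s" treats the endpoint as data rather than as a format. A minimal standalone sketch (assumes czmq is installed; the endpoint value is illustrative):

    #include <czmq.h>

    int main() {
      zsock_t *sock = zsock_new(ZMQ_DEALER);
      const char *endpoint = "tcp://127.0.0.1:5555";
      // zsock_connect(sock, endpoint);      // warns: non-literal format string
      zsock_connect(sock, "%s", endpoint);   // safe: endpoint passed as data
      zsock_destroy(&sock);
      return 0;
    }
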
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/654d733b/src/neuralnet/layer.cc
----------------------------------------------------------------------
diff --git a/src/neuralnet/layer.cc b/src/neuralnet/layer.cc
index 03eacc1..25cae42 100644
--- a/src/neuralnet/layer.cc
+++ b/src/neuralnet/layer.cc
@@ -91,7 +91,7 @@ void ConvolutionLayer::ComputeGradient(const vector<SLayer>& srclayers) {
       Shape2(num_filters_, col_height_));
 
   Blob<float>* gsrcblob=srclayers[0]->mutable_grad(this);
-  Tensor<cpu, 4> gsrc(Shape4(batchsize_, channels_, height_, width_));
+  Tensor<cpu, 4> gsrc(nullptr, Shape4(batchsize_, channels_, height_, width_));
   if(gsrcblob!=nullptr)
     gsrc.dptr=gsrcblob->mutable_cpu_data();
   Tensor<cpu, 3> grad(grad_.mutable_cpu_data(),
@@ -579,7 +579,7 @@ void RGBImageLayer::ParseRecords(bool training,
   const SingleLabelImageRecord& r=records.at(0).image();
   Tensor<cpu, 3> raw_image(Shape3(r.shape(0),r.shape(1),r.shape(2)));
   AllocSpace(raw_image);
-  Tensor<cpu, 3> croped_image(Shape3(s[1],s[2],s[3]));
+  Tensor<cpu, 3> croped_image(nullptr, Shape3(s[1],s[2],s[3]));
   if(cropsize_)
     AllocSpace(croped_image);
     //CHECK(std::equal(croped_image.shape(), raw_image.shape());

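Note: both hunks pass an explicit nullptr for the tensor's data pointer, which the shape-only mshadow constructor leaves unset; because dptr is then assigned only on some paths, the compiler (e.g. GCC's -Wmaybe-uninitialized) flagged the old code. A standalone analogue of the pattern (View is an illustrative stand-in, not an mshadow type):

    #include <cstddef>

    struct View {
      float *dptr;
      std::size_t len;
      View(float *p, std::size_t n) : dptr(p), len(n) {}
    };

    int main() {
      float buf[4] = {0.f, 1.f, 2.f, 3.f};
      View v(nullptr, 4);   // explicit null instead of an indeterminate pointer
      bool have_buf = true;
      if (have_buf)
        v.dptr = buf;       // assigned on the path where it is later used
      return v.dptr ? 0 : 1;
    }
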
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/654d733b/src/trainer/server.cc
----------------------------------------------------------------------
diff --git a/src/trainer/server.cc b/src/trainer/server.cc
index bf0ad03..f5877c5 100644
--- a/src/trainer/server.cc
+++ b/src/trainer/server.cc
@@ -28,7 +28,6 @@ void Server::Run(){
   ping->set_dst(0,0,kStub);
   ping->set_type(kConnect);
   dealer_->Send(ping);
-  int timeout=Cluster::Get()->server_timeout();
   Poller poller;
   poller.Add(dealer_.get());
 	//start recv loop and process requests

http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/654d733b/src/trainer/trainer.cc
----------------------------------------------------------------------
diff --git a/src/trainer/trainer.cc b/src/trainer/trainer.cc
index 3621b7e..0a1edc8 100644
--- a/src/trainer/trainer.cc
+++ b/src/trainer/trainer.cc
@@ -8,7 +8,7 @@ using std::map;
 
 namespace singa {
 int ProcsIDOf(int group_id, int id, int flag){
-  int procsid;
+  int procsid=-1;
   auto cluster=Cluster::Get();
   if(flag==kServer){
     procsid=group_id*cluster->nservers_per_group()/
@@ -170,7 +170,6 @@ void Trainer::Run(){
   map<int, shared_ptr<Dealer>> interprocs_dealers;
   Poller poller;
   poller.Add(router.get());
-  int timeout=cluster->stub_timeout();
   while(true){
     Msg* msg=router->Receive();
     if(msg==nullptr){

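Note: ProcsIDOf() assigned procsid only inside flag-specific branches, so the compiler warned (-Wmaybe-uninitialized) that the return could read an unset value; the -1 default gives every path a defined sentinel result. The deleted timeout locals here and in src/trainer/server.cc above were plain -Wunused-variable fixes. A minimal standalone analogue of the initialization fix (names illustrative):

    int procs_of(int flag) {
      int id = -1;    // sentinel default: defined even when no branch matches
      if (flag == 0)
        id = 10;
      else if (flag == 1)
        id = 20;
      return id;      // previously indeterminate for unmatched flags
    }

    int main() { return procs_of(2) == -1 ? 0 : 1; }
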
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/654d733b/src/trainer/worker.cc
----------------------------------------------------------------------
diff --git a/src/trainer/worker.cc b/src/trainer/worker.cc
index 6ead6c8..a290996 100644
--- a/src/trainer/worker.cc
+++ b/src/trainer/worker.cc
@@ -9,8 +9,8 @@
 using std::thread;
 namespace singa {
 Worker::Worker( int group_id, int worker_id):
-   group_id_(group_id), worker_id_(worker_id){
-}
+  group_id_(group_id), worker_id_(worker_id){
+  }
 
 void Worker::Setup(const ModelProto& model,
     shared_ptr<NeuralNet> train_net,
@@ -44,13 +44,9 @@ void Worker::Setup(const ModelProto& model,
 void Worker::Run(){
   step_=modelproto_.step();
   Performance perf(train_net_);
-  try{
-    while(!StopNow(step_)){
-      RunOneBatch(step_, &perf);
-      step_++;
-    }
-  }catch(WorkerException& e){
-    LOG(ERROR)<<e.what();
+  while(!StopNow(step_)){
+    RunOneBatch(step_, &perf);
+    step_++;
   }
 }
 int Worker::Put(shared_ptr<Param> param, int step){
@@ -169,9 +165,7 @@ void BPWorker::Forward(shared_ptr<NeuralNet> net, int step,  bool training){
       }
       if(training){
         for(shared_ptr<Param> p: layer->GetParams()){
-          if(Collect(p, step)==0){
-            throw WorkerException();
-          }
+          Collect(p, step);
         }
       }
       //clock_t s=clock();

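Note: with WorkerException gone (see the note after the include/trainer/worker.h diff above), Collect()'s return value is no longer checked in Forward(); a zero return is now ignored rather than aborting the training loop.
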
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/654d733b/src/utils/cluster.cc
----------------------------------------------------------------------
diff --git a/src/utils/cluster.cc b/src/utils/cluster.cc
index ac47422..66c4ac8 100644
--- a/src/utils/cluster.cc
+++ b/src/utils/cluster.cc
@@ -12,7 +12,7 @@ Cluster::Cluster(const ClusterProto &cluster, int procs_id) {
   procs_id_=procs_id;
   cluster_ = cluster;
   SetupFolders(cluster);
-  int nprocs;
+  size_t nprocs;
   if(server_worker_separate())
     nprocs=nworker_procs()+nserver_procs();
   else

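Note: this int-to-size_t change and the size_t-to-int change in src/utils/updater.cc below are mirror -Wsign-compare fixes; a combined sketch follows the updater.cc diff.
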
http://git-wip-us.apache.org/repos/asf/incubator-singa/blob/654d733b/src/utils/updater.cc
----------------------------------------------------------------------
diff --git a/src/utils/updater.cc b/src/utils/updater.cc
index 0b89ee8..ae7d582 100644
--- a/src/utils/updater.cc
+++ b/src/utils/updater.cc
@@ -45,7 +45,7 @@ float Updater::GetLearningRate(int step){
       ret = base * pow(proto_.gamma(), step / freq);
       break;
     case UpdaterProto_ChangeProto_kFixedStep:
-      for(size_t i=0;i<proto_.step_size();i++){
+      for(int i=0;i<proto_.step_size();i++){
         if(step>proto_.step(i))
           ret=proto_.step_lr(i);
       }
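
Note: both sign-compare fixes align the variable with the signedness of what it is compared against: cluster.cc widens nprocs to size_t, presumably because it later meets an unsigned container size, while updater.cc narrows the loop index to int because protobuf's repeated-field step_size() returns int. A standalone sketch of the two directions (the step_size() function here is an illustrative stand-in, not protobuf):

    #include <cstddef>
    #include <vector>

    int step_size() { return 3; }  // protobuf-style: repeated-field sizes are int

    int main() {
      std::vector<int> procs = {1, 2, 3};  // STL-style: size() returns size_t
      for (int i = 0; i < step_size(); ++i) {}           // int vs int: clean
      for (std::size_t i = 0; i < procs.size(); ++i) {}  // size_t vs size_t: clean
      return 0;
    }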