Posted to commits@mxnet.apache.org by zh...@apache.org on 2018/08/15 07:00:13 UTC

[incubator-mxnet] branch master updated: Add worker_fn argument to multiworker function (#12177)

This is an automated email from the ASF dual-hosted git repository.

zhreshold pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git


The following commit(s) were added to refs/heads/master by this push:
     new cd6b1cb  Add worker_fn argument to multiworker function (#12177)
cd6b1cb is described below

commit cd6b1cb5a6b4651f2b6fb64575d884a6fa3ef545
Author: Shuai Zheng <sz...@users.noreply.github.com>
AuthorDate: Wed Aug 15 14:59:58 2018 +0800

    Add worker_fn argument to multiworker function (#12177)
    
    * add worker_fn argument to multiworker function
    
    * fix pylint
---
 python/mxnet/gluon/data/dataloader.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/python/mxnet/gluon/data/dataloader.py b/python/mxnet/gluon/data/dataloader.py
index e0b6aec..412d313 100644
--- a/python/mxnet/gluon/data/dataloader.py
+++ b/python/mxnet/gluon/data/dataloader.py
@@ -183,7 +183,8 @@ def fetcher_loop(data_queue, data_buffer, pin_memory=False):
 
 class _MultiWorkerIter(object):
     """Interal multi-worker iterator for DataLoader."""
-    def __init__(self, num_workers, dataset, batchify_fn, batch_sampler, pin_memory=False):
+    def __init__(self, num_workers, dataset, batchify_fn, batch_sampler, pin_memory=False,
+                 worker_fn=worker_loop):
         assert num_workers > 0, "_MultiWorkerIter is not for {} workers".format(num_workers)
         self._num_workers = num_workers
         self._dataset = dataset
@@ -200,7 +201,7 @@ class _MultiWorkerIter(object):
         workers = []
         for _ in range(self._num_workers):
             worker = multiprocessing.Process(
-                target=worker_loop,
+                target=worker_fn,
                 args=(self._dataset, self._key_queue, self._data_queue, self._batchify_fn))
             worker.daemon = True
             worker.start()
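
For context, here is a minimal usage sketch of the new worker_fn hook; it is not taken from the commit. The custom worker logging_worker_loop below is hypothetical, written to match the signature the diff passes to each worker process, (dataset, key_queue, data_queue, batchify_fn), and it assumes default_batchify_fn, BatchSampler, and SequentialSampler are available as in this version of the gluon.data package.

    # Sketch only: plug a custom worker function into _MultiWorkerIter via the
    # new worker_fn argument. logging_worker_loop is a hypothetical stand-in
    # that mirrors the built-in worker_loop but prints each batch index.
    from mxnet.gluon.data import BatchSampler, SequentialSampler
    from mxnet.gluon.data import dataloader

    def logging_worker_loop(dataset, key_queue, data_queue, batchify_fn):
        """Behaves like worker_loop, with a print per batch it processes."""
        while True:
            idx, samples = key_queue.get()
            if idx is None:          # shutdown signal from _MultiWorkerIter
                break
            print('worker handling batch', idx)
            batch = batchify_fn([dataset[i] for i in samples])
            data_queue.put((idx, batch))

    if __name__ == '__main__':
        data = list(range(100))      # any indexable dataset works
        batch_sampler = BatchSampler(SequentialSampler(len(data)),
                                     batch_size=8, last_batch='keep')
        it = dataloader._MultiWorkerIter(
            num_workers=2,
            dataset=data,
            batchify_fn=dataloader.default_batchify_fn,
            batch_sampler=batch_sampler,
            worker_fn=logging_worker_loop)   # the argument added by this commit
        for batch in it:
            pass

Since worker_fn defaults to worker_loop, existing callers of _MultiWorkerIter are unaffected; only code that wants custom per-worker behavior needs to pass the new argument.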