You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@mxnet.apache.org by GitBox <gi...@apache.org> on 2017/11/12 19:12:07 UTC

[GitHub] ZiyueHuang closed pull request #8626: add group2ctxs to movie factorization machine

ZiyueHuang closed pull request #8626: add group2ctxs to movie factorization machine
URL: https://github.com/apache/incubator-mxnet/pull/8626
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (GitHub does not display the original fork diff once the pull request is closed):

diff --git a/example/sparse/matrix_fact_model.py b/example/sparse/matrix_fact_model.py
index d2d8de5dd3..194773f9ca 100644
--- a/example/sparse/matrix_fact_model.py
+++ b/example/sparse/matrix_fact_model.py
@@ -18,34 +18,36 @@
 import mxnet as mx
 
 def matrix_fact_net(factor_size, num_hidden, max_user, max_item, sparse_embed=True):
-    # input
-    user = mx.symbol.Variable('user')
-    item = mx.symbol.Variable('item')
-    score = mx.symbol.Variable('score')
-    if sparse_embed:
-        # user feature lookup
-        user_weight = mx.symbol.Variable('user_weight', stype='row_sparse')
-        user = mx.symbol.contrib.SparseEmbedding(data=user, weight=user_weight,
-                                                 input_dim=max_user, output_dim=factor_size)
-        # item feature lookup
-        item_weight = mx.symbol.Variable('item_weight', stype='row_sparse')
-        item = mx.symbol.contrib.SparseEmbedding(data=item, weight=item_weight,
-                                                 input_dim=max_item, output_dim=factor_size)
-    else:
-        # user feature lookup
-        user = mx.symbol.Embedding(data=user, input_dim=max_user, output_dim=factor_size)
-        # item feature lookup
-        item = mx.symbol.Embedding(data=item, input_dim=max_item, output_dim=factor_size)
-    # non-linear transformation of user features
-    user = mx.symbol.Activation(data=user, act_type='relu')
-    user = mx.symbol.FullyConnected(data=user, num_hidden=num_hidden)
-    # non-linear transformation of item features
-    item = mx.symbol.Activation(data=item, act_type='relu')
-    item = mx.symbol.FullyConnected(data=item, num_hidden=num_hidden)
-    # predict by the inner product, which is elementwise product and then sum
-    pred = user * item
-    pred = mx.symbol.sum(data=pred, axis = 1)
-    pred = mx.symbol.Flatten(data=pred)
-    # loss layer
-    pred = mx.symbol.LinearRegressionOutput(data=pred, label=score)
+    with mx.AttrScope(ctx_group='dev1'):
+        # input
+        user = mx.symbol.Variable('user')
+        item = mx.symbol.Variable('item')
+        if sparse_embed:
+            # user feature lookup
+            user_weight = mx.symbol.Variable('user_weight', stype='row_sparse')
+            user = mx.symbol.contrib.SparseEmbedding(data=user, weight=user_weight,
+                                                     input_dim=max_user, output_dim=factor_size)
+            # item feature lookup
+            item_weight = mx.symbol.Variable('item_weight', stype='row_sparse')
+            item = mx.symbol.contrib.SparseEmbedding(data=item, weight=item_weight,
+                                                     input_dim=max_item, output_dim=factor_size)
+        else:
+            # user feature lookup
+            user = mx.symbol.Embedding(data=user, input_dim=max_user, output_dim=factor_size)
+            # item feature lookup
+            item = mx.symbol.Embedding(data=item, input_dim=max_item, output_dim=factor_size)
+        # non-linear transformation of user features
+        user = mx.symbol.Activation(data=user, act_type='relu')
+        user = mx.symbol.FullyConnected(data=user, num_hidden=num_hidden)
+        # non-linear transformation of item features
+        item = mx.symbol.Activation(data=item, act_type='relu')
+        item = mx.symbol.FullyConnected(data=item, num_hidden=num_hidden)
+    with mx.AttrScope(ctx_group='dev2'):
+        # predict by the inner product, which is elementwise product and then sum
+        pred = user * item
+        pred = mx.symbol.sum(data=pred, axis = 1)
+        pred = mx.symbol.Flatten(data=pred)
+        score = mx.symbol.Variable('score')
+        # loss layer
+        pred = mx.symbol.LinearRegressionOutput(data=pred, label=score)
     return pred
diff --git a/example/sparse/matrix_factorization.py b/example/sparse/matrix_factorization.py
index cdb61643d3..5b6fba068f 100644
--- a/example/sparse/matrix_factorization.py
+++ b/example/sparse/matrix_factorization.py
@@ -36,6 +36,8 @@
                     help="the factor size of the embedding operation")
 parser.add_argument('--use-dense', action='store_true',
                     help="use the dense embedding operator")
+parser.add_argument('--use-gpu', action='store_true',
+                    help="use gpu")
 parser.add_argument('--dummy-iter', action='store_true',
                     help="use the dummy data iterator for speed test")
 
@@ -64,6 +66,7 @@
 
     momentum = 0.9
     ctx = mx.cpu(0)
+    dev_ctx = mx.gpu(0) if args.use_gpu else mx.cpu(0)
     learning_rate = 0.1
 
     # prepare dataset and iterators
@@ -79,7 +82,7 @@
 
     # initialize the module
     mod = mx.module.Module(symbol=net, context=ctx, data_names=['user', 'item'],
-                           label_names=['score'])
+                           label_names=['score'], group2ctxs=[{'dev1': ctx, 'dev2': dev_ctx}])
     mod.bind(data_shapes=train_iter.provide_data, label_shapes=train_iter.provide_label)
     mod.init_params(initializer=mx.init.Xavier(factor_type="in", magnitude=2.34))
     optim = mx.optimizer.create(optimizer, learning_rate=learning_rate, momentum=momentum,


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services