Posted to commits@lucene.apache.org by ct...@apache.org on 2018/10/22 23:35:00 UTC

[15/50] [abbrv] lucene-solr:jira/solr-12746: SOLR-12780: Add support for Leaky ReLU and TanH activations in contrib/ltr NeuralNetworkModel class. (Kamuela Lau, Christine Poerschke)

SOLR-12780: Add support for Leaky ReLU and TanH activations in contrib/ltr NeuralNetworkModel class.
(Kamuela Lau, Christine Poerschke)
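
For reference, the two activations added by this commit compute LeakyReLU(x) = x for x >= 0 and 0.01 * x otherwise, and TanH(x) = (e^x - e^(-x)) / (e^x + e^(-x)). Below is a minimal standalone sketch mirroring the anonymous Activation classes in the diff; the class and method names here are illustrative only, and the 0.01f negative slope is hard-coded in this patch:

    public class ActivationSketch {
      // Leaky ReLU: negative inputs leak through with slope 0.01 instead of clamping to 0.
      static float leakyRelu(float in) {
        return in < 0 ? 0.01f * in : in;
      }
      // TanH: delegates to java.lang.Math; the result is always in (-1, 1).
      static float tanh(float in) {
        return (float) Math.tanh(in);
      }
      public static void main(String[] args) {
        System.out.println(leakyRelu(-2.0f)); // -0.02
        System.out.println(tanh(1.0f));       // ~0.76159
      }
    }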


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/5628432b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/5628432b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/5628432b

Branch: refs/heads/jira/solr-12746
Commit: 5628432b001fb8b5ff81d206c8cf8b2e1e848db7
Parents: db1344f
Author: Christine Poerschke <cp...@apache.org>
Authored: Fri Oct 12 17:08:35 2018 +0100
Committer: Cassandra Targett <ct...@apache.org>
Committed: Sun Oct 21 15:46:46 2018 -0500

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  3 ++
 .../solr/ltr/model/NeuralNetworkModel.java      | 34 ++++++++++++++++++--
 2 files changed, 34 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5628432b/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 628abb8..3a58a68 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -144,6 +144,9 @@ New Features
 
 * SOLR-12843: Implement a MultiContentWriter in SolrJ to post multiple files/payload at once (noble)
 
+* SOLR-12780: Add support for Leaky ReLU and TanH activations in contrib/ltr NeuralNetworkModel class.
+  (Kamuela Lau, Christine Poerschke)
+
 Other Changes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/5628432b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
----------------------------------------------------------------------
diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
index 798b81c..fa92374 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
@@ -31,7 +31,7 @@ import org.apache.solr.util.SolrPluginUtils;
  * A scoring model that computes document scores using a neural network.
  * <p>
  * Supported <a href="https://en.wikipedia.org/wiki/Activation_function">activation functions</a> are:
- * <code>identity</code>, <code>relu</code>, <code>sigmoid</code> and
+ * <code>identity</code>, <code>relu</code>, <code>sigmoid</code>, <code>tanh</code>, <code>leakyrelu</code>, and
  * contributions to support additional activation functions are welcome.
  * <p>
  * Example configuration:
@@ -60,8 +60,20 @@ import org.apache.solr.util.SolrPluginUtils;
                 "activation" : "relu"
             },
             {
-                "matrix" : [ [ 27.0, 28.0 ] ],
-                "bias" : [ 29.0 ],
+                "matrix" : [ [ 27.0, 28.0 ],
+                             [ 29.0, 30.0 ] ],
+                "bias" : [ 31.0, 32.0 ],
+                "activation" : "leakyrelu"
+            },
+            {
+                "matrix" : [ [ 33.0, 34.0 ],
+                             [ 35.0, 36.0 ] ],
+                "bias" : [ 37.0, 38.0 ],
+                "activation" : "tanh"
+            },
+            {
+                "matrix" : [ [ 39.0, 40.0 ] ],
+                "bias" : [ 41.0 ],
                 "activation" : "identity"
             }
         ]
@@ -144,6 +156,22 @@ public class NeuralNetworkModel extends LTRScoringModel {
             }
           };
           break;
+        case "leakyrelu":
+          this.activation = new Activation() {
+            @Override
+            public float apply(float in) {
+              return in < 0 ? 0.01f * in : in;
+            }
+          };
+          break;
+        case "tanh":
+          this.activation = new Activation() {
+            @Override
+            public float apply(float in) {
+              return (float)Math.tanh(in);
+            }
+          };
+          break;
         case "sigmoid":
           this.activation = new Activation() {
             @Override