You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@mahout.apache.org by td...@apache.org on 2010/09/21 00:26:07 UTC
svn commit: r999145 -
/mahout/trunk/core/src/main/java/org/apache/mahout/classifier/sgd/AdaptiveLogisticRegression.java
Author: tdunning
Date: Mon Sep 20 22:26:07 2010
New Revision: 999145
URL: http://svn.apache.org/viewvc?rev=999145&view=rev
Log:
Freeze survivors of evolution to deal with early good results.
Modified:
mahout/trunk/core/src/main/java/org/apache/mahout/classifier/sgd/AdaptiveLogisticRegression.java
Modified: mahout/trunk/core/src/main/java/org/apache/mahout/classifier/sgd/AdaptiveLogisticRegression.java
URL: http://svn.apache.org/viewvc/mahout/trunk/core/src/main/java/org/apache/mahout/classifier/sgd/AdaptiveLogisticRegression.java?rev=999145&r1=999144&r2=999145&view=diff
==============================================================================
--- mahout/trunk/core/src/main/java/org/apache/mahout/classifier/sgd/AdaptiveLogisticRegression.java (original)
+++ mahout/trunk/core/src/main/java/org/apache/mahout/classifier/sgd/AdaptiveLogisticRegression.java Mon Sep 20 22:26:07 2010
@@ -58,6 +58,8 @@ import java.util.concurrent.ExecutionExc
* a different fitness value in non-binary cases.
*/
public class AdaptiveLogisticRegression implements OnlineLearner {
+ private static final int SURVIVORS = 2;
+
private int record = 0;
private int evaluationInterval = 1000;
@@ -122,7 +124,15 @@ public class AdaptiveLogisticRegression
throw new IllegalStateException(e);
}
- ep.mutatePopulation(2);
+ // evolve based on new fitness
+ ep.mutatePopulation(SURVIVORS);
+
+ // now grossly hack the top survivors so they stick around. Set their
+ // mutation rates small and also hack their learning rate to be small
+ // as well.
+ for (State<Wrapper> state : ep.getPopulation().subList(0, SURVIVORS)) {
+ state.getPayload().freeze(state);
+ }
buffer.clear();
}
@@ -293,6 +303,18 @@ public class AdaptiveLogisticRegression
wrapped.decayExponent(0);
}
+ public void freeze(State<Wrapper> s) {
+ // radically decrease learning rate
+ s.getParams()[1] -= 5;
+
+ // and cause evolution to hold (almost)
+ s.setOmni(s.getOmni() / 10);
+ double[] step = s.getStep();
+ for (int i = 0; i < step.length; i++) {
+ step[i] /= 10;
+ }
+ }
+
public void setMappings(State<Wrapper> x) {
int i = 0;
// set the range for regularization (lambda)