You are viewing a plain text version of this content. The canonical link for it is here.
Posted to issues@spark.apache.org by "Subhod Lagade (JIRA)" <ji...@apache.org> on 2015/06/25 13:25:04 UTC
[jira] [Issue Comment Deleted] (SPARK-8627) ALS model predict error
[ https://issues.apache.org/jira/browse/SPARK-8627?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Subhod Lagade updated SPARK-8627:
---------------------------------
Comment: was deleted
(was: Can you help me resolve this?
usersProducts is an RDD[(Int, Int)], but it is still giving me an error.
)
> ALS model predict error
> -----------------------
>
> Key: SPARK-8627
> URL: https://issues.apache.org/jira/browse/SPARK-8627
> Project: Spark
> Issue Type: Bug
> Components: MLlib
> Affects Versions: 1.4.0
> Reporter: Subhod Lagade
>
> /**
> * Created by subhod lagade on 25/06/15.
> */
> import org.apache.spark.SparkConf
> import org.apache.spark.streaming.StreamingContext._
> import org.apache.spark.streaming.{Seconds, StreamingContext}
> import org.apache.spark.streaming._;
> import org.apache.spark.SparkContext
> import org.apache.spark.SparkContext._
> import java.io.BufferedReader;
> import java.io.FileInputStream;
> import java.io.IOException;
> import java.io.InputStreamReader;
> import java.io.PrintStream;
> import java.net.ServerSocket;
> import java.net.Socket;
> import java.util.Properties;
> import org.apache.spark.mllib.recommendation.ALS
> import org.apache.spark.mllib.recommendation.MatrixFactorizationModel
> import org.apache.spark.mllib.recommendation.Rating
> object SparkStreamKafka {
> def main(args: Array[String]) {
>
> val conf = new SparkConf().setAppName("Simple Application");
> val sc = new SparkContext(conf);
> val data = sc.textFile("/home/appadmin/Disney/data.csv");
> val ratings = data.map(_.split(',') match { case Array(user, product, rate) => Rating(user.toInt, product.toInt, rate.toDouble) });
>
>
> val rank = 3;
> val numIterations = 2;
> val model = ALS.train(ratings,rank,numIterations,0.01);
>
> val usersProducts = ratings.map{ case Rating(user, product, rate) => (user, product)}
> // Build the recommendation model using ALS
> usersProducts.foreach(println)
> val predictions = model.predict(usersProducts)
> }
> }
> /*
> ERROR Message
> [ERROR] /home/appadmin/disneypoc/src/main/scala/org/capgemini/SparkKafka.scala:53: error: not enough arguments for method predict: (user: Int, product: Int)Double.
> [INFO] Unspecified value parameter product.
> [INFO] val predictions = model.predict(usersProducts)
> */
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org