From ccfa362ddec1bc942785798dea41c3aac52df60f Mon Sep 17 00:00:00 2001
From: Xinghao <pxinghao@gmail.com>
Date: Sun, 28 Jul 2013 10:33:57 -0700
Subject: [PATCH] Rename *_LocalRandomSGD classes and objects to *LocalRandomSGD

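Drop the underscore from the *_LocalRandomSGD names: the classes and
companion objects LogisticRegression_LocalRandomSGD, SVM_LocalRandomSGD,
and Lasso_LocalRandomSGD become LogisticRegressionLocalRandomSGD,
SVMLocalRandomSGD, and LassoLocalRandomSGD, in line with standard Scala
CamelCase naming.

While updating the test suites to the new names, this also resolves a
merge-conflict block that had been left behind in
LogisticRegressionSuite.scala and switches LassoSuite.scala from the
Java-style model.weights.get(i) accessor to Scala's model.weights(i)
indexing.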
---
 .../classification/LogisticRegression.scala   | 10 ++++----
 .../spark/mllib/classification/SVM.scala      | 10 ++++----
 .../scala/spark/mllib/regression/Lasso.scala  | 10 ++++----
 .../LogisticRegressionSuite.scala             | 23 +++----------------
 .../spark/mllib/classification/SVMSuite.scala |  4 ++--
 .../spark/mllib/regression/LassoSuite.scala   |  8 +++----
 6 files changed, 24 insertions(+), 41 deletions(-)
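
Note for reviewers: the rename is caller-visible. Below is a minimal
before/after sketch, not part of the commit itself. The object name
RenameExample, the toy data, and the hyperparameter values (20
iterations, step size 10.0, mini-batch fraction 1.0) are illustrative,
and the positional argument order of the static train(...) call is
assumed to be (input, numIterations, stepSize, miniBatchFraction), per
the overloads touched below.

    import spark.SparkContext
    import spark.mllib.classification.LogisticRegressionLocalRandomSGD

    object RenameExample {
      def main(args: Array[String]) {
        val sc = new SparkContext("local", "RenameExample")

        // Toy (label, features) pairs, the input shape used throughout mllib.
        val points = sc.parallelize(Seq(
          (0.0, Array(0.1, -0.5)),
          (1.0, Array(2.3, 1.1))
        ))

        // Before this patch the same call was spelled:
        //   LogisticRegression_LocalRandomSGD.train(points, 20, 10.0, 1.0)
        val model = LogisticRegressionLocalRandomSGD.train(points, 20, 10.0, 1.0)

        // The setter-style API used by the test suites is renamed identically:
        val lr = new LogisticRegressionLocalRandomSGD().setStepSize(10.0)
                                                       .setNumIterations(20)
        val sameModel = lr.train(points)

        sc.stop()
      }
    }

SVMLocalRandomSGD and LassoLocalRandomSGD follow the same pattern and
additionally take a regularization parameter (setRegParam in the
setter-style API), as in the SVMSuite and LassoSuite changes below.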

diff --git a/mllib/src/main/scala/spark/mllib/classification/LogisticRegression.scala b/mllib/src/main/scala/spark/mllib/classification/LogisticRegression.scala
index 40b96fbe3a..1b093187f2 100644
--- a/mllib/src/main/scala/spark/mllib/classification/LogisticRegression.scala
+++ b/mllib/src/main/scala/spark/mllib/classification/LogisticRegression.scala
@@ -53,7 +53,7 @@ class LogisticRegressionModel(
   }
 }
 
-class LogisticRegression_LocalRandomSGD private (var stepSize: Double, var miniBatchFraction: Double,
+class LogisticRegressionLocalRandomSGD private (var stepSize: Double, var miniBatchFraction: Double,
     var numIters: Int)
   extends Logging {
 
@@ -138,7 +138,7 @@ class LogisticRegression_LocalRandomSGD private (var stepSize: Double, var miniB
  * NOTE(shivaram): We use multiple train methods instead of default arguments to support 
  *                 Java programs.
  */
-object LogisticRegression_LocalRandomSGD {
+object LogisticRegressionLocalRandomSGD {
 
   /**
    * Train a logistic regression model given an RDD of (label, features) pairs. We run a fixed number
@@ -163,7 +163,7 @@ object LogisticRegression_LocalRandomSGD {
       initialWeights: Array[Double])
     : LogisticRegressionModel =
   {
-    new LogisticRegression_LocalRandomSGD(stepSize, miniBatchFraction, numIterations).train(input, initialWeights)
+    new LogisticRegressionLocalRandomSGD(stepSize, miniBatchFraction, numIterations).train(input, initialWeights)
   }
 
   /**
@@ -185,7 +185,7 @@ object LogisticRegression_LocalRandomSGD {
       miniBatchFraction: Double)
     : LogisticRegressionModel =
   {
-    new LogisticRegression_LocalRandomSGD(stepSize, miniBatchFraction, numIterations).train(input)
+    new LogisticRegressionLocalRandomSGD(stepSize, miniBatchFraction, numIterations).train(input)
   }
 
   /**
@@ -233,7 +233,7 @@ object LogisticRegression_LocalRandomSGD {
     }
     val sc = new SparkContext(args(0), "LogisticRegression")
     val data = MLUtils.loadLabeledData(sc, args(1))
-    val model = LogisticRegression_LocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble)
+    val model = LogisticRegressionLocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble)
 
     sc.stop()
   }
diff --git a/mllib/src/main/scala/spark/mllib/classification/SVM.scala b/mllib/src/main/scala/spark/mllib/classification/SVM.scala
index 2cd1d668eb..76844f6b9c 100644
--- a/mllib/src/main/scala/spark/mllib/classification/SVM.scala
+++ b/mllib/src/main/scala/spark/mllib/classification/SVM.scala
@@ -53,7 +53,7 @@ class SVMModel(
 
 
 
-class SVM_LocalRandomSGD private (var stepSize: Double, var regParam: Double, var miniBatchFraction: Double,
+class SVMLocalRandomSGD private (var stepSize: Double, var regParam: Double, var miniBatchFraction: Double,
     var numIters: Int)
   extends Logging {
 
@@ -138,7 +138,7 @@ class SVM_LocalRandomSGD private (var stepSize: Double, var regParam: Double, va
 
 
  */
-object SVM_LocalRandomSGD {
+object SVMLocalRandomSGD {
 
   /**
    * Train a SVM model given an RDD of (label, features) pairs. We run a fixed number
@@ -163,7 +163,7 @@ object SVM_LocalRandomSGD {
       initialWeights: Array[Double])
     : SVMModel =
   {
-    new SVM_LocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input, initialWeights)
+    new SVMLocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input, initialWeights)
   }
 
   /**
@@ -185,7 +185,7 @@ object SVM_LocalRandomSGD {
       miniBatchFraction: Double)
     : SVMModel =
   {
-    new SVM_LocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input)
+    new SVMLocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input)
   }
 
   /**
@@ -233,7 +233,7 @@ object SVM_LocalRandomSGD {
     }
     val sc = new SparkContext(args(0), "SVM")
     val data = MLUtils.loadLabeledData(sc, args(1))
-    val model = SVM_LocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble)
+    val model = SVMLocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble)
 
     sc.stop()
   }
diff --git a/mllib/src/main/scala/spark/mllib/regression/Lasso.scala b/mllib/src/main/scala/spark/mllib/regression/Lasso.scala
index 64364323a2..1952658bb2 100644
--- a/mllib/src/main/scala/spark/mllib/regression/Lasso.scala
+++ b/mllib/src/main/scala/spark/mllib/regression/Lasso.scala
@@ -53,7 +53,7 @@ class LassoModel(
 }
 
 
-class Lasso_LocalRandomSGD private (var stepSize: Double, var regParam: Double, var miniBatchFraction: Double,
+class LassoLocalRandomSGD private (var stepSize: Double, var regParam: Double, var miniBatchFraction: Double,
     var numIters: Int)
   extends Logging {
 
@@ -138,7 +138,7 @@ class Lasso_LocalRandomSGD private (var stepSize: Double, var regParam: Double,
  *
  *
  */
-object Lasso_LocalRandomSGD {
+object LassoLocalRandomSGD {
 
   /**
    * Train a Lasso model given an RDD of (label, features) pairs. We run a fixed number
@@ -163,7 +163,7 @@ object Lasso_LocalRandomSGD {
       initialWeights: Array[Double])
     : LassoModel =
   {
-    new Lasso_LocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input, initialWeights)
+    new LassoLocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input, initialWeights)
   }
 
   /**
@@ -185,7 +185,7 @@ object Lasso_LocalRandomSGD {
       miniBatchFraction: Double)
     : LassoModel =
   {
-    new Lasso_LocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input)
+    new LassoLocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input)
   }
 
   /**
@@ -233,7 +233,7 @@ object Lasso_LocalRandomSGD {
     }
     val sc = new SparkContext(args(0), "Lasso")
     val data = MLUtils.loadLabeledData(sc, args(1))
-    val model = Lasso_LocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble)
+    val model = LassoLocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble)
 
     sc.stop()
   }
diff --git a/mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala b/mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala
index 827ca66330..144b8b1bc7 100644
--- a/mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala
+++ b/mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala
@@ -1,6 +1,3 @@
-<<<<<<< HEAD:mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala
-package spark.mllib.classification
-=======
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
@@ -18,8 +15,7 @@ package spark.mllib.classification
  * limitations under the License.
  */
 
-package spark.mllib.regression
->>>>>>> FETCH_HEAD:mllib/src/test/scala/spark/mllib/regression/LogisticRegressionSuite.scala
+package spark.mllib.classification
 
 import scala.util.Random
 
@@ -37,13 +33,6 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll {
     System.clearProperty("spark.driver.port")
   }
 
-<<<<<<< HEAD:mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala
-  // Test if we can correctly learn A, B where Y = logistic(A + B*X)
-  test("LogisticRegression_LocalRandomSGD") {
-    val nPoints = 10000
-    val rnd = new Random(42)
-
-=======
   // Generate input of the form Y = logistic(offset + scale*X)
   def generateLogisticInput(
       offset: Double,
@@ -51,7 +40,6 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll {
       nPoints: Int,
       seed: Int): Seq[(Double, Array[Double])]  = {
     val rnd = new Random(seed)
->>>>>>> FETCH_HEAD:mllib/src/test/scala/spark/mllib/regression/LogisticRegressionSuite.scala
     val x1 = Array.fill[Double](nPoints)(rnd.nextGaussian())
 
     // NOTE: if U is uniform[0, 1] then ln(u) - ln(1-u) is Logistic(0,1)
@@ -91,12 +79,7 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll {
 
     val testRDD = sc.parallelize(testData, 2)
     testRDD.cache()
-<<<<<<< HEAD:mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala
-    val lr = new LogisticRegression_LocalRandomSGD().setStepSize(10.0)
-                                     .setNumIterations(20)
-=======
-    val lr = new LogisticRegression().setStepSize(10.0).setNumIterations(20)
->>>>>>> FETCH_HEAD:mllib/src/test/scala/spark/mllib/regression/LogisticRegressionSuite.scala
+    val lr = new LogisticRegressionLocalRandomSGD().setStepSize(10.0).setNumIterations(20)
 
     val model = lr.train(testRDD)
 
@@ -128,7 +111,7 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll {
     testRDD.cache()
 
     // Use half as many iterations as the previous test.
-    val lr = new LogisticRegression().setStepSize(10.0).setNumIterations(10)
+    val lr = new LogisticRegressionLocalRandomSGD().setStepSize(10.0).setNumIterations(10)
 
     val model = lr.train(testRDD, initialWeights)
 
diff --git a/mllib/src/test/scala/spark/mllib/classification/SVMSuite.scala b/mllib/src/test/scala/spark/mllib/classification/SVMSuite.scala
index 50cf260f49..0d781c310c 100644
--- a/mllib/src/test/scala/spark/mllib/classification/SVMSuite.scala
+++ b/mllib/src/test/scala/spark/mllib/classification/SVMSuite.scala
@@ -19,7 +19,7 @@ class SVMSuite extends FunSuite with BeforeAndAfterAll {
     System.clearProperty("spark.driver.port")
   }
 
-  test("SVM_LocalRandomSGD") {
+  test("SVMLocalRandomSGD") {
     val nPoints = 10000
     val rnd = new Random(42)
 
@@ -46,7 +46,7 @@ class SVMSuite extends FunSuite with BeforeAndAfterAll {
       writer_data.write("\n")})
     writer_data.close()
 
-    val svm = new SVM_LocalRandomSGD().setStepSize(1.0)
+    val svm = new SVMLocalRandomSGD().setStepSize(1.0)
                       .setRegParam(1.0)
                       .setNumIterations(100)
 
diff --git a/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala b/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala
index 9836ac54c1..0c39e1e09b 100644
--- a/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala
+++ b/mllib/src/test/scala/spark/mllib/regression/LassoSuite.scala
@@ -17,7 +17,7 @@ class LassoSuite extends FunSuite with BeforeAndAfterAll {
     System.clearProperty("spark.driver.port")
   }
 
-  test("Lasso_LocalRandomSGD") {
+  test("LassoLocalRandomSGD") {
     val nPoints = 10000
     val rnd = new Random(42)
 
@@ -36,14 +36,14 @@ class LassoSuite extends FunSuite with BeforeAndAfterAll {
 
     val testRDD = sc.parallelize(testData, 2)
     testRDD.cache()
-    val ls = new Lasso_LocalRandomSGD().setStepSize(1.0)
+    val ls = new LassoLocalRandomSGD().setStepSize(1.0)
                         .setRegParam(0.01)
                                      .setNumIterations(20)
 
     val model = ls.train(testRDD)
 
-    val weight0 = model.weights.get(0)
-    val weight1 = model.weights.get(1)
+    val weight0 = model.weights(0)
+    val weight1 = model.weights(1)
     assert(weight0 >= -1.60 && weight0 <= -1.40, weight0 + " not in [-1.6, -1.4]")
     assert(weight1 >= -1.0e-3 && weight1 <= 1.0e-3, weight1 + " not in [-0.001, 0.001]")
     assert(model.intercept >= 1.9 && model.intercept <= 2.1, model.intercept + " not in [1.9, 2.1]")
-- 
GitLab