Skip to content
Snippets Groups Projects
Commit ccfa362d authored by Xinghao's avatar Xinghao
Browse files

Change *_LocalRandomSGD to *LocalRandomSGD

parent b0bbc7f6
No related branches found
No related tags found
No related merge requests found
...@@ -53,7 +53,7 @@ class LogisticRegressionModel( ...@@ -53,7 +53,7 @@ class LogisticRegressionModel(
} }
} }
class LogisticRegression_LocalRandomSGD private (var stepSize: Double, var miniBatchFraction: Double, class LogisticRegressionLocalRandomSGD private (var stepSize: Double, var miniBatchFraction: Double,
var numIters: Int) var numIters: Int)
extends Logging { extends Logging {
...@@ -138,7 +138,7 @@ class LogisticRegression_LocalRandomSGD private (var stepSize: Double, var miniB ...@@ -138,7 +138,7 @@ class LogisticRegression_LocalRandomSGD private (var stepSize: Double, var miniB
* NOTE(shivaram): We use multiple train methods instead of default arguments to support * NOTE(shivaram): We use multiple train methods instead of default arguments to support
* Java programs. * Java programs.
*/ */
object LogisticRegression_LocalRandomSGD { object LogisticRegressionLocalRandomSGD {
/** /**
* Train a logistic regression model given an RDD of (label, features) pairs. We run a fixed number * Train a logistic regression model given an RDD of (label, features) pairs. We run a fixed number
...@@ -163,7 +163,7 @@ object LogisticRegression_LocalRandomSGD { ...@@ -163,7 +163,7 @@ object LogisticRegression_LocalRandomSGD {
initialWeights: Array[Double]) initialWeights: Array[Double])
: LogisticRegressionModel = : LogisticRegressionModel =
{ {
new LogisticRegression_LocalRandomSGD(stepSize, miniBatchFraction, numIterations).train(input, initialWeights) new LogisticRegressionLocalRandomSGD(stepSize, miniBatchFraction, numIterations).train(input, initialWeights)
} }
/** /**
...@@ -185,7 +185,7 @@ object LogisticRegression_LocalRandomSGD { ...@@ -185,7 +185,7 @@ object LogisticRegression_LocalRandomSGD {
miniBatchFraction: Double) miniBatchFraction: Double)
: LogisticRegressionModel = : LogisticRegressionModel =
{ {
new LogisticRegression_LocalRandomSGD(stepSize, miniBatchFraction, numIterations).train(input) new LogisticRegressionLocalRandomSGD(stepSize, miniBatchFraction, numIterations).train(input)
} }
/** /**
...@@ -233,7 +233,7 @@ object LogisticRegression_LocalRandomSGD { ...@@ -233,7 +233,7 @@ object LogisticRegression_LocalRandomSGD {
} }
val sc = new SparkContext(args(0), "LogisticRegression") val sc = new SparkContext(args(0), "LogisticRegression")
val data = MLUtils.loadLabeledData(sc, args(1)) val data = MLUtils.loadLabeledData(sc, args(1))
val model = LogisticRegression_LocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble) val model = LogisticRegressionLocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble)
sc.stop() sc.stop()
} }
......
...@@ -53,7 +53,7 @@ class SVMModel( ...@@ -53,7 +53,7 @@ class SVMModel(
class SVM_LocalRandomSGD private (var stepSize: Double, var regParam: Double, var miniBatchFraction: Double, class SVMLocalRandomSGD private (var stepSize: Double, var regParam: Double, var miniBatchFraction: Double,
var numIters: Int) var numIters: Int)
extends Logging { extends Logging {
...@@ -138,7 +138,7 @@ class SVM_LocalRandomSGD private (var stepSize: Double, var regParam: Double, va ...@@ -138,7 +138,7 @@ class SVM_LocalRandomSGD private (var stepSize: Double, var regParam: Double, va
*/ */
object SVM_LocalRandomSGD { object SVMLocalRandomSGD {
/** /**
* Train a SVM model given an RDD of (label, features) pairs. We run a fixed number * Train a SVM model given an RDD of (label, features) pairs. We run a fixed number
...@@ -163,7 +163,7 @@ object SVM_LocalRandomSGD { ...@@ -163,7 +163,7 @@ object SVM_LocalRandomSGD {
initialWeights: Array[Double]) initialWeights: Array[Double])
: SVMModel = : SVMModel =
{ {
new SVM_LocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input, initialWeights) new SVMLocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input, initialWeights)
} }
/** /**
...@@ -185,7 +185,7 @@ object SVM_LocalRandomSGD { ...@@ -185,7 +185,7 @@ object SVM_LocalRandomSGD {
miniBatchFraction: Double) miniBatchFraction: Double)
: SVMModel = : SVMModel =
{ {
new SVM_LocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input) new SVMLocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input)
} }
/** /**
...@@ -233,7 +233,7 @@ object SVM_LocalRandomSGD { ...@@ -233,7 +233,7 @@ object SVM_LocalRandomSGD {
} }
val sc = new SparkContext(args(0), "SVM") val sc = new SparkContext(args(0), "SVM")
val data = MLUtils.loadLabeledData(sc, args(1)) val data = MLUtils.loadLabeledData(sc, args(1))
val model = SVM_LocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble) val model = SVMLocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble)
sc.stop() sc.stop()
} }
......
...@@ -53,7 +53,7 @@ class LassoModel( ...@@ -53,7 +53,7 @@ class LassoModel(
} }
class Lasso_LocalRandomSGD private (var stepSize: Double, var regParam: Double, var miniBatchFraction: Double, class LassoLocalRandomSGD private (var stepSize: Double, var regParam: Double, var miniBatchFraction: Double,
var numIters: Int) var numIters: Int)
extends Logging { extends Logging {
...@@ -138,7 +138,7 @@ class Lasso_LocalRandomSGD private (var stepSize: Double, var regParam: Double, ...@@ -138,7 +138,7 @@ class Lasso_LocalRandomSGD private (var stepSize: Double, var regParam: Double,
* *
* *
*/ */
object Lasso_LocalRandomSGD { object LassoLocalRandomSGD {
/** /**
* Train a Lasso model given an RDD of (label, features) pairs. We run a fixed number * Train a Lasso model given an RDD of (label, features) pairs. We run a fixed number
...@@ -163,7 +163,7 @@ object Lasso_LocalRandomSGD { ...@@ -163,7 +163,7 @@ object Lasso_LocalRandomSGD {
initialWeights: Array[Double]) initialWeights: Array[Double])
: LassoModel = : LassoModel =
{ {
new Lasso_LocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input, initialWeights) new LassoLocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input, initialWeights)
} }
/** /**
...@@ -185,7 +185,7 @@ object Lasso_LocalRandomSGD { ...@@ -185,7 +185,7 @@ object Lasso_LocalRandomSGD {
miniBatchFraction: Double) miniBatchFraction: Double)
: LassoModel = : LassoModel =
{ {
new Lasso_LocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input) new LassoLocalRandomSGD(stepSize, regParam, miniBatchFraction, numIterations).train(input)
} }
/** /**
...@@ -233,7 +233,7 @@ object Lasso_LocalRandomSGD { ...@@ -233,7 +233,7 @@ object Lasso_LocalRandomSGD {
} }
val sc = new SparkContext(args(0), "Lasso") val sc = new SparkContext(args(0), "Lasso")
val data = MLUtils.loadLabeledData(sc, args(1)) val data = MLUtils.loadLabeledData(sc, args(1))
val model = Lasso_LocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble) val model = LassoLocalRandomSGD.train(data, args(4).toInt, args(2).toDouble, args(3).toDouble)
sc.stop() sc.stop()
} }
......
<<<<<<< HEAD:mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala
package spark.mllib.classification
=======
/* /*
* Licensed to the Apache Software Foundation (ASF) under one or more * Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with * contributor license agreements. See the NOTICE file distributed with
...@@ -18,8 +15,7 @@ package spark.mllib.classification ...@@ -18,8 +15,7 @@ package spark.mllib.classification
* limitations under the License. * limitations under the License.
*/ */
package spark.mllib.regression package spark.mllib.classification
>>>>>>> FETCH_HEAD:mllib/src/test/scala/spark/mllib/regression/LogisticRegressionSuite.scala
import scala.util.Random import scala.util.Random
...@@ -37,13 +33,6 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll { ...@@ -37,13 +33,6 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll {
System.clearProperty("spark.driver.port") System.clearProperty("spark.driver.port")
} }
<<<<<<< HEAD:mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala
// Test if we can correctly learn A, B where Y = logistic(A + B*X)
test("LogisticRegression_LocalRandomSGD") {
val nPoints = 10000
val rnd = new Random(42)
=======
// Generate input of the form Y = logistic(offset + scale*X) // Generate input of the form Y = logistic(offset + scale*X)
def generateLogisticInput( def generateLogisticInput(
offset: Double, offset: Double,
...@@ -51,7 +40,6 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll { ...@@ -51,7 +40,6 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll {
nPoints: Int, nPoints: Int,
seed: Int): Seq[(Double, Array[Double])] = { seed: Int): Seq[(Double, Array[Double])] = {
val rnd = new Random(seed) val rnd = new Random(seed)
>>>>>>> FETCH_HEAD:mllib/src/test/scala/spark/mllib/regression/LogisticRegressionSuite.scala
val x1 = Array.fill[Double](nPoints)(rnd.nextGaussian()) val x1 = Array.fill[Double](nPoints)(rnd.nextGaussian())
// NOTE: if U is uniform[0, 1] then ln(u) - ln(1-u) is Logistic(0,1) // NOTE: if U is uniform[0, 1] then ln(u) - ln(1-u) is Logistic(0,1)
...@@ -91,12 +79,7 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll { ...@@ -91,12 +79,7 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll {
val testRDD = sc.parallelize(testData, 2) val testRDD = sc.parallelize(testData, 2)
testRDD.cache() testRDD.cache()
<<<<<<< HEAD:mllib/src/test/scala/spark/mllib/classification/LogisticRegressionSuite.scala val lr = new LogisticRegressionLocalRandomSGD().setStepSize(10.0).setNumIterations(20)
val lr = new LogisticRegression_LocalRandomSGD().setStepSize(10.0)
.setNumIterations(20)
=======
val lr = new LogisticRegression().setStepSize(10.0).setNumIterations(20)
>>>>>>> FETCH_HEAD:mllib/src/test/scala/spark/mllib/regression/LogisticRegressionSuite.scala
val model = lr.train(testRDD) val model = lr.train(testRDD)
...@@ -128,7 +111,7 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll { ...@@ -128,7 +111,7 @@ class LogisticRegressionSuite extends FunSuite with BeforeAndAfterAll {
testRDD.cache() testRDD.cache()
// Use half as many iterations as the previous test. // Use half as many iterations as the previous test.
val lr = new LogisticRegression().setStepSize(10.0).setNumIterations(10) val lr = new LogisticRegressionLocalRandomSGD().setStepSize(10.0).setNumIterations(10)
val model = lr.train(testRDD, initialWeights) val model = lr.train(testRDD, initialWeights)
......
...@@ -19,7 +19,7 @@ class SVMSuite extends FunSuite with BeforeAndAfterAll { ...@@ -19,7 +19,7 @@ class SVMSuite extends FunSuite with BeforeAndAfterAll {
System.clearProperty("spark.driver.port") System.clearProperty("spark.driver.port")
} }
test("SVM_LocalRandomSGD") { test("SVMLocalRandomSGD") {
val nPoints = 10000 val nPoints = 10000
val rnd = new Random(42) val rnd = new Random(42)
...@@ -46,7 +46,7 @@ class SVMSuite extends FunSuite with BeforeAndAfterAll { ...@@ -46,7 +46,7 @@ class SVMSuite extends FunSuite with BeforeAndAfterAll {
writer_data.write("\n")}) writer_data.write("\n")})
writer_data.close() writer_data.close()
val svm = new SVM_LocalRandomSGD().setStepSize(1.0) val svm = new SVMLocalRandomSGD().setStepSize(1.0)
.setRegParam(1.0) .setRegParam(1.0)
.setNumIterations(100) .setNumIterations(100)
......
...@@ -17,7 +17,7 @@ class LassoSuite extends FunSuite with BeforeAndAfterAll { ...@@ -17,7 +17,7 @@ class LassoSuite extends FunSuite with BeforeAndAfterAll {
System.clearProperty("spark.driver.port") System.clearProperty("spark.driver.port")
} }
test("Lasso_LocalRandomSGD") { test("LassoLocalRandomSGD") {
val nPoints = 10000 val nPoints = 10000
val rnd = new Random(42) val rnd = new Random(42)
...@@ -36,14 +36,14 @@ class LassoSuite extends FunSuite with BeforeAndAfterAll { ...@@ -36,14 +36,14 @@ class LassoSuite extends FunSuite with BeforeAndAfterAll {
val testRDD = sc.parallelize(testData, 2) val testRDD = sc.parallelize(testData, 2)
testRDD.cache() testRDD.cache()
val ls = new Lasso_LocalRandomSGD().setStepSize(1.0) val ls = new LassoLocalRandomSGD().setStepSize(1.0)
.setRegParam(0.01) .setRegParam(0.01)
.setNumIterations(20) .setNumIterations(20)
val model = ls.train(testRDD) val model = ls.train(testRDD)
val weight0 = model.weights.get(0) val weight0 = model.weights(0)
val weight1 = model.weights.get(1) val weight1 = model.weights(1)
assert(weight0 >= -1.60 && weight0 <= -1.40, weight0 + " not in [-1.6, -1.4]") assert(weight0 >= -1.60 && weight0 <= -1.40, weight0 + " not in [-1.6, -1.4]")
assert(weight1 >= -1.0e-3 && weight1 <= 1.0e-3, weight1 + " not in [-0.001, 0.001]") assert(weight1 >= -1.0e-3 && weight1 <= 1.0e-3, weight1 + " not in [-0.001, 0.001]")
assert(model.intercept >= 1.9 && model.intercept <= 2.1, model.intercept + " not in [1.9, 2.1]") assert(model.intercept >= 1.9 && model.intercept <= 2.1, model.intercept + " not in [1.9, 2.1]")
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment