// Demo: train L1/L2-regularized logistic and linear regression on a libsvm
// sample dataset, then report coefficients and per-iteration training loss.
// Assumes a live SparkContext `sc` is in scope (e.g. spark-shell).
import org.apache.spark.ml.classification.LogisticRegression
import org.apache.spark.ml.regression.LinearRegression
import org.apache.spark.mllib.util.MLUtils

val sqlContext = new SQLContext(sc)
import sqlContext.implicits._

// Load LIBSVM-formatted data as a DataFrame with (label, features) columns.
val data = MLUtils.loadLibSVMFile(sc, "./data/sample_libsvm_data.txt").toDF()

// 70/30 random train/test split.
val Array(train, test) = data.randomSplit(Array(0.7, 0.3))

// --- Logistic Regression ---
// elasticNetParam = 0.8 mixes L1 (80%) and L2 (20%) penalties.
val logr = new LogisticRegression()
  .setMaxIter(10)
  .setRegParam(0.3)
  .setElasticNetParam(0.8)
val modelLog = logr.fit(train)
println(s"Weights: ${modelLog.coefficients} \n Intercept: ${modelLog.intercept}")

// objectiveHistory is an Array[Double]; interpolate via mkString so the
// per-iteration losses are printed instead of the array's identity hash.
val summaryLog = modelLog.summary
println(s"Error of the model in every iteration: ${summaryLog.objectiveHistory.mkString(", ")}")

// --- Linear Regression ---
val linearReg = new LinearRegression()
  .setMaxIter(10)
  .setRegParam(0.3)
  .setElasticNetParam(0.8)
val modelLR = linearReg.fit(train)
println(s"Weights: ${modelLR.coefficients} \n Intercept: ${modelLR.intercept}")

val summaryLR = modelLR.summary
println(s"Error of the model in every iteration: ${summaryLR.objectiveHistory.mkString(", ")}")
0 Comments
Leave a Reply. |
Archives
October 2016
Categories
All
|