數據格式 (data format): label,feature1 feature2 feature3 — one sample per line, e.g. "8,1 5 2" means label 8 with features (1, 5, 2):
8,1 5 2
125,90 30 5
0,0 0 0
92,22 50 20
999,333 333 333
50,12 26 12
60,25 25 10
8,1 5 2
867,537 97 233
672,55 216 401
1672,521 576 575
2229,699 799 731
1279,695 434 150
package com.agm.lgstc
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.regression.LinearRegressionModel
import org.apache.spark.mllib.regression.LinearRegressionWithSGD
import org.apache.spark.mllib.linalg.Vectors
import java.io._
import org.apache.log4j.{Level, Logger}
/**
 * Trains a linear regression model (MLlib RDD API, SGD) on a local text file and
 * reports the learned weights, a sample prediction, and the training MSE.
 *
 * Expected input format, one sample per line: "label,f1 f2 f3".
 *
 * NOTE(review): LinearRegressionWithSGD is deprecated in modern Spark; kept here
 * because the file's imports pin the old mllib.regression API.
 */
object funpredict {
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO/WARN logging; show errors only.
    Logger.getLogger("org").setLevel(Level.ERROR)

    // Windows-only workaround: Spark's Hadoop layer looks for %HADOOP_HOME%\bin\winutils.exe.
    // Point hadoop.home.dir at the working directory and create an empty placeholder
    // so startup does not fail on machines without a real Hadoop install.
    val workDir = new File(".").getCanonicalPath()
    System.getProperties().put("hadoop.home.dir", workDir)
    new File("./bin").mkdirs()
    new File("./bin/winutils.exe").createNewFile()

    val conf: SparkConf = new SparkConf().setAppName("LinearRegression").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Input path may be supplied as the first CLI argument; the original
      // hard-coded path remains the default for backward compatibility.
      val inputPath = if (args.nonEmpty) args(0) else "F:\\testData\\spark\\funpredict.txt"

      // Load and parse the data: "label,f1 f2 f3" -> LabeledPoint(label, dense features).
      val parsedData = sc.textFile(inputPath).map { line =>
        val parts = line.split(',')
        LabeledPoint(parts(0).toDouble, Vectors.dense(parts(1).split(' ').map(_.toDouble)))
      }.cache()

      // Build the model. stepSize is deliberately tiny because feature magnitudes
      // in the sample data reach the hundreds and SGD diverges with larger steps.
      val numIterations = 200
      val stepSize = 0.00001
      val model = LinearRegressionWithSGD.train(parsedData, numIterations, stepSize)
      // var lr = new LinearRegressionWithSGD().setIntercept(true)
      // val model = lr.run(parsedData)

      // Feature weights and intercept (intercept stays 0.0 unless setIntercept(true) is used).
      println("weights:%s, intercept:%s".format(model.weights, model.intercept))

      // Evaluate the model on the training examples and compute the training error.
      val valuesAndPreds = parsedData.map { point =>
        (point.label, model.predict(point.features))
      }
      println(model.predict(Vectors.dense(50, 50, 78)))

      // Mean squared error over the training set.
      val MSE = valuesAndPreds.map { case (v, p) => math.pow(v - p, 2) }.mean()
      println("training Mean Squared Error = " + MSE)

      // Save and load model
      // model.save(sc, "myModelPath")
      // val sameModel = LinearRegressionModel.load(sc, "myModelPath")
    } finally {
      // Release the SparkContext even if training fails.
      sc.stop()
    }
  }
}