【Breeze】【Scala】基于梯度下降的简单逻辑回归编程实现 Logistic Regression - GD

来源:互联网 发布:ems打印软件 编辑:程序博客网 时间:2024/05/19 22:04

基于 Breeze 库(www.scalanlp.org)的编程实现。

旨在理解Logistic Regression - Gradient Descent,矢量化(vectorization)编程的基本原理。

package org.lily.optimization.test

import breeze.linalg._
import breeze.math._
import breeze.numerics._

/**
 * Logistic regression trained with full-batch gradient descent, implemented
 * with Breeze (www.scalanlp.org) to demonstrate vectorized programming.
 *
 * Model: p(y = 1 | x) = sigmoid(x . theta). Each iteration takes one
 * gradient step theta <- theta - alpha * X^T (sigmoid(X theta) - y) and
 * stops early once the cross-entropy loss falls below 0.01.
 */
object LRSGDbreezeDemo {

  def main(args: Array[String]): Unit = {
    val numRows = 3 // m: number of training examples
    val numCols = 4 // n: number of features
    // Design matrix data, column-major (Breeze DenseMatrix layout);
    // each of the 3 rows of `dm` is one training example.
    val arr = Array(1.0, 4.0, 3.0, 2.0, 2.0, 8.0, 9.0, 6.0, 3.0, 4.0, 2.0, 9.0)
    val dm = new DenseMatrix(numRows, numCols, arr)
    val label = new DenseVector(Array(1.0, 0.0, 0.0)) // binary targets y in {0, 1}
    val alpha = 0.01         // learning rate
    var loss = 10.0          // sentinel above the threshold so the loop starts
    val numIterations = 1000 // iteration budget

    println(dm)
    println("")
    println(dm.t)

    // Weight vector, initialized to all zeros.
    var theta = new DenseVector(new Array[Double](numCols))
    // All-ones vector used for the (1 - y) and (1 - h) terms of the loss.
    val I = DenseVector.ones[Double](numRows)

    // Run until convergence (loss <= 0.01) or the budget is exhausted.
    for (i <- 1 to numIterations if loss > 0.01) {
      // Prediction error on the current weights: sigmoid(X theta) - y.
      val error = sigmoid(dm * theta) - label
      // Gradient step (alpha absorbs the 1/m factor of the mean gradient).
      val curTheta = theta - alpha * dm.t * error

      // Cross-entropy loss evaluated at the updated weights:
      // J = -(1/m) * sum(y .* log(h) + (1 - y) .* log(1 - h)),  h = sigmoid(X theta').
      // `h` is computed once instead of re-evaluating sigmoid three times.
      val h = sigmoid(dm * curTheta)
      loss = -(1 / numRows.toDouble) * sum((log(h) :* label) + (log(I - h) :* (I - label)))

      theta = curTheta
      println("the Current loss: " + loss)
      println("the Current theta: " + theta)
      println("the " + i + "th Iteration")
    }
  }
}
参考文章:http://blog.csdn.net/pakko/article/details/37878837

1 0