fixed gradient descent

This commit is contained in:
Ziver Koc 2018-05-22 17:31:58 +02:00
parent 45b1f51685
commit a938b70d57
4 changed files with 31 additions and 3 deletions

View file

@@ -112,6 +112,22 @@ public class Matrix {
return result;
}
/**
 * Element-wise multiplication: each element in {@code vector1} is
 * multiplied with the corresponding element in {@code vector2}.
 *
 * @param vector1 the first input vector
 * @param vector2 the second input vector; must have the same length as {@code vector1}
 * @return a new vector where index {@code i} holds {@code vector1[i] * vector2[i]};
 *         neither input is modified
 * @throws IllegalArgumentException if the vector lengths differ
 *         (assumed from the sibling {@code elementalPreCheck} — confirm
 *         against {@code vectorPreCheck})
 */
public static double[] multiply(double[] vector1, double[] vector2) {
    // Validate that both vectors have matching dimensions before computing.
    vectorPreCheck(vector1, vector2);
    double[] result = new double[vector1.length];
    for (int i = 0; i < vector1.length; ++i) {
        result[i] = vector1[i] * vector2[i];
    }
    return result;
}
/**
* Element multiplication, each element in matrix1 will be
* multiplied with the corresponding element in matrix2.
@@ -160,6 +176,7 @@ public class Matrix {
return result;
}
private static void elementalPreCheck(double[][] matrix1, double[][] matrix2) {
if (matrix1.length != matrix2.length || matrix1[0].length != matrix2[0].length)
throw new IllegalArgumentException("Matrices need to be of same dimension: " +

View file

@@ -46,14 +46,15 @@ public class LinearRegression {
*
* @return the theta that was found to minimize the cost function
*/
public static double[] gradientDescent(double[][] x, double[] y, double[] theta, double alpha){
public static double[] gradientDescentIteration(double[][] x, double[] y, double[] theta, double alpha){
double[] newTheta = new double[theta.length];
double m = y.length;
double[] hypothesis = calculateHypothesis(x, theta);
double[] normalized = Matrix.subtract(hypothesis, y);
for (int j= 0; j < theta.length; j++) {
newTheta[j] = theta[j] - alpha * (1.0/m) * Matrix.sum(Matrix.add(normalized, Matrix.getColumn(x, j)));
newTheta[j] = theta[j] - alpha * (1.0/m) * Matrix.sum(
Matrix.Elemental.multiply(normalized, Matrix.getColumn(x, j)));
}
return newTheta;