fixed gradient ascent

parent 45b1f51685
commit a938b70d57

4 changed files with 31 additions and 3 deletions
Matrix.java

@@ -112,6 +112,22 @@ public class Matrix {
         return result;
     }
 
+    /**
+     * Element multiplication, each element in vector1 will be
+     * multiplied with the corresponding element in vector2.
+     *
+     * @return a new vector with the result
+     */
+    public static double[] multiply(double[] vector1, double[] vector2) {
+        vectorPreCheck(vector1, vector2);
+        double[] result = new double[vector1.length];
+
+        for (int i = 0; i < vector1.length; ++i) {
+            result[i] = vector1[i] * vector2[i];
+        }
+        return result;
+    }
+
     /**
      * Element multiplication, each element in matrix1 will be
      * multiplied with the corresponding element in matrix2.
@@ -160,6 +176,7 @@ public class Matrix {
         return result;
     }
 
+
     private static void elementalPreCheck(double[][] matrix1, double[][] matrix2) {
         if (matrix1.length != matrix2.length || matrix1[0].length != matrix2[0].length)
             throw new IllegalArgumentException("Matrices need to be of same dimension: " +
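For context, the element-wise (Hadamard) product added above multiplies two vectors position by position. The standalone sketch below mirrors that behaviour; the class name, main method, and exception message are illustrative only and are not part of the repository's Matrix class (which delegates the length check to vectorPreCheck):

// Illustrative sketch only: mirrors the behaviour of the new Elemental.multiply.
public class ElementwiseSketch {

    // Multiply two vectors element by element.
    static double[] multiply(double[] vector1, double[] vector2) {
        if (vector1.length != vector2.length)   // stands in for vectorPreCheck
            throw new IllegalArgumentException("Vectors need to be of same length");
        double[] result = new double[vector1.length];
        for (int i = 0; i < vector1.length; ++i) {
            result[i] = vector1[i] * vector2[i];
        }
        return result;
    }

    public static void main(String[] args) {
        // {1, 2, 3, 4} * {0.1, 0.2, 0.3, 0.4} -> {0.1, 0.4, 0.9, 1.6},
        // the same values the new vectorMultiply test asserts below.
        System.out.println(java.util.Arrays.toString(
                multiply(new double[]{1, 2, 3, 4}, new double[]{0.1, 0.2, 0.3, 0.4})));
    }
}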
LinearRegression.java

@@ -46,14 +46,15 @@ public class LinearRegression {
      *
      * @return the theta that was found to minimize the cost function
      */
-    public static double[] gradientDescent(double[][] x, double[] y, double[] theta, double alpha){
+    public static double[] gradientDescentIteration(double[][] x, double[] y, double[] theta, double alpha){
         double[] newTheta = new double[theta.length];
         double m = y.length;
         double[] hypothesis = calculateHypothesis(x, theta);
         double[] normalized = Matrix.subtract(hypothesis, y);
 
         for (int j= 0; j < theta.length; j++) {
-            newTheta[j] = theta[j] - alpha * (1.0/m) * Matrix.sum(Matrix.add(normalized, Matrix.getColumn(x, j)));
+            newTheta[j] = theta[j] - alpha * (1.0/m) * Matrix.sum(
+                    Matrix.Elemental.multiply(normalized, Matrix.getColumn(x, j)));
         }
 
         return newTheta;
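This change is the actual fix: the partial derivative of the squared-error cost multiplies each residual by the corresponding feature value and sums, i.e. a dot product of the residual vector with column j of x, which is the sum of their element-wise product; the old code summed an element-wise addition instead. In standard notation (a sketch of the usual batch update rule, not copied from the repository):

\theta_j \leftarrow \theta_j - \alpha \, \frac{1}{m} \sum_{i=1}^{m}
    \bigl( h_\theta(x^{(i)}) - y^{(i)} \bigr) \, x_j^{(i)}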
MatrixTest.java

@@ -105,6 +105,16 @@ public class MatrixTest {
 
+    @Test
+    public void vectorMultiply(){
+        assertArrayEquals(
+                new double[]{0.1, 0.4, 0.9, 1.6},
+                Matrix.Elemental.multiply(
+                        new double[]{1, 2, 3, 4},
+                        new double[]{0.1, 0.2, 0.3, 0.4}),
+                0.001);
+    }
+
     @Test
     public void vectorMatrixMultiply(){
         assertArrayEquals(
                 new double[]{1.4, 1.9, 2.4, 2.9},
                 Matrix.multiply(
@ -33,7 +33,7 @@ public class LinearRegressionTest {
|
|||
|
||||
@Test
|
||||
public void gradientAscent() {
|
||||
double[] theta = LinearRegression.gradientDescent( // one iteration
|
||||
double[] theta = LinearRegression.gradientDescentIteration( // one iteration
|
||||
/* x */ new double[][]{{1, 5},{1, 2},{1, 4},{1, 5}},
|
||||
/* y */ new double[]{1, 6, 4, 2},
|
||||
/* theta */ new double[]{0, 0},
|
||||
|
|
|
|||
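As a hand-worked sanity check of this call (the alpha argument is cut off above, so it is left symbolic; these values are not taken from the test's expected output): with theta = (0, 0) the hypothesis is the zero vector, so the residuals are -y = (-1, -6, -4, -2). Their element-wise products with the two columns of x sum to -13 (the column of ones) and -43 (the column 5, 2, 4, 5), so one iteration gives

\theta_0 = 0 - \alpha \cdot \tfrac{1}{4} \cdot (-13) = 3.25\,\alpha, \qquad
\theta_1 = 0 - \alpha \cdot \tfrac{1}{4} \cdot (-43) = 10.75\,\alpha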