diff --git a/Lectures/05 Supervised Learning - Regression/04 Gradient Descent for Linear Regression.ipynb b/Lectures/05 Supervised Learning - Regression/04 Gradient Descent for Linear Regression.ipynb
index 38f099f..5692b6c 100644
--- a/Lectures/05 Supervised Learning - Regression/04 Gradient Descent for Linear Regression.ipynb
+++ b/Lectures/05 Supervised Learning - Regression/04 Gradient Descent for Linear Regression.ipynb
@@ -263,9 +263,9 @@
    "source": [
     "To minimize the cost function, we need to know in which direction to adjust our parameters. This is where partial derivatives come in. We calculate the partial derivative of the cost function with respect to each parameter:\n",
     "\n",
-    "$$ \\frac{\\partial J}{\\partial \\beta_0} = -\\frac{1}{n} \\sum_{i=1}^n x_i(y_i - (\\beta_0 + \\beta_1 x_i)) $$\n",
+    "$$ \\frac{\\partial J}{\\partial \\beta_0} = -\\frac{1}{n} \\sum_{i=1}^n (y_i - (\\beta_0 + \\beta_1 x_i)) $$\n",
     "\n",
-    "$$ \\frac{\\partial J}{\\partial \\beta_1} = -\\frac{1}{n} \\sum_{i=1}^n (y_i - (\\beta_0 + \\beta_1 x_i)) $$\n"
+    "$$ \\frac{\\partial J}{\\partial \\beta_1} = -\\frac{1}{n} \\sum_{i=1}^n x_i(y_i - (\\beta_0 + \\beta_1 x_i)) $$\n"
    ]
   },
   {
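
For reference, a minimal NumPy sketch of the corrected gradients. The function and variable names (`gradients`, `beta0`, `beta1`) are illustrative rather than taken from the notebook, and it assumes the notebook's cost function is J = (1/(2n)) Σ (y_i − (β_0 + β_1 x_i))², which is what makes the 1/n factors in the derivatives above come out without an extra 2.

```python
import numpy as np

def gradients(x, y, beta0, beta1):
    # Assumed cost: J = (1/(2n)) * sum((y_i - (beta0 + beta1 * x_i))**2)
    n = len(x)
    residuals = y - (beta0 + beta1 * x)    # y_i - (beta_0 + beta_1 x_i)
    d_beta0 = -np.sum(residuals) / n       # intercept gradient: no x_i factor
    d_beta1 = -np.sum(x * residuals) / n   # slope gradient: residuals weighted by x_i
    return d_beta0, d_beta1
```

Note the asymmetry the patch fixes: only the slope gradient carries the x_i factor, because β_1 multiplies x_i in the prediction while β_0 does not.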