
Commit a8496f5: update

Parent: 3b5b614

File tree: 1 file changed (+13, -13 lines)

Code-Example-016.1-LogisticRegression-without-lib.ipynb

Lines changed: 13 additions & 13 deletions
@@ -42,7 +42,7 @@
 "n_feature = 2 # the number of features\n",
 "n_components = 2 # the number of clusters\n",
 "\n",
-"n=1000 # the number of total samples"
+"n = 1000 # the number of total samples"
 ]
 },
 {
@@ -207,21 +207,21 @@
 " I/P\n",
 " ----------\n",
 " X : 2D array where each row represent the training example and each column represent the feature ndarray. \n",
-" Dimension(m x n)\n",
-" m= number of training examples\n",
-" n= number of features (including X_0 column of ones)\n",
-" y : 1D array of labels/target value for each traing example. dimension(1 x m)\n",
+" Dimension (n x d)\n",
+" n = number of training examples\n",
+" d = number of features (including X_0 column of ones)\n",
+" y : 1D array of labels/target value for each traing example. dimension(1 x n)\n",
 "\n",
-" weights : 1D array of fitting parameters or weights. Dimension (1 x n)\n",
+" weights : 1D array of fitting parameters or weights. Dimension (1 x d)\n",
 "\n",
 " O/P\n",
 " -------\n",
 " cost : The cost of using theta as the parameter for linear regression to fit the data points in X and y.\n",
 " \"\"\"\n",
-" m, n = X.shape\n",
+" n, d = X.shape\n",
 " x_dot_weights = X.dot(weights)\n",
 "\n",
-" cost = 1.0 / m * (-y.T.dot(np.log(sigmoid(x_dot_weights))) - (1 - y).T.dot(np.log(1 - sigmoid(x_dot_weights))))\n",
+" cost = 1.0 / n * (-y.T.dot(np.log(sigmoid(x_dot_weights))) - (1 - y).T.dot(np.log(1 - sigmoid(x_dot_weights))))\n",
 "\n",
 " return cost"
 ]
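For reference, the cell touched by this hunk computes the mean cross-entropy cost of logistic regression (the docstring's mention of "linear regression" is the notebook's own wording). A minimal, self-contained Python sketch of the same computation, assuming sigmoid is the standard logistic function and that the enclosing function is named cost_function with this argument order (neither the def line nor the sigmoid body appears in the hunk), might look like:

import numpy as np

def sigmoid(z):
    # Standard logistic function; assumed, since the hunk only calls sigmoid().
    return 1.0 / (1.0 + np.exp(-z))

def cost_function(weights, X, y):
    # Mean cross-entropy cost over n examples, using the renamed (n x d) shapes.
    n, d = X.shape
    x_dot_weights = X.dot(weights)
    cost = 1.0 / n * (-y.T.dot(np.log(sigmoid(x_dot_weights)))
                      - (1 - y).T.dot(np.log(1 - sigmoid(x_dot_weights))))
    return cost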
@@ -250,10 +250,10 @@
 " -------\n",
 " grad: (numpy array)The gradient of the cost with respect to the parameters theta\n",
 " \"\"\"\n",
-" m, n = X.shape\n",
+" n, d = X.shape\n",
 " x_dot_weights = X.dot(weights)\n",
 "\n",
-" grad = (1.0 / m )* (sigmoid(x_dot_weights) - y).T.dot(X)\n",
+" grad = (1.0 / n )* (sigmoid(x_dot_weights) - y).T.dot(X)\n",
 "\n",
 " return grad"
 ]
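The gradient cell uses the same (n x d) convention. A sketch under the same assumptions (the def line is not shown in the hunk, so the gradient name and argument order are illustrative; sigmoid is the logistic function sketched after the previous hunk):

def gradient(weights, X, y):
    # Gradient of the mean cross-entropy cost with respect to the weights.
    # sigmoid() is the standard logistic function defined in the sketch above.
    n, d = X.shape
    x_dot_weights = X.dot(weights)
    grad = (1.0 / n) * (sigmoid(x_dot_weights) - y).T.dot(X)
    return grad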
@@ -463,9 +463,9 @@
 " I/P\n",
 " ----------\n",
 " X : 2D array where each row represent the training example and each column represent the feature ndarray. \n",
-" Dimension(m x n)\n",
-" m= number of training examples\n",
-" n= number of features (including X_0 column of ones)\n",
+" Dimension(n x d)\n",
+" n= number of training examples\n",
+" d= number of features (including X_0 column of ones)\n",
 "\n",
 " theta : 1D array of fitting parameters or weights. Dimension (1 x n)\n",
 "\n",
