From 93fbaef1d4bf0842aec0f756ef1e54078b57b7d0 Mon Sep 17 00:00:00 2001
From: ALX-7777
Date: Sun, 16 Nov 2025 20:46:24 +0100
Subject: [PATCH 1/3] assignment done BERNAL Alix

---
 numpy_questions.py   | 27 +++++++++++++++++----
 sklearn_questions.py | 57 +++++++++++++++++++++++++++++++++-----------
 2 files changed, 66 insertions(+), 18 deletions(-)

diff --git a/numpy_questions.py b/numpy_questions.py
index 21fcec4b..0b781789 100644
--- a/numpy_questions.py
+++ b/numpy_questions.py
@@ -15,6 +15,7 @@
 This will be enforced with `flake8`. You can check that there is no flake8
 errors by calling `flake8` at the root of the repo.
 """
+
 import numpy as np
 
 
@@ -40,8 +41,17 @@ def max_index(X):
     i = 0
     j = 0
 
-    # TODO
+    # check that X is a numpy array
+    if not isinstance(X, np.ndarray):
+        raise ValueError("X must be a numpy array.")
+
+    # check it has 2 dimensions
+    if len(X.shape) != 2:
+        raise ValueError("X must be 2-dimensional.")
+    max_value = np.argmax(X)  # flat index of the maximum
+
+    i, j = np.unravel_index(max_value, X.shape)  # where the maximum is
 
     return i, j
 
@@ -62,6 +72,15 @@ def wallis_product(n_terms):
     pi : float
         The approximation of order `n_terms` of pi using the Wallis product.
     """
-    # XXX : The n_terms is an int that corresponds to the number of
-    # terms in the product. For example 10000.
-    return 0.
+
+    if not isinstance(n_terms, int) or n_terms < 0:
+        raise ValueError("n_terms must be a non-negative integer")
+
+    if n_terms == 0:
+        return 1.0
+
+    n = np.arange(1, n_terms + 1)  # indices of the product terms
+
+    wallis_product = np.prod((4 * n**2) / (4 * n**2 - 1))
+
+    return wallis_product * 2  # we multiply by two to find pi

diff --git a/sklearn_questions.py b/sklearn_questions.py
index f65038c6..55a103b6 100644
--- a/sklearn_questions.py
+++ b/sklearn_questions.py
@@ -19,6 +19,7 @@
 for the methods you code and for the class. The docstring will be checked using
 `pydocstyle` that you can also call at the root of the repo.
 """
+
 import numpy as np
 from sklearn.base import BaseEstimator
 from sklearn.base import ClassifierMixin
@@ -28,47 +29,75 @@
 from sklearn.utils.multiclass import check_classification_targets
 
 
-class OneNearestNeighbor(BaseEstimator, ClassifierMixin):
+class OneNearestNeighbor(ClassifierMixin, BaseEstimator):
     "OneNearestNeighbor classifier."
 
     def __init__(self):  # noqa: D107
         pass
 
     def fit(self, X, y):
-        """Write docstring.
-
-        And describe parameters
         """
+        Parameters:
+        ----------
+        X : array, shape(n_samples, n_features) - training data
+        y : array, shape(n_samples,) - target values
+
+        Return:
+        ----------
+        self : OneNearestNeighbor - the fitted estimator
+        """
+
         X, y = check_X_y(X, y)
         check_classification_targets(y)
         self.classes_ = np.unique(y)
         self.n_features_in_ = X.shape[1]
-        # XXX fix
+        self.X_train_ = X
+        self.y_train_ = y
+
         return self
 
     def predict(self, X):
-        """Write docstring.
+        """
+        Parameters:
+        ----------
+        X : array, shape(n_samples, n_features) - test data
+
+
+        Return:
+        ----------
+        y_pred : array, shape(n_samples,) - the predicted class for each sample
 
-        And describe parameters
         """
         check_is_fitted(self)
-        X = check_array(X)
+        X = self._validate_data(X, reset=False)
+
         y_pred = np.full(
-            shape=len(X), fill_value=self.classes_[0],
+            shape=len(X), fill_value=self.classes_[0],
             dtype=self.classes_.dtype
         )
 
-        # XXX fix
+        for i, x in enumerate(X):
+            distances = np.sqrt(np.sum((self.X_train_ - x) ** 2, axis=1))
+            nearest_index = np.argmin(distances)
+            y_pred[i] = self.y_train_[nearest_index]
+
         return y_pred
 
     def score(self, X, y):
-        """Write docstring.
+        """
+        Parameters:
+        ----------
+        X : array, shape(n_samples, n_features) - test data
+        y : array, shape(n_samples,) - true labels
+
+        Return:
+        ----------
+        accuracy : float - the mean accuracy for the prediction
 
-        And describe parameters
         """
         X, y = check_X_y(X, y)
         y_pred = self.predict(X)
 
-        # XXX fix
-        return y_pred.sum()
+        accuracy = np.mean(y_pred == y)
+        return accuracy

From c674427862d14ad3716ff4c731d10042dd6039ca Mon Sep 17 00:00:00 2001
From: ALX-7777
Date: Sun, 16 Nov 2025 20:52:25 +0100
Subject: [PATCH 2/3] updates sklearn Bernal

---
 sklearn_questions.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/sklearn_questions.py b/sklearn_questions.py
index 55a103b6..dd7fe7ca 100644
--- a/sklearn_questions.py
+++ b/sklearn_questions.py
@@ -24,7 +24,6 @@
 import numpy as np
 from sklearn.base import BaseEstimator
 from sklearn.base import ClassifierMixin
 from sklearn.utils.validation import check_X_y
-from sklearn.utils.validation import check_array
 from sklearn.utils.validation import check_is_fitted
 from sklearn.utils.multiclass import check_classification_targets
 
@@ -66,14 +65,14 @@ def predict(self, X):
 
         Return:
         ----------
-        y_pred : array, shape(n_samples,) - the predicted class for each sample
-
+        y_pred : array, shape(n_samples,) -
+            the predicted class for each sample
         """
         check_is_fitted(self)
         X = self._validate_data(X, reset=False)
 
         y_pred = np.full(
-            shape=len(X), fill_value=self.classes_[0],
+            shape=len(X), fill_value=self.classes_[0],
             dtype=self.classes_.dtype
         )
 

From 7fb273b4b6a1f3d270adce472c183427322e9d07 Mon Sep 17 00:00:00 2001
From: ALX-7777
Date: Sun, 16 Nov 2025 21:05:30 +0100
Subject: [PATCH 3/3] updated files with flake and pydocstring

---
 numpy_questions.py   |  4 ----
 sklearn_questions.py | 24 +++++++++++++-----------
 2 files changed, 13 insertions(+), 15 deletions(-)

diff --git a/numpy_questions.py b/numpy_questions.py
index 0b781789..894176ce 100644
--- a/numpy_questions.py
+++ b/numpy_questions.py
@@ -72,15 +72,11 @@ def wallis_product(n_terms):
     pi : float
         The approximation of order `n_terms` of pi using the Wallis product.
     """
-
     if not isinstance(n_terms, int) or n_terms < 0:
         raise ValueError("n_terms must be a non-negative integer")
 
     if n_terms == 0:
         return 1.0
-
     n = np.arange(1, n_terms + 1)  # indices of the product terms
-
     wallis_product = np.prod((4 * n**2) / (4 * n**2 - 1))
-
     return wallis_product * 2  # we multiply by two to find pi

diff --git a/sklearn_questions.py b/sklearn_questions.py
index dd7fe7ca..fac12ccd 100644
--- a/sklearn_questions.py
+++ b/sklearn_questions.py
@@ -29,23 +29,23 @@
 
 
 class OneNearestNeighbor(ClassifierMixin, BaseEstimator):
-    "OneNearestNeighbor classifier."
+    """One Nearest Neighbor classifier."""
 
     def __init__(self):  # noqa: D107
         pass
 
     def fit(self, X, y):
-        """
-        Parameters:
+        """Fit the OneNearestNeighbor class.
+
+        Parameters
         ----------
         X : array, shape(n_samples, n_features) - training data
         y : array, shape(n_samples,) - target values
 
-        Return:
+        Return
         ----------
         self : OneNearestNeighbor - the fitted estimator
         """
-
         X, y = check_X_y(X, y)
         check_classification_targets(y)
         self.classes_ = np.unique(y)
@@ -57,13 +57,14 @@ def fit(self, X, y):
         return self
 
     def predict(self, X):
-        """
-        Parameters:
+        """Prediction of the OneNearestNeighbor class.
+
+        Parameters
         ----------
         X : array, shape(n_samples, n_features) - test data
 
 
-        Return:
+        Return
         ----------
         y_pred : array, shape(n_samples,) -
             the predicted class for each sample
@@ -84,13 +85,14 @@ def predict(self, X):
         return y_pred
 
     def score(self, X, y):
-        """
-        Parameters:
+        """Give accuracy score.
+
+        Parameters
         ----------
         X : array, shape(n_samples, n_features) - test data
         y : array, shape(n_samples,) - true labels
 
-        Return:
+        Return
         ----------
         accuracy : float - the mean accuracy for the prediction
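
Not part of the patch series above: a minimal usage sketch of the code the three patches introduce, assuming the patched files are importable as numpy_questions and sklearn_questions from the repository root with numpy and scikit-learn installed (module names are taken from the diffs; the toy arrays and printed values are illustrative only).

import numpy as np

from numpy_questions import max_index, wallis_product
from sklearn_questions import OneNearestNeighbor

# max_index returns the (row, column) of the largest entry.
X = np.array([[1.0, 7.0], [3.0, 2.0]])
print(max_index(X))            # (0, 1)

# wallis_product(n_terms) approaches pi as n_terms grows.
print(wallis_product(10000))   # roughly 3.1415

# 1-NN: each test point takes the label of its closest training point.
X_train = np.array([[0.0], [1.0], [10.0]])
y_train = np.array([0, 0, 1])
clf = OneNearestNeighbor().fit(X_train, y_train)
print(clf.predict(np.array([[0.4], [9.0]])))                  # [0 1]
print(clf.score(np.array([[0.4], [9.0]]), np.array([0, 1])))  # 1.0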