REF: Remove annotations for Python 3.6
aitikgupta committed Jan 29, 2021
1 parent e89e361 commit 6fb9a6c
Showing 5 changed files with 25 additions and 27 deletions.
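
For context, the compatibility point behind this refactor: the postponed-annotations behaviour enabled by "from __future__ import annotations" (PEP 563) only exists from Python 3.7, so Python 3.6 rejects that import at compile time. Below is a minimal, self-contained sketch (illustrative only, not taken from this repository or commit) that shows the failure mode:

import sys

source = "from __future__ import annotations\n"
try:
    # Accepted on Python 3.7+; Python 3.6 raises a SyntaxError because
    # the "annotations" future feature does not exist there yet.
    compile(source, "<example>", "exec")
    print(f"Python {sys.version_info.major}.{sys.version_info.minor}: accepted")
except SyntaxError as exc:
    # On 3.6 this reports: "future feature annotations is not defined"
    print(f"Python {sys.version_info.major}.{sys.version_info.minor}: {exc.msg}")
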
README.md (2 changes: 1 addition & 1 deletion)
@@ -156,7 +156,7 @@ Aitik Gupta - [Personal Website][website-url]
[linkedin-shield]: https://img.shields.io/badge/-LinkedIn-black.svg?style=flat-square&logo=linkedin&colorB=555
[linkedin-url]: https://linkedin.com/in/aitik-gupta
[product-screenshot]: images/screenshot.png
-[python-shield]: https://img.shields.io/badge/python-3.7+-blue.svg
+[python-shield]: https://img.shields.io/badge/python-3.6+-blue.svg
[python-url]: https://www.python.org/
[website-shield]: https://img.shields.io/badge/website-aitikgupta.ml-blue?style=flat-square
[website-url]: https://aitikgupta.github.io/
swi_ml/classification/logistic_regression.py (2 changes: 1 addition & 1 deletion)
@@ -15,7 +15,7 @@ def __init__(
normalize=False,
initialiser="uniform",
verbose=None,
-) -> None:
+):
self.activation = activations.Sigmoid()
regularisation = L1_L2Regularisation(
multiply_factor=multiply_factor, l1_ratio=l1_ratio
swi_ml/regression/linear_regression.py (34 changes: 16 additions & 18 deletions)
@@ -1,5 +1,3 @@
-from __future__ import annotations
-
import logging
import math
import time
@@ -32,7 +30,7 @@ def __init__(
regularisation=None,
initialiser="uniform",
verbose=None,
-) -> None:
+):
if verbose is not None:
logger.setLevel(verbose)
else:
@@ -45,7 +43,7 @@ def __init__(
self.history = []
self.backend = super().get_backend()

-def _initialise_uniform_weights(self, shape: tuple) -> None:
+def _initialise_uniform_weights(self, shape: tuple):
self.num_samples, self.num_features = shape
limit = 1 / math.sqrt(self.num_features)
self.W = self.backend.asarray(
@@ -55,7 +53,7 @@ def _initialise_uniform_weights(self, shape: tuple) -> None:
1,
)

-def _initialise_zeros_weights(self, shape: tuple) -> None:
+def _initialise_zeros_weights(self, shape: tuple):
self.num_samples, self.num_features = shape
self.W = self.backend.asarray(
self.backend.zeros(
@@ -66,7 +64,7 @@ def _initialise_zeros_weights(self, shape: tuple) -> None:
1,
)

-def _update_history(self) -> None:
+def _update_history(self):
self.history.append(self.curr_loss)

def _update_weights(self):
@@ -82,7 +80,7 @@ def MSE_loss(self, Y_true, Y_pred):
0.5 * (Y_true - Y_pred) ** 2
) + self.regularisation.add_cost_regularisation(self.W)

-def initialise_weights(self, X) -> None:
+def initialise_weights(self, X):
"""
Initialises weights with correct dimensions
"""
@@ -154,7 +152,7 @@ def predict(self, X):
def _predict(self, X):
return X.dot(self.W) + self.b

-def plot_loss(self) -> None:
+def plot_loss(self):
"""
Plots the loss history curve during the training period.
NOTE: This function just plots the graph, to display it
@@ -174,7 +172,7 @@ def __init__(
normalize=False,
initialiser="uniform",
verbose=None,
-) -> None:
+):
# regularisation of alpha 0 (essentially NIL)
regularisation = _BaseRegularisation(multiply_factor=0, l1_ratio=0)
super().__init__(
@@ -186,7 +184,7 @@ def __init__(
verbose,
)

-def plot_loss(self) -> None:
+def plot_loss(self):
plt.plot(self.history, label="Linear Regression")
super().plot_loss()

@@ -200,7 +198,7 @@ def __init__(
normalize=False,
initialiser="uniform",
verbose=None,
-) -> None:
+):
regularisation = L1Regularisation(l1_cost=l1_cost)
super().__init__(
num_iterations,
@@ -211,7 +209,7 @@ def __init__(
verbose,
)

-def plot_loss(self) -> None:
+def plot_loss(self):
plt.plot(self.history, label="Lasso Regression")
super().plot_loss()

@@ -226,7 +224,7 @@ def __init__(
initialiser="uniform",
backend="cupy",
verbose=None,
-) -> None:
+):
regularisation = L2Regularisation(l2_cost=l2_cost)
super().__init__(
num_iterations,
@@ -237,7 +235,7 @@ def __init__(
verbose,
)

-def plot_loss(self) -> None:
+def plot_loss(self):
plt.plot(self.history, label="Ridge Regression")
super().plot_loss()

@@ -252,7 +250,7 @@ def __init__(
normalize=False,
initialiser="uniform",
verbose=None,
-) -> None:
+):
regularisation = L1_L2Regularisation(
multiply_factor=multiply_factor, l1_ratio=l1_ratio
)
@@ -265,7 +263,7 @@ def __init__(
verbose,
)

-def plot_loss(self) -> None:
+def plot_loss(self):
plt.plot(self.history, label="Elastic Net Regression")
super().plot_loss()

@@ -281,7 +279,7 @@ def __init__(
normalize=False,
initialiser="uniform",
verbose=None,
-) -> None:
+):
self.degree = degree
regularisation = L1_L2Regularisation(
multiply_factor=multiply_factor, l1_ratio=l1_ratio
@@ -303,7 +301,7 @@ def _predict_preprocess(self, data):
poly_data = transform_polynomial(data, self.degree)
return super()._predict_preprocess(poly_data)

-def plot_loss(self) -> None:
+def plot_loss(self):
plt.plot(
self.history, label=f"Polynomial Regression, degree={self.degree}"
)
swi_ml/svm/svm.py (6 changes: 3 additions & 3 deletions)
@@ -33,7 +33,7 @@ def __init__(
self.history = []
self.backend = super().get_backend()

-def _initialise_uniform_weights(self, shape: tuple) -> None:
+def _initialise_uniform_weights(self, shape: tuple):
self.num_samples, self.num_features = shape
limit = 1 / math.sqrt(self.num_features)
self.W = self.backend.asarray(
@@ -46,7 +46,7 @@ def _initialise_uniform_weights(self, shape: tuple) -> None:
self.hinge_constant,
)

-def _initialise_zeros_weights(self, shape: tuple) -> None:
+def _initialise_zeros_weights(self, shape: tuple):
self.num_samples, self.num_features = shape
self.W = self.backend.asarray(
self.backend.zeros(
@@ -57,7 +57,7 @@ def _initialise_zeros_weights(self, shape: tuple) -> None:
1,
)

-def initialise_weights(self, X) -> None:
+def initialise_weights(self, X):
"""
Initialises weights with correct dimensions
"""
swi_ml/utils/regularisers.py (8 changes: 4 additions & 4 deletions)
@@ -7,7 +7,7 @@ class _BaseRegularisation(_Backend):
NOTE: Can be used directly as a L1_L2 (ElasticNet) Regularisation
"""

-def __init__(self, multiply_factor: float, l1_ratio: float) -> None:
+def __init__(self, multiply_factor: float, l1_ratio: float):
self.multiply_factor = (
multiply_factor if multiply_factor is not None else 1
)
@@ -30,7 +30,7 @@ class L1Regularisation(_BaseRegularisation):
Lasso Regression Regularisation
"""

-def __init__(self, l1_cost: float) -> None:
+def __init__(self, l1_cost: float):
multiply_factor = l1_cost
l1_ratio = 1
super().__init__(multiply_factor, l1_ratio)
@@ -41,7 +41,7 @@ class L2Regularisation(_BaseRegularisation):
Ridge Regression Regularisation
"""

-def __init__(self, l2_cost: float) -> None:
+def __init__(self, l2_cost: float):
multiply_factor = l2_cost
l1_ratio = 0
super().__init__(multiply_factor, l1_ratio)
@@ -52,5 +52,5 @@ class L1_L2Regularisation(_BaseRegularisation):
ElasticNet Regression Regularisation
"""

-def __init__(self, multiply_factor: float, l1_ratio: float) -> None:
+def __init__(self, multiply_factor: float, l1_ratio: float):
super().__init__(multiply_factor, l1_ratio)
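
A side note on the plain return annotations dropped throughout this diff: a "-> None" hint has no effect at runtime and only adds a 'return' entry to the function's __annotations__ mapping, so removing it changes nothing about behaviour. A minimal sketch with hypothetical helpers (not taken from the repository):

def with_return_hint(multiply_factor: float, l1_ratio: float) -> None:
    pass

def without_return_hint(multiply_factor: float, l1_ratio: float):
    pass

# The first records a 'return' hint; the second keeps only the parameter hints.
print(with_return_hint.__annotations__)     # includes 'return': None
print(without_return_hint.__annotations__)  # parameter hints only, no 'return' key

# Both behave identically when called: each returns None.
print(with_return_hint(0.5, 1.0) == without_return_hint(0.5, 1.0))  # True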
