
Commit

Change Polynomial check from class name to isinstance method. Also exclude torch==2.4.0 due to a pip error.
wilsonrljr committed Sep 3, 2024
1 parent f878e83 commit aae0c80
Showing 9 changed files with 81 additions and 52 deletions.
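
For context, the substantive change in this commit is the replacement of string comparisons on the basis function's class name with isinstance checks against the Polynomial class. A minimal sketch of the behavioural difference, using hypothetical stand-in classes rather than the real sysidentpy basis functions: isinstance accepts Polynomial and any subclass of it, whereas the old string comparison only matches the exact class name.

class Polynomial:
    """Hypothetical stand-in for sysidentpy's Polynomial basis function."""

class CustomPolynomial(Polynomial):
    """Hypothetical user subclass that should still be treated as polynomial."""

basis_function = CustomPolynomial()

# Old pattern: exact class-name match, so subclasses fall through.
print(basis_function.__class__.__name__ == "Polynomial")  # False

# New pattern: matches Polynomial and all of its subclasses.
print(isinstance(basis_function, Polynomial))  # True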
3 changes: 2 additions & 1 deletion pyproject.toml
@@ -57,6 +57,7 @@ dependencies = ["numpy>=1.19.2,<2.0", "scipy>=1.7.0", "matplotlib>=3.3.2"]
dynamic = ["version"]

[project.optional-dependencies]
+
dev = [
"pytest >=7.0.0,<8.0.0",
"pytest-cov >=2.12.0,<4.0.0",
@@ -87,7 +88,7 @@ doc = [
# avoid jinja import error using 3.0.3 version
"jinja2==3.0.3",
]
all = ["torch >=1.7.1"]
all = ["torch >=1.7.1, <2.4.0"]

[project.urls]
homepage = "http://sysidentpy.org"
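
The change above narrows the torch requirement of the all extra instead of dropping it. A small sketch of what the new specifier admits, using the third-party packaging library (an assumption for illustration; it is not part of this diff):

from packaging.specifiers import SpecifierSet

# The pinned range from the "all" extra above.
torch_spec = SpecifierSet(">=1.7.1,<2.4.0")

print("2.3.1" in torch_spec)  # True: an older torch release still resolves
print("2.4.0" in torch_spec)  # False: the release the commit message ties to the pip error is excluded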
21 changes: 9 additions & 12 deletions sysidentpy/general_estimators/narx.py
@@ -160,7 +160,12 @@ def fit(self, *, X=None, y=None):
self.max_lag = self._get_max_lag()
lagged_data = self.build_matrix(X, y)
reg_matrix = self.basis_function.fit(
- lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type, predefined_regressors=None
+ lagged_data,
+ self.max_lag,
+ self.ylag,
+ self.xlag,
+ self.model_type,
+ predefined_regressors=None,
)

if X is not None:
@@ -213,7 +218,7 @@ def predict(
The predicted values of the model.
"""
- if self.basis_function.__class__.__name__ == "Polynomial":
+ if isinstance(self.basis_function, Polynomial):
if steps_ahead is None:
yhat = self._model_prediction(X, y, forecast_horizon=forecast_horizon)
yhat = np.concatenate([y[: self.max_lag], yhat], axis=0)
@@ -263,11 +268,7 @@ def _one_step_ahead_prediction(self, X, y):
"""
lagged_data = self.build_matrix(X, y)
X_base = self.basis_function.transform(
- lagged_data,
- self.max_lag,
- self.ylag,
- self.xlag,
- self.model_type
+ lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type
)

yhat = self.base_estimator.predict(X_base)
@@ -485,11 +486,7 @@ def _basis_function_predict(self, X, y_initial, forecast_horizon=None):
yhat[i : i + analyzed_elements_number].reshape(-1, 1),
)
X_tmp = self.basis_function.transform(
- lagged_data,
- self.max_lag,
- self.ylag,
- self.xlag,
- self.model_type
+ lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type
)

a = self.base_estimator.predict(X_tmp)
@@ -308,7 +308,12 @@ def fit(self, *, X: Optional[np.ndarray] = None, y: Optional[np.ndarray] = None)
self.max_lag = self._get_max_lag()
lagged_data = self.build_matrix(X, y)
reg_matrix = self.basis_function.fit(
- lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type, predefined_regressors=None
+ lagged_data,
+ self.max_lag,
+ self.ylag,
+ self.xlag,
+ self.model_type,
+ predefined_regressors=None,
)

if X is not None:
@@ -371,7 +376,7 @@ def predict(
The predicted values of the model.
"""
- if self.basis_function.__class__.__name__ == "Polynomial":
+ if isinstance(self.basis_function, Polynomial):
if steps_ahead is None:
yhat = self._model_prediction(X, y, forecast_horizon=forecast_horizon)
yhat = np.concatenate([y[: self.max_lag], yhat], axis=0)
9 changes: 7 additions & 2 deletions sysidentpy/model_structure_selection/entropic_regression.py
@@ -584,7 +584,12 @@ def fit(self, *, X=None, y=None):
lagged_data = self.build_matrix(X, y)

reg_matrix = self.basis_function.fit(
- lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type, predefined_regressors=None
+ lagged_data,
+ self.max_lag,
+ self.ylag,
+ self.xlag,
+ self.model_type,
+ predefined_regressors=None,
)

if X is not None:
@@ -687,7 +692,7 @@ def predict(self, *, X=None, y=None, steps_ahead=None, forecast_horizon=None):
The predicted values of the model.
"""
- if self.basis_function.__class__.__name__ == "Polynomial":
+ if isinstance(self.basis_function, Polynomial):
if steps_ahead is None:
yhat = self._model_prediction(X, y, forecast_horizon=forecast_horizon)
yhat = np.concatenate([y[: self.max_lag], yhat], axis=0)
@@ -602,7 +602,12 @@ def fit(self, *, X: Optional[np.ndarray] = None, y: np.ndarray):
lagged_data = self.build_matrix(X, y)

reg_matrix = self.basis_function.fit(
- lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type, predefined_regressors=None
+ lagged_data,
+ self.max_lag,
+ self.ylag,
+ self.xlag,
+ self.model_type,
+ predefined_regressors=None,
)

if X is not None:
@@ -689,7 +694,7 @@ def predict(
The predicted values of the model.
"""
- if self.basis_function.__class__.__name__ == "Polynomial":
+ if isinstance(self.basis_function, Polynomial):
if steps_ahead is None:
yhat = self._model_prediction(X, y, forecast_horizon=forecast_horizon)
yhat = np.concatenate([y[: self.max_lag], yhat], axis=0)
@@ -385,7 +385,12 @@ def evaluate_objective_function(
lagged_data = self.build_matrix(X_train, y_train)

psi = self.basis_function.fit(
- lagged_data, self.max_lag, self.xlag, self.ylag, self.model_type, predefined_regressors=self.pivv
+ lagged_data,
+ self.max_lag,
+ self.xlag,
+ self.ylag,
+ self.model_type,
+ predefined_regressors=self.pivv,
)

pos_insignificant_terms, _, _ = self.perform_t_test(
@@ -607,7 +612,7 @@ def predict(
The predicted values of the model.
"""
- if self.basis_function.__class__.__name__ == "Polynomial":
+ if isinstance(self.basis_function, Polynomial):
if steps_ahead is None:
yhat = self._model_prediction(X, y, forecast_horizon=forecast_horizon)
yhat = np.concatenate([y[: self.max_lag], yhat], axis=0)
40 changes: 18 additions & 22 deletions sysidentpy/neural_network/narx_nn.py
@@ -271,15 +271,24 @@ def split_data(self, X, y):
self.max_lag = self._get_max_lag()
lagged_data = self.build_matrix(X, y)

- basis_name = self.basis_function.__class__.__name__
- if basis_name == "Polynomial":
+ if isinstance(self.basis_function, Polynomial):
reg_matrix = self.basis_function.fit(
- lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type, predefined_regressors=None
+ lagged_data,
+ self.max_lag,
+ self.ylag,
+ self.xlag,
+ self.model_type,
+ predefined_regressors=None,
)
reg_matrix = reg_matrix[:, 1:]
else:
reg_matrix = self.basis_function.fit(
- lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type, predefined_regressors=None
+ lagged_data,
+ self.max_lag,
+ self.ylag,
+ self.xlag,
+ self.model_type,
+ predefined_regressors=None,
)

if X is not None:
@@ -476,7 +485,7 @@ def predict(self, *, X=None, y=None, steps_ahead=None, forecast_horizon=None):
The predicted values of the model.
"""
- if self.basis_function.__class__.__name__ == "Polynomial":
+ if isinstance(self.basis_function, Polynomial):
if steps_ahead is None:
return self._model_prediction(X, y, forecast_horizon=forecast_horizon)
if steps_ahead == 1:
@@ -513,23 +522,14 @@ def _one_step_ahead_prediction(self, X, y):
"""
lagged_data = self.build_matrix(X, y)

- basis_name = self.basis_function.__class__.__name__
- if basis_name == "Polynomial":
+ if isinstance(self.basis_function, Polynomial):
X_base = self.basis_function.transform(
- lagged_data,
- self.max_lag,
- self.ylag,
- self.xlag,
- self.model_type
+ lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type
)
X_base = X_base[:, 1:]
else:
X_base = self.basis_function.transform(
- lagged_data,
- self.max_lag,
- self.ylag,
- self.xlag,
- self.model_type
+ lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type
)

yhat = np.zeros(X.shape[0], dtype=float)
@@ -716,11 +716,7 @@ def _basis_function_predict(self, X, y_initial, forecast_horizon=None):
)

X_tmp = self.basis_function.transform(
- lagged_data,
- self.max_lag,
- self.ylag,
- self.xlag,
- self.model_type
+ lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type
)
X_tmp = np.atleast_1d(X_tmp).astype(np.float32)
yhat = yhat.astype(np.float32)
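
One detail of the split_data hunk above is unchanged by the refactor: only the Polynomial branch drops the first column of the regressor matrix (reg_matrix = reg_matrix[:, 1:]) before handing it to the network. A tiny NumPy sketch of what that slice does, on made-up data; reading the dropped column as a constant/bias column is an assumption, not something this diff states.

import numpy as np

# Hypothetical 2x3 regressor matrix; the leading column of ones stands in for
# a constant term in this made-up example.
reg_matrix = np.array([[1.0, 0.2, 0.3],
                       [1.0, 0.4, 0.5]])

# Mirrors reg_matrix[:, 1:] in the Polynomial branch: keep every row, drop the
# first column.
print(reg_matrix[:, 1:])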
18 changes: 14 additions & 4 deletions sysidentpy/simulation/_simulation.py
@@ -179,7 +179,7 @@ def _validate_simulate_params(self):
)

def _check_simulate_params(self, y_train, y_test, model_code, steps_ahead, theta):
- if self.basis_function.__class__.__name__ != "Polynomial":
+ if not isinstance(self.basis_function, Polynomial):
raise NotImplementedError(
"Currently, SimulateNARMAX only works for polynomial models."
)
@@ -277,7 +277,12 @@ def simulate(
self.max_lag = self._get_max_lag()
lagged_data = self.build_matrix(X_train, y_train)
psi = self.basis_function.fit(
- lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type, predefined_regressors=self.pivv
+ lagged_data,
+ self.max_lag,
+ self.ylag,
+ self.xlag,
+ self.model_type,
+ predefined_regressors=self.pivv,
)

self.theta = self.estimator.optimize(
@@ -303,7 +308,12 @@ def simulate(
self.max_lag = self._get_max_lag()
lagged_data = self.build_matrix(X_train, y_train)
psi = self.basis_function.fit(
- lagged_data, self.max_lag, self.ylag, self.xlag, self.model_type, predefined_regressors=self.pivv
+ lagged_data,
+ self.max_lag,
+ self.ylag,
+ self.xlag,
+ self.model_type,
+ predefined_regressors=self.pivv,
)

_, self.err, _, _ = self.error_reduction_ratio(
@@ -434,7 +444,7 @@ def predict(self, *, X=None, y=None, steps_ahead=None, forecast_horizon=None):
The predicted values of the model.
"""
- if self.basis_function.__class__.__name__ == "Polynomial":
+ if isinstance(self.basis_function, Polynomial):
if steps_ahead is None:
yhat = self._model_prediction(X, y, forecast_horizon=forecast_horizon)
yhat = np.concatenate([y[: self.max_lag], yhat], axis=0)
15 changes: 10 additions & 5 deletions sysidentpy/utils/narmax_tools.py
@@ -4,6 +4,7 @@
import numpy as np

from ..narmax_base import RegressorDictionary
+ from ..basis_function import Polynomial
from ._check_arrays import _num_features


@@ -47,24 +48,28 @@ def regressor_code(
xlag=xlag, ylag=ylag, model_type=model_type, basis_function=basis_function
).regressor_space(n_inputs)

- basis_name = basis_function.__class__.__name__
- if basis_name != "Polynomial" and basis_function.ensemble:
+ if not isinstance(basis_function, Polynomial) and basis_function.ensemble:
repetition = basis_function.n * 2
basis_code = np.sort(
np.tile(encoding[1:, :], (repetition, 1)),
axis=0,
)
encoding = np.concatenate([encoding[1:], basis_code])
- elif basis_name != "Polynomial" and basis_function.ensemble is False:
+ elif (
+ not isinstance(basis_function, Polynomial) and basis_function.ensemble is False
+ ):
repetition = basis_function.n * 2
encoding = np.sort(
np.tile(encoding[1:, :], (repetition, 1)),
axis=0,
)

- if basis_name == "Polynomial" and model_representation == "neural_network":
+ if (
+ isinstance(basis_function, Polynomial)
+ and model_representation == "neural_network"
+ ):
return encoding[1:]
- if basis_name == "Polynomial" and model_representation is None:
+ if isinstance(basis_function, Polynomial) and model_representation is None:
return encoding

return encoding
