[MNT] Code quality updates (#5786)
Automated changes from code quality tools, no manual changes.

1. updated versions of the pre-commit hooks (`pre-commit autoupdate`)
2. ran ruff with default settings (`ruff sktime`) -> ignored changes in
`sktime/_contrib`
3. ran all hooks on all files (`pre-commit run --all-files`) -> ignored
changes in `examples/blog_posts`
yarnabrina authored Jan 22, 2024
1 parent adb880c commit da59816
Showing 9 changed files with 9 additions and 12 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -32,13 +32,13 @@ repos:
name: isort

- repo: https://github.com/psf/black
-rev: 23.12.0
+rev: 23.12.1
hooks:
- id: black
language_version: python3

- repo: https://github.com/pycqa/flake8
-rev: 6.1.0
+rev: 7.0.0
hooks:
- id: flake8
exclude: docs/conf.py
2 changes: 2 additions & 0 deletions examples/interpolation.ipynb
@@ -124,6 +124,8 @@
],
"source": [
"# randomly cut the data series in-place\n",
+"\n",
+"\n",
"def random_cut(df):\n",
" for row_i in range(df.shape[0]):\n",
" for dim_i in range(df.shape[1]):\n",
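The two added "\n" entries are blank lines inserted before the top-level def, consistent with the two-blank-lines-before-a-definition convention that the formatting hooks enforce. For readability, here is a sketch of the resulting notebook cell rendered as plain Python (reconstructed from the JSON source strings in the hunk above; lines past the hunk are elided):

# randomly cut the data series in-place


def random_cut(df):
    for row_i in range(df.shape[0]):
        for dim_i in range(df.shape[1]):
            ...  # remainder of the cell is not shown in this hunk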
2 changes: 1 addition & 1 deletion sktime/classification/distance_based/_shape_dtw.py
@@ -449,7 +449,7 @@ def _check_metric_params(self, parameters):
names = list(parameters.keys())

for x in names:
-if not (x in valid_metric_params):
+if x not in valid_metric_params:
raise ValueError(
x
+ " is not a valid metric parameter."
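This rewrite is the standard fix for pycodestyle/ruff rule E713 (membership tests should use the `not in` operator); the two forms are equivalent, the second simply reads as a single operator. A minimal sketch of the pattern, with hypothetical names and values:

valid_metric_params = {"metric_params", "n_neighbors"}  # hypothetical values for illustration
x = "window_size"

# flagged form: negates the whole membership test
if not (x in valid_metric_params):
    print(x + " is not a valid metric parameter.")

# preferred, equivalent form
if x not in valid_metric_params:
    print(x + " is not a valid metric parameter.")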
@@ -14,10 +14,7 @@
ProximityForest,
ShapeDTW,
)
-from sktime.classification.early_classification import (
-TEASER,
-ProbabilityThresholdEarlyClassifier,
-)
+from sktime.classification.early_classification import TEASER
from sktime.classification.feature_based import (
Catch22Classifier,
MatrixProfileClassifier,
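Dropping `ProbabilityThresholdEarlyClassifier` from the import is the kind of change produced when an imported name is no longer referenced in the module (Pyflakes F401, unused import); since the rest of the file is not shown in this hunk, that reason is an assumption here. A minimal sketch of the pattern with a stand-in module:

# before (flagged): `sqrt` is imported but never referenced, which
# Pyflakes reports as F401 (unused import)
#
#     from math import pi, sqrt
#
# after the automated fix, only the name that is actually used remains
from math import pi

CIRCLE_AREA_FACTOR = pi  # the module only ever uses `pi`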
2 changes: 1 addition & 1 deletion sktime/forecasting/base/_base.py
@@ -1163,7 +1163,7 @@ def predict_residuals(self, y=None, X=None):

y_pred = self.predict(fh=fh, X=X)

-if not type(y_pred) is type(y):
+if type(y_pred) is not type(y):
y = convert_to(y, self._y_metadata["mtype"])

y_res = y - y_pred
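This is the identity-test counterpart of the membership fix above: rule E714 prefers `is not` over negating an `is` comparison. The same pattern recurs in the `sktime/pipeline/step.py` and `sktime/utils/_testing/scenarios_classification.py` hunks further down. A minimal sketch with hypothetical values:

y = [1.0, 2.0, 3.0]        # hypothetical ground truth, stored as a list
y_pred = (1.1, 2.1, 2.9)   # hypothetical prediction of a different type (tuple)

# flagged form: negates the whole identity test
if not type(y_pred) is type(y):
    print("types differ, convert before computing residuals")

# preferred, equivalent form
if type(y_pred) is not type(y):
    print("types differ, convert before computing residuals")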
1 change: 0 additions & 1 deletion sktime/forecasting/base/adapters/_pytorch.py
@@ -18,7 +18,6 @@ class BaseDeepNetworkPyTorch(BaseForecaster, ABC):
"y_inner_mtype": "pd.DataFrame",
"capability:insample": False,
"capability:pred_int:insample": False,
-"python_dependencies": "torch",
"scitype:y": "both",
"ignores-exogeneous-X": True,
}
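This hunk only shows a deletion; one plausible explanation (an assumption, since the full `_tags` dict is not visible here) is that the key appeared twice in the literal, which Pyflakes-style checks flag as a repeated dictionary key. The identical one-line deletion in `sktime/networks/base.py` below would have the same cause. A minimal sketch of that pattern:

# flagged (assumed pattern, not confirmed by this hunk): a dict literal that
# repeats a key -- only the last value survives, so linters warn about it
_tags = {
    "python_dependencies": "torch",
    "y_inner_mtype": "pd.DataFrame",
    "python_dependencies": ["torch"],  # repeated key: the earlier value is silently dropped
}

# fix: keep a single occurrence of the key
_tags = {
    "python_dependencies": "torch",
    "y_inner_mtype": "pd.DataFrame",
}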
1 change: 0 additions & 1 deletion sktime/networks/base.py
@@ -45,7 +45,6 @@ class BaseDeepNetworkPyTorch(BaseForecaster, ABC):
"y_inner_mtype": "pd.DataFrame",
"capability:insample": False,
"capability:pred_int:insample": False,
-"python_dependencies": "torch",
"scitype:y": "both",
"ignores-exogeneous-X": True,
}
2 changes: 1 addition & 1 deletion sktime/pipeline/step.py
@@ -212,7 +212,7 @@ def _fetch_input_data(self, fit, required_method, mro, kwargs):
self.mode = result.mode
if result.result is not None:
all_none = False
-if len(results) != 0 and not results[0] is None:
+if len(results) != 0 and results[0] is not None:
if len(results) > 1:
input_data[step_name] = pd.concat(
results, axis=1, keys=transformer_names
2 changes: 1 addition & 1 deletion sktime/utils/_testing/scenarios_classification.py
@@ -72,7 +72,7 @@ def get_tag(obj, tag_name):
# applicable only if obj inherits from BaseClassifier, BaseEarlyClassifier or
# BaseRegressor. currently we test both classifiers and regressors using these
# scenarios
-if not scitype(obj) in (
+if scitype(obj) not in (
"classifier",
"early_classifier",
"regressor",
