Commit dec87d5

Merge pull request #56 from umnil/develop

Develop

Authored by kevincar on Jun 5, 2024
2 parents bc6f479 + 197119d
Showing 50 changed files with 227 additions and 88 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/build-and-test.yml
@@ -21,7 +21,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v2
         with:
-          python-version: 3.8
+          python-version: "3.10"
 
       - name: Install dependencies
         run: pip install -r requirements.txt
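A note on the quoting (an inference, not stated in the diff): YAML parses an unquoted 3.10 as the float 3.1, so actions/setup-python would resolve it to Python 3.1; the quotes keep it a string. A quick PyYAML check illustrates the pitfall:

    import yaml  # pip install pyyaml

    # Unquoted, YAML reads 3.10 as a float and drops the trailing zero.
    print(yaml.safe_load("python-version: 3.10"))    # {'python-version': 3.1}

    # Quoted, the value survives as the intended version string.
    print(yaml.safe_load('python-version: "3.10"'))  # {'python-version': '3.10'}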
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -1,6 +1,11 @@
 import numpy as np
 
 from sklearn.pipeline import FeatureUnion  # type: ignore
+from sklearn.utils import Bunch  # type: ignore
+from sklearn.utils.metadata_routing import (  # type: ignore
+    _routing_enabled,
+    process_routing,
+)
 from .transform_pipeline import _transform_one, _fit_transform_one
 
 try:
@@ -10,7 +15,7 @@


 class TransformFeatureUnion(FeatureUnion):
-    def fit_transform(self, x, y=None, **fit_params):
+    def fit_transform(self, x, y=None, **params):
         """Fit all transformers, transform the data and concatenate results.
 
         Parameters
@@ -21,7 +26,7 @@ def fit_transform(self, x, y=None, **fit_params):
         y : array-like of shape (n_samples, n_outputs), default=None
             Targets for supervised learning.
 
-        **fit_params : dict, default=None
+        **params : dict, default=None
             Parameters to pass to the fit method of the estimator.
 
         Returns
@@ -31,7 +36,20 @@ def fit_transform(self, x, y=None, **fit_params):
             The `hstack` of results of transformers. `sum_n_components` is the
             sum of `n_components` (output dimension) over transformers.
         """
-        results = self._parallel_func(x, y, fit_params, _fit_transform_one)
+        if _routing_enabled():
+            routed_params = process_routing(self, "fit_transform", **params)
+        else:
+            routed_params = Bunch()
+            for name, obj in self.transformer_list:
+                if hasattr(obj, "fit_transform"):
+                    routed_params[name] = Bunch(fit_transform={})
+                    routed_params[name].fit_transform = params
+                else:
+                    routed_params[name] = Bunch(fit={}, transform={})
+                    routed_params[name].fit = params
+
+        results = self._parallel_func(x, y, _fit_transform_one, routed_params)
         if not results:
             # All transformers are None
             self._y_hat = y
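For context on the new branch (a minimal sketch against plain FeatureUnion, the parent class; TransformFeatureUnion and the repo's _fit_transform_one are not used here): _routing_enabled() reflects sklearn's enable_metadata_routing config flag. When it is off, the fallback loop above hands the raw params dict to each transformer's fit; when it is on, process_routing builds a per-transformer Bunch holding only the metadata that transformer requested.

    import numpy as np
    import sklearn
    from sklearn.decomposition import PCA
    from sklearn.pipeline import FeatureUnion
    from sklearn.preprocessing import StandardScaler

    x = np.random.rand(10, 5)

    # Routing disabled (the default): fit_transform takes the non-routed
    # fallback branch and forwards params to every transformer's fit.
    union = FeatureUnion([("scale", StandardScaler()), ("pca", PCA(n_components=2))])
    print(union.fit_transform(x).shape)  # (10, 7): 5 scaled columns + 2 components

    # Routing enabled: process_routing(self, "fit_transform", **params) takes over.
    sklearn.set_config(enable_metadata_routing=True)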
6 changes: 3 additions & 3 deletions pipeline/funcs.py → preprocessingpipeline/funcs.py
@@ -117,8 +117,8 @@ def good_channels(x: List[mne.io.Raw]) -> List[mne.io.Raw]:
     ]
 
 
-def pwr(x: np.ndarray) -> np.ndarray:
-    """Compute the power scale of a spectral density
+def db(x: np.ndarray) -> np.ndarray:
+    """Compute the decibel scale of a spectral density
 
     Parameters
     ----------
@@ -130,4 +130,4 @@ def pwr(x: np.ndarray) -> np.ndarray:
     np.ndarray
         The same as x transformed
     """
-    return -20 * np.log10(x + 1e-15)
+    return 10 * np.log10(x + 1e-15)
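The rename tracks the formula fix: a spectral density is a power quantity, so the decibel conversion is 10 * log10(x); the 20 * log10 form applies to amplitude (field) quantities, and the old -20 * log10 inverted the sign as well. A quick sanity check (a sketch, not part of the commit):

    import numpy as np

    def db(x: np.ndarray) -> np.ndarray:
        # 10 * log10 for power quantities; 1e-15 guards against log10(0).
        return 10 * np.log10(x + 1e-15)

    print(db(np.array([1.0, 10.0, 100.0])))  # -> approximately [ 0. 10. 20.]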
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
