 import six
 from sklearn.pipeline import _name_estimators, Pipeline
+from sklearn.utils import tosequence


 class TransformerPipeline(Pipeline):
     """
-    Pipeline that expects all steps to be transformers taking a single argument.
+    Pipeline that expects all steps to be transformers taking a single argument
+    and having fit and transform methods.

     Code is copied from sklearn's Pipeline, leaving out the `y=None` argument.
     """
+    def __init__(self, steps):
+        names, estimators = zip(*steps)
+        if len(dict(steps)) != len(steps):
+            raise ValueError("Provided step names are not unique: %s" % (names,))
+
+        # shallow copy of steps
+        self.steps = tosequence(steps)
+        estimator = estimators[-1]
+
+        for e in estimators:
+            if (not (hasattr(e, "fit") or hasattr(e, "fit_transform")) or not
+                    hasattr(e, "transform")):
+                raise TypeError("All steps of the chain should "
+                                "be transforms and implement fit and transform"
+                                " '%s' (type %s) doesn't)" % (e, type(e)))
+
+        if not hasattr(estimator, "fit"):
+            raise TypeError("Last step of chain should implement fit "
+                            "'%s' (type %s) doesn't)"
+                            % (estimator, type(estimator)))
+
     def _pre_transform(self, X, **fit_params):
         fit_params_steps = dict((step, {}) for step, _ in self.steps)
         for pname, pval in six.iteritems(fit_params):
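For illustration, a minimal usage sketch (not part of the commit): it assumes the TransformerPipeline defined above is in scope (the import path depends on the package layout) and uses scikit-learn's StandardScaler and MinMaxScaler as example steps; the pipeline's own fit/transform come from the rest of the class, which this diff truncates.

import numpy as np
from sklearn.preprocessing import MinMaxScaler, StandardScaler

# Both steps expose fit and transform, so the checks in __init__ accept them.
pipe = TransformerPipeline([
    ("scale", StandardScaler()),
    ("minmax", MinMaxScaler()),
])

X = np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])
pipe.fit(X)              # no y argument, per the docstring above
print(pipe.transform(X))

# A step without a transform method is rejected at construction time:
# TransformerPipeline([("bad", object())])  # raises TypeError
# Duplicate step names are rejected as well:
# TransformerPipeline([("s", StandardScaler()), ("s", MinMaxScaler())])  # raises ValueError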