Ошибка «Недостаточно значений для распаковки» при использовании RandomizedSearchCV с конвейером - PullRequest
0 голосов
/ 17 апреля 2019

Я пытаюсь обучить нейронную сеть. Пока я пытаюсь вписать данные в randomizedsearchcv, я получаю следующую ошибку.

 ValueError                                Traceback (most recent call last)
<ipython-input-167-e39f0f3cb2bd> in <module>()
     27 
     28 
---> 29 grid.fit(x_train, y_train)

/usr/local/lib/python3.6/dist-packages/sklearn/model_selection/_search.py in fit(self, X, y, groups, **fit_params)
    720                 return results_container[0]
    721 
--> 722             self._run_search(evaluate_candidates)
    723 
    724         results = results_container[0]

/usr/local/lib/python3.6/dist-packages/sklearn/model_selection/_search.py in _run_search(self, evaluate_candidates)
   1513         evaluate_candidates(ParameterSampler(
   1514             self.param_distributions, self.n_iter,
-> 1515             random_state=self.random_state))

/usr/local/lib/python3.6/dist-packages/sklearn/model_selection/_search.py in evaluate_candidates(candidate_params)
    709                                for parameters, (train, test)
    710                                in product(candidate_params,
--> 711                                           cv.split(X, y, groups)))
    712 
    713                 all_candidate_params.extend(candidate_params)

/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/parallel.py in __call__(self, iterable)
    915             # remaining jobs.
    916             self._iterating = False
--> 917             if self.dispatch_one_batch(iterator):
    918                 self._iterating = self._original_iterator is not None
    919 

/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/parallel.py in dispatch_one_batch(self, iterator)
    757                 return False
    758             else:
--> 759                 self._dispatch(tasks)
    760                 return True
    761 

/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/parallel.py in _dispatch(self, batch)
    714         with self._lock:
    715             job_idx = len(self._jobs)
--> 716             job = self._backend.apply_async(batch, callback=cb)
    717             # A job can complete so quickly than its callback is
    718             # called before we get here, causing self._jobs to

/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/_parallel_backends.py in apply_async(self, func, callback)
    180     def apply_async(self, func, callback=None):
    181         """Schedule a func to be run"""
--> 182         result = ImmediateResult(func)
    183         if callback:
    184             callback(result)

/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/_parallel_backends.py in __init__(self, batch)
    547         # Don't delay the application, to avoid keeping the input
    548         # arguments in memory
--> 549         self.results = batch()
    550 
    551     def get(self):

/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/parallel.py in __call__(self)
    223         with parallel_backend(self._backend, n_jobs=self._n_jobs):
    224             return [func(*args, **kwargs)
--> 225                     for func, args, kwargs in self.items]
    226 
    227     def __len__(self):

/usr/local/lib/python3.6/dist-packages/sklearn/externals/joblib/parallel.py in <listcomp>(.0)
    223         with parallel_backend(self._backend, n_jobs=self._n_jobs):
    224             return [func(*args, **kwargs)
--> 225                     for func, args, kwargs in self.items]
    226 
    227     def __len__(self):

/usr/local/lib/python3.6/dist-packages/sklearn/model_selection/_validation.py in _fit_and_score(estimator, X, y, scorer, train, test, verbose, parameters, fit_params, return_train_score, return_parameters, return_n_test_samples, return_times, return_estimator, error_score)
    526             estimator.fit(X_train, **fit_params)
    527         else:
--> 528             estimator.fit(X_train, y_train, **fit_params)
    529 
    530     except Exception as e:

/usr/local/lib/python3.6/dist-packages/sklearn/pipeline.py in fit(self, X, y, **fit_params)
    263             This estimator
    264         """
--> 265         Xt, fit_params = self._fit(X, y, **fit_params)
    266         if self._final_estimator is not None:
    267             self._final_estimator.fit(Xt, y, **fit_params)

/usr/local/lib/python3.6/dist-packages/sklearn/pipeline.py in _fit(self, X, y, **fit_params)
    200                                 if step is not None)
    201         for pname, pval in six.iteritems(fit_params):
--> 202             step, param = pname.split('__', 1)
    203             fit_params_steps[step][param] = pval
    204         Xt = X

ValueError: not enough values to unpack (expected 2, got 1)

Я провел некоторое исследование, и это может быть вызвано проверкой kfold. Но даже если я удалил резюме из функции, он дал ту же ошибку. Я также проверил функцию подгонки, x / y_train должно быть достаточно.

Спасибо.

Мой код указан ниже.

seed = 19
callback_dict = {'plot_losses': [plot_losses], 'history': [History()]}
clf = KerasRegressor(build_fn = create_model, epochs = 2, batch_size = 24 , shuffle = True, verbose = 1)

kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=seed)
pipeline = Pipeline([('scaler',StandardScaler()), ('clf',clf)])

para_grid = {'clf__act':['relu', 'LeakyReLU'],'clf__dropout':[0.1, 0.2, 0.5], 'clf__L_1':[0, 1e-1, 1e-2, 1e-4],'clf__L_2':[0, 1e-1, 1e-2, 1e-4],
                'clf__lr':[0.001, 0.0001, 0.0005, 0.000146, 0.00001]}

grid = RandomizedSearchCV(pipeline, cv=kfold,  param_distributions = para_grid, n_jobs = 1, random_state= seed, fit_params = callback_dict, scoring= "neg_mean_squared_error"  )
grid.fit(x_train, y_train)
Добро пожаловать на сайт PullRequest, где вы можете задавать вопросы и получать ответы от других членов сообщества.
...