I want to fit and predict with Gaussian process regression on my dataset, which has a 3-dimensional input vector and a 1-dimensional output. Is there a way to do the prediction with gp.predict in scikit-learn? At the moment it raises this error:
ValueError: Found array with dim 3. Estimator expected <= 2.
Code:
import xlrd
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import (RBF, Matern, RationalQuadratic)
from sklearn.metrics.classification import accuracy_score, log_loss
import numpy as np
import matplotlib.pyplot as plt
import seaborn; seaborn.set() # plot formatting
loc = ("C:/Users/yash.bagla/Documents/ML Project/Data/Data_LowGain_Reg_63C.xlsx")
wb = xlrd.open_workbook(loc)
sheet = wb.sheet_by_index(0)
X_d = []
Y = []
# For row 0 and column 0
sheet.cell_value(0, 0)
for i in range(sheet.nrows - 1):
    X_d.append((sheet.cell_value(i+1, 0), sheet.cell_value(i+1, 1), sheet.cell_value(i+1, 2)))
    Y.append(sheet.cell_value(i+1, 4))
X_l = np.reshape(X_d, (-1, 3))
nsamples, nx = X_l.shape
X = X_l.reshape((nsamples,nx))
X1 = np.linspace(0, 0, 3)[:, None]
X2 = np.linspace(0, 25, 100)[:, None]
X1_test, X2_test = np.meshgrid(X1, X2)
X_1 = np.reshape(X2_test, (-1, 3))
nsamples, nx = X_1.shape
X_ = X_1.reshape((nsamples,nx))
kernels = [1.0 * RBF(length_scale=3.0, length_scale_bounds=(1e-1, 10.0)),
           1.0 * RationalQuadratic(length_scale=3.0, alpha=0.1),
           1.0 * Matern(length_scale=3.0, length_scale_bounds=(1e-1, 10.0),
                        nu=1.5)]
for fig_index, kernel in enumerate(kernels):
    # Specify Gaussian Process
    gp = GaussianProcessRegressor(kernel=kernel)
    # Predictions from the (unfitted) GP prior
    y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)
    y_samples = gp.sample_y(X_[:, np.newaxis], 10)
    # Generate data and fit GP
    gp.fit(X, Y)
    # Plot posterior
    y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)
    y_samples = gp.sample_y(X_[:, np.newaxis], 10)
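If I am reading the reshapes correctly, X_ is already a 2-D array of shape (100, 3), so indexing it with [:, np.newaxis] produces the 3-D array that the error complains about. A small standalone check of just the test-grid construction (the shapes in the comments are what I expect to see):

import numpy as np

# Rebuild only the test grid from the code above and inspect its shape
X1 = np.linspace(0, 0, 3)[:, None]
X2 = np.linspace(0, 25, 100)[:, None]
X1_test, X2_test = np.meshgrid(X1, X2)
X_ = np.reshape(X2_test, (-1, 3))
print(X_.shape)                 # (100, 3)    -> already 2-D
print(X_[:, np.newaxis].shape)  # (100, 1, 3) -> 3-D, rejected by check_array

The full traceback: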
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
C:\Users\yash.bagla\Documents\ML Project\Code\Edit5_GPR_1.py in <module>()
55 plt.subplot(2, 1, 1)
56 #X_ = np.linspace(0, 25, 1000)
---> 57 y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)
58 plt.plot(X_, y_mean, 'k', lw=3, zorder=9)
59 plt.fill_between(X_, y_mean - y_std, y_mean + y_std,
C:\Users\yash.bagla\AppData\Local\Enthought\Canopy\edm\envs\User\lib\site-packages\sklearn\gaussian_process\gpr.py in predict(self, X, return_std, return_cov)
312 "returning full covariance.")
313
--> 314 X = check_array(X)
315
316 if not hasattr(self, "X_train_"): # Unfitted;predict based on GP prior
C:\Users\yash.bagla\AppData\Local\Enthought\Canopy\edm\envs\User\lib\site-packages\sklearn\utils\validation.py in check_array(array, accept_sparse, accept_large_sparse, dtype, order, copy, force_all_finite, ensure_2d, allow_nd, ensure_min_samples, ensure_min_features, warn_on_dtype, estimator)
568 if not allow_nd and array.ndim >= 3:
569 raise ValueError("Found array with dim %d. %s expected <= 2."
--> 570 % (array.ndim, estimator_name))
571 if force_all_finite:
572 _assert_all_finite(array,
ValueError: Found array with dim 3. Estimator expected <= 2.
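Based on the error, I think predict expects a 2-D array of shape (n_samples, n_features). Below is a minimal sketch with synthetic stand-in data (the Excel file is not attached; the sizes, values and kernel are placeholders, not my real setup) of how I understand a 3-feature input should be passed:

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

rng = np.random.RandomState(0)

# Synthetic stand-in for the Excel data: 3 input features, 1 output
X_train = rng.uniform(0, 25, size=(50, 3))
y_train = np.sin(X_train).sum(axis=1)

# Test points are also 2-D: one row per query point, 3 feature columns
X_test = rng.uniform(0, 25, size=(20, 3))

gp = GaussianProcessRegressor(kernel=1.0 * RBF(length_scale=3.0))
gp.fit(X_train, y_train)

# No [:, np.newaxis] here: X_test is already (n_samples, 3)
y_mean, y_std = gp.predict(X_test, return_std=True)
y_samples = gp.sample_y(X_test, 10)
print(y_mean.shape, y_std.shape, y_samples.shape)  # (20,), (20,), (20, 10)

I am not sure whether this is the right approach for my case, though.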
Please let me know how to work with input vectors that have more than two dimensions.
Thanks.