Я пишу программу, которая подгоняет к данным кусочно-линейную регрессию с числом точек излома до 4–5, а затем решает, сколько точек излома лучше всего использовать, чтобы избежать переобучения или недообучения. Тем не менее, мой код работает крайне медленно из-за того, насколько он неэлегантен.
Черновик моего кода:
import numpy as np
import pandas as pd
from scipy.optimize import curve_fit, differential_evolution
import matplotlib.pyplot as plt
import warnings
def segmentedRegression_two(xData, yData, *, maxiter=1000, seed=3):
    """Fit a 2-breakpoint (3-segment) piecewise-linear model to the data.

    A differential-evolution search over heuristic parameter bounds supplies
    the initial guess, which ``curve_fit`` then refines.  Prints the fitted
    parameters and returns the R-squared of the final fit.

    Parameters
    ----------
    xData, yData : array-like of float
        Sample points; converted to 1-D float arrays.
    maxiter : int, optional
        Generation limit for the differential-evolution search
        (scipy's default, kept for backward compatibility).
    seed : int, optional
        RNG seed for the differential-evolution search.

    Returns
    -------
    float
        R-squared of the fitted model.
    """
    xData = np.asarray(xData, dtype=float)
    yData = np.asarray(yData, dtype=float)

    def func(xVals, break1, break2, slope1, offset1, slope_mid, offset_mid,
             slope2, offset2):
        # Vectorized piecewise evaluation.  np.select picks the first
        # condition that matches, which reproduces the original if/elif
        # chain (the `x >= break1` guard there was redundant), but runs in C
        # instead of a Python loop per point -- the main source of slowness.
        xVals = np.asarray(xVals, dtype=float)
        return np.select(
            [xVals < break1, xVals < break2],
            [slope1 * xVals + offset1, slope_mid * xVals + offset_mid],
            default=slope2 * xVals + offset2,
        )

    def sumSquaredError(parametersTuple):
        # Objective minimized by the genetic search: sum of squared residuals.
        return np.sum((yData - func(xData, *parametersTuple)) ** 2.0)

    def generate_genetic_Parameters():
        # Heuristic search region; curve_fit is unconstrained afterwards.
        x_min, x_max = np.min(xData), np.max(xData)
        y_min, y_max = np.min(yData), np.max(yData)
        slope_limit = 10 * (y_max - y_min) / (x_max - x_min)
        offset_limit = 2 * max(abs(y_min), abs(y_max))

        # differential_evolution expects (lower, upper) pairs; several of the
        # draft's bounds were reversed (e.g. [x_max, x_min]) or inconsistent
        # (e.g. [-y_max, y_min] vs [y_max, y_min]).
        bounds = [
            (x_min, x_max),                  # break1
            (x_min, x_max),                  # break2
            (-slope_limit, slope_limit),     # slope1
            (-offset_limit, offset_limit),   # offset1
            (-slope_limit, slope_limit),     # slope_mid
            (-offset_limit, offset_limit),   # offset_mid
            (-slope_limit, slope_limit),     # slope2
            (-offset_limit, offset_limit),   # offset2
        ]
        with warnings.catch_warnings():
            # Silence warnings emitted while the genetic algorithm explores,
            # without permanently mutating the global warning filters.
            warnings.simplefilter("ignore")
            result = differential_evolution(sumSquaredError, bounds,
                                            seed=seed, maxiter=maxiter)
        return result.x

    geneticParameters = generate_genetic_Parameters()
    fittedParameters, _ = curve_fit(func, xData, yData, geneticParameters)
    print('Parameters:', fittedParameters)

    absError = func(xData, *fittedParameters) - yData
    Rsquared = 1.0 - (np.var(absError) / np.var(yData))
    return Rsquared
def segmentedRegression_three(xData, yData, *, maxiter=1000, seed=3):
    """Fit a 3-breakpoint (4-segment) piecewise-linear model to the data.

    A differential-evolution search over heuristic parameter bounds supplies
    the initial guess, which ``curve_fit`` then refines.  Prints the fitted
    parameters and returns the R-squared of the final fit.

    Parameters
    ----------
    xData, yData : array-like of float
        Sample points; converted to 1-D float arrays.
    maxiter : int, optional
        Generation limit for the differential-evolution search
        (scipy's default, kept for backward compatibility).
    seed : int, optional
        RNG seed for the differential-evolution search.

    Returns
    -------
    float
        R-squared of the fitted model.
    """
    xData = np.asarray(xData, dtype=float)
    yData = np.asarray(yData, dtype=float)

    def func(xVals, break1, break2, break3, slope1, offset1, slope2, offset2,
             slope3, offset3, slope4, offset4):
        # Vectorized piecewise evaluation.  np.select picks the first
        # condition that matches, which reproduces the original if/elif
        # chain (the `x >= breakN` guards there were redundant), but runs in
        # C instead of a Python loop per point -- the main source of slowness.
        xVals = np.asarray(xVals, dtype=float)
        return np.select(
            [xVals < break1, xVals < break2, xVals < break3],
            [slope1 * xVals + offset1,
             slope2 * xVals + offset2,
             slope3 * xVals + offset3],
            default=slope4 * xVals + offset4,
        )

    def sumSquaredError(parametersTuple):
        # Objective minimized by the genetic search: sum of squared residuals.
        return np.sum((yData - func(xData, *parametersTuple)) ** 2.0)

    def generate_genetic_Parameters():
        # Heuristic search region; curve_fit is unconstrained afterwards.
        x_min, x_max = np.min(xData), np.max(xData)
        y_min, y_max = np.min(yData), np.max(yData)
        slope_limit = 10 * (y_max - y_min) / (x_max - x_min)
        offset_limit = 2 * max(abs(y_min), abs(y_max))

        # differential_evolution expects (lower, upper) pairs; several of the
        # draft's bounds were reversed (e.g. [x_max, x_min]) or inconsistent
        # (e.g. [-y_max, y_min] vs [y_max, y_min]).
        bounds = [(x_min, x_max)] * 3          # break1..break3
        for _ in range(4):                     # (slope, offset) per segment
            bounds.append((-slope_limit, slope_limit))
            bounds.append((-offset_limit, offset_limit))
        with warnings.catch_warnings():
            # Silence warnings emitted while the genetic algorithm explores,
            # without permanently mutating the global warning filters.
            warnings.simplefilter("ignore")
            result = differential_evolution(sumSquaredError, bounds,
                                            seed=seed, maxiter=maxiter)
        return result.x

    geneticParameters = generate_genetic_Parameters()
    fittedParameters, _ = curve_fit(func, xData, yData, geneticParameters)
    print('Parameters:', fittedParameters)

    absError = func(xData, *fittedParameters) - yData
    Rsquared = 1.0 - (np.var(absError) / np.var(yData))
    return Rsquared
def segmentedRegression_four(xData, yData, *, maxiter=1000, seed=3):
    """Fit a 4-breakpoint (5-segment) piecewise-linear model to the data.

    A differential-evolution search over heuristic parameter bounds supplies
    the initial guess, which ``curve_fit`` then refines.  Prints the fitted
    parameters and returns the R-squared of the final fit.

    Parameters
    ----------
    xData, yData : array-like of float
        Sample points; converted to 1-D float arrays.
    maxiter : int, optional
        Generation limit for the differential-evolution search
        (scipy's default, kept for backward compatibility).
    seed : int, optional
        RNG seed for the differential-evolution search.

    Returns
    -------
    float
        R-squared of the fitted model.
    """
    xData = np.asarray(xData, dtype=float)
    yData = np.asarray(yData, dtype=float)

    def func(xVals, break1, break2, break3, break4, slope1, offset1,
             slope2, offset2, slope3, offset3, slope4, offset4,
             slope5, offset5):
        # Vectorized piecewise evaluation.  np.select picks the first
        # condition that matches, which reproduces the original if/elif
        # chain (the `x >= breakN` guards there were redundant), but runs in
        # C instead of a Python loop per point -- the main source of slowness.
        xVals = np.asarray(xVals, dtype=float)
        return np.select(
            [xVals < break1, xVals < break2, xVals < break3, xVals < break4],
            [slope1 * xVals + offset1,
             slope2 * xVals + offset2,
             slope3 * xVals + offset3,
             slope4 * xVals + offset4],
            default=slope5 * xVals + offset5,
        )

    def sumSquaredError(parametersTuple):
        # Objective minimized by the genetic search: sum of squared residuals.
        return np.sum((yData - func(xData, *parametersTuple)) ** 2.0)

    def generate_genetic_Parameters():
        # Heuristic search region; curve_fit is unconstrained afterwards.
        x_min, x_max = np.min(xData), np.max(xData)
        y_min, y_max = np.min(yData), np.max(yData)
        slope_limit = 10 * (y_max - y_min) / (x_max - x_min)
        offset_limit = 2 * max(abs(y_min), abs(y_max))

        # differential_evolution expects (lower, upper) pairs; several of the
        # draft's bounds were reversed (e.g. [x_max, x_min]) or inconsistent
        # (e.g. [-y_max, y_min] vs [y_max, y_min]).
        bounds = [(x_min, x_max)] * 4          # break1..break4
        for _ in range(5):                     # (slope, offset) per segment
            bounds.append((-slope_limit, slope_limit))
            bounds.append((-offset_limit, offset_limit))
        with warnings.catch_warnings():
            # Silence warnings emitted while the genetic algorithm explores,
            # without permanently mutating the global warning filters.
            warnings.simplefilter("ignore")
            result = differential_evolution(sumSquaredError, bounds,
                                            seed=seed, maxiter=maxiter)
        return result.x

    geneticParameters = generate_genetic_Parameters()
    fittedParameters, _ = curve_fit(func, xData, yData, geneticParameters)
    print('Parameters:', fittedParameters)

    absError = func(xData, *fittedParameters) - yData
    Rsquared = 1.0 - (np.var(absError) / np.var(yData))
    return Rsquared
И дальше я пока думаю сделать что-то вроде этого:
# Fit all three candidate models and collect their R^2 scores.
r2s=[segmentedRegression_two(xData,yData),segmentedRegression_three(xData,yData),segmentedRegression_four(xData,yData)]
# NOTE(review): np.max returns the best R^2 *value*, not which model won;
# np.argmax(r2s) would identify the winning model.  Also, R^2 can only
# improve as breakpoints are added, so a complexity penalty (AIC/BIC, as
# mentioned below) is needed to avoid always picking the largest model.
best_fit=np.max(r2s)
Хотя мне может понадобиться AIC или что-то в этом роде.
Есть ли способ сделать этот код эффективнее?