What is the cause of this Python p-value error?

import numpy as np 
import pandas as pd 
import matplotlib.pyplot as plt 
import statsmodels.api as sm

data = pd.read_csv("ETHUSDT 15M.csv")
print(data.head())


print(data.corr())

sutun_isimler = ["Index","Zaman","Acılıs","Yuksek","Dusuk","Kapanıs","Hacim"]
data.columns = sutun_isimler
print(data.head())


data = data.drop(["Index"],axis=1)
data['Zaman'] = pd.to_numeric(pd.to_datetime(data['Zaman']))   # datetime -> int64 nanoseconds
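# For context (a quick check, not part of the original flow): the conversion above yields
# nanoseconds since the epoch, which is where the large numbers in the predict() calls
# further down come from.
print(pd.to_numeric(pd.to_datetime(["2022-12-27 06:00:00"]))[0])   # 1672120800000000000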

a = data.iloc[152828:,:2]                             # Zaman and Acılıs columns
b = data.iloc[152828:,-1]                             # Hacim column
x = pd.concat([a,b],axis=1)                           # features: Zaman, Acılıs, Hacim
y = data.drop(["Zaman","Acılıs","Hacim"],axis=1)      # targets: Yuksek, Dusuk, Kapanıs
y = y.iloc[152828:,:]

X = x.values
Y = y.values

from sklearn.model_selection import train_test_split

x_train, x_test, y_train, y_test = train_test_split(X,Y,test_size=0.25,random_state=0)

from sklearn.linear_model import LinearRegression
lin_reg = LinearRegression()
lin_reg.fit(x_train,y_train)                 # y_train has three columns (Yuksek, Dusuk, Kapanıs)
lin_reg_predict = lin_reg.predict(x_test)

num = pd.to_numeric(pd.to_datetime(["2022-12-27 06:00:00"]))

print(lin_reg.predict([[1672120800000000000,3800,1500]]))   # [Zaman in ns, Acılıs, Hacim]
#print(lin_reg.predict([[pd.to_numeric(pd.to_datetime(["2022-12-27 06:00:00"]))[0],3800,1500]]))



from sklearn.preprocessing import PolynomialFeatures
x_poly = PolynomialFeatures(degree=3)        # polynomial feature transformer
poly_reg = x_poly.fit_transform(x_train)     # training features expanded to degree 3

lin_reg_poly = LinearRegression()
lin_reg_poly.fit(poly_reg,y_train)
lin_reg_poly_predict = lin_reg_poly.predict(x_poly.transform(x_test))

print(lin_reg_poly.predict(x_poly.transform([[1672120800000000000,3800,1500]])))
print(lin_reg_poly.predict(x_poly.transform([[num[0],3800,1500]])))

print(x_train.shape)
print(y_train.shape)
"""
from sklearn.preprocessing import StandardScaler
sc1 = StandardScaler()
sc2 = StandardScaler()
sc3 = StandardScaler()
x_trainsc = sc1.fit_transform(x_train.reshape(-1,1))
x_testsc = sc2.fit_transform(x_test.reshape(-1,1))
y_testsc = sc3.fit_transform(y_test.reshape(-1,1))
from sklearn.svm import SVR
svr1 = SVR(kernel="rbf")
svr1.fit(x_trainsc,y_train.ravel())
svr_predict = svr1.predict(x_testsc)
"""

from sklearn.tree import DecisionTreeRegressor
dt = DecisionTreeRegressor()
dt.fit(x_train,y_train)
dt_predict = dt.predict(x_test)

print(dt.predict([[num[0],3800,1500]]))

from sklearn.ensemble import RandomForestRegressor
rf = RandomForestRegressor(n_estimators=10, random_state=0)
rf.fit(x_train,y_train)
rf_predict = rf.predict(x_test)
print(rf.predict([[num[0],3800,1500]]))

from sklearn.metrics import r2_score
print("r2_score--------------------")
print("linear regrassion")
print(r2_score(y_test,lin_reg_predict))
print("polynımal regrassion")
print(r2_score(y_test,lin_reg_poly_predcit))
print("decision tree")
print(r2_score(y_test,dt_predict))
print( "random forest regrassıon ")
print(r2_score(y_test,rf_predcit))
print("svr")
#print(r2_score(y_testsc,svr_predict))


print("p-value-------------------")
model = sm.OLS(lin_reg.predict(x_train),x_train)
print(model.fit().summary())

The error I get is:
ValueError: shapes (25255,3) and (25255,3) not aligned: 3 (dim 1) != 25255 (dim 0)
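
For reference, the shapes going into that sm.OLS call, as far as I can tell: x_train has three feature columns, and lin_reg was fitted on a three-column y, so lin_reg.predict(x_train) is also two-dimensional with three columns.

print(x_train.shape)                    # (25255, 3) according to the traceback
print(lin_reg.predict(x_train).shape)   # also (25255, 3): one column per target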

What could be the reason?