In [1]: import pandas as pd
import numpy as np
import statsmodels.api as sm
# Data
data = [
[1, 1962, 51.1, 560.3, 0.6, 16, 0],
[2, 1963, 52.3, 590.5, 0.9, 16.4, 0],
[3, 1964, 53.6, 632.4, 1.1, 16.7, 0],
[4, 1965, 49.6, 684.9, 1.4, 17, 1],
[5, 1966, 56.8, 749.9, 1.6, 20.2, 1],
[6, 1967, 70.1, 793.9, 1, 23.4, 1],
[7, 1968, 80.5, 865, 0.8, 25.6, 1],
[8, 1969, 81.2, 931.4, 1.5, 24.6, 1],
[9, 1970, 80.3, 992.7, 1, 24.8, 1],
[10, 1971, 77.7, 1077.6, 1.5, 21.7, 1],
[11, 1972, 78.3, 1185.9, 2.95, 21.5, 1],
[12, 1973, 74.5, 1326.4, 4.8, 24.3, 0],
[13, 1974, 77.8, 1434.2, 10.3, 26.8, 0],
[14, 1975, 85.6, 1549.2, 16, 29.5, 0],
[15, 1976, 89.4, 1718, 14.7, 30.4, 0],
[16, 1977, 97.5, 1918.3, 8.3, 33.3, 0],
[17, 1978, 105.2, 2163.9, 11, 38, 0],
[18, 1979, 117.7, 2417.8, 13, 46.2, 0],
[19, 1980, 135.9, 2633.1, 15.3, 57.6, 0],
[20, 1981, 162.1, 2937.7, 18, 68.9, 0],
]
# Create the DataFrame
columns = ["t", "Año", "Y", "X2", "X3", "X4", "X5"]
df = pd.DataFrame(data, columns=columns)
# Dependent and independent variables
X = df[["X2", "X3", "X4", "X5"]]
X = sm.add_constant(X)  # Add a constant column (intercept)
y = df["Y"]
# Fit the OLS regression model
model = sm.OLS(y, X).fit()
model_summary = model.summary()
model_params = model.params
model_summary, model_params
Out[1]: (<class 'statsmodels.iolib.summary.Summary'>
"""
                            OLS Regression Results
==============================================================================
Dep. Variable:                      Y   R-squared:                       0.978
Model:                            OLS   Adj. R-squared:                  0.972
Method:                 Least Squares   F-statistic:                     163.7
Date:                Sun, 20 Apr 2025   Prob (F-statistic):           3.52e-12
Time:                        16:38:30   Log-Likelihood:                -57.206
No. Observations:                  20   AIC:                             124.4
Df Residuals:                      15   BIC:                             129.4
Df Model:                           4
Covariance Type:            nonrobust
==============================================================================
                 coef    std err          t      P>|t|      [0.025      0.975]
------------------------------------------------------------------------------
const         19.4434      3.406      5.708      0.000      12.184      26.703
X2             0.0181      0.006      2.817      0.013       0.004       0.032
X3            -0.2842      0.457     -0.622      0.544      -1.259       0.690
X4             1.3432      0.259      5.181      0.000       0.791       1.896
X5             6.3318      3.030      2.090      0.054      -0.126      12.789
==============================================================================
Omnibus:                        7.350   Durbin-Watson:                   0.910
Prob(Omnibus):                  0.025   Jarque-Bera (JB):                4.891
Skew:                          -1.132   Prob(JB):                       0.0867
Kurtosis:                       3.861   Cond. No.                     5.50e+03
==============================================================================
Notes:
[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.
[2] The condition number is large, 5.5e+03. This might indicate that there are
strong multicollinearity or other numerical problems.
""",
const    19.443447
X2        0.018056
X3       -0.284220
X4        1.343195
X5        6.331794
dtype: float64)
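The summary's note on the large condition number (5.5e+03) points to possible multicollinearity among the regressors. A minimal follow-up sketch, not part of the original session, is to compute variance inflation factors with statsmodels' variance_inflation_factor, reusing the design matrix X defined above:

In [ ]: from statsmodels.stats.outliers_influence import variance_inflation_factor
# Compute the VIF for each column of the design matrix (including the constant).
# VIFs well above ~10 are a common rule-of-thumb sign of problematic collinearity.
vif = pd.Series(
    [variance_inflation_factor(X.values, i) for i in range(X.shape[1])],
    index=X.columns,
    name="VIF",
)
vif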
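The Durbin-Watson statistic of 0.910 is well below 2, which suggests positive serial correlation in the residuals, as is common with annual time-series data like this 1962-1981 sample. One way to test this formally (again an illustrative cell, not part of the original output) is the Breusch-Godfrey test from statsmodels.stats.diagnostic, applied to the fitted model:

In [ ]: from statsmodels.stats.diagnostic import acorr_breusch_godfrey
# Breusch-Godfrey LM test for residual autocorrelation at lag 1.
# A small p-value indicates that the errors are serially correlated,
# so the OLS standard errors above should be treated with caution.
lm_stat, lm_pvalue, f_stat, f_pvalue = acorr_breusch_godfrey(model, nlags=1)
print(f"Breusch-Godfrey LM statistic: {lm_stat:.3f}, p-value: {lm_pvalue:.4f}")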