V0.1.6 - Using the Meta-Model Structure Selection (MetaMSS) algorithm for building Polynomial NARX models
Example created by Wilson Rocha Lacerda Junior
from sysidentpy.polynomial_basis import MetaMSS
from sysidentpy.metrics import root_relative_squared_error
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
df1 = pd.read_csv('examples/datasets/x_cc.csv')
df2 = pd.read_csv('examples/datasets/y_cc.csv')
df2[5000:80000].plot(figsize=(10, 4))

# Decimate the data by a factor of d = 500 before splitting it into train/test halves
x_train, x_test = np.split(df1.iloc[::500].values, 2)
y_train, y_test = np.split(df2.iloc[::500].values, 2)
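Since np.split returns equal halves, a quick sanity check (a sketch, not part of the original example) confirms the splits line up:

# Train and test halves should have matching shapes after decimation
assert x_train.shape == x_test.shape
assert y_train.shape == y_test.shape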
model = MetaMSS(
    norm=-2,
    xlag=3,
    n_inputs=1,
    ylag=3,
    non_degree=2,
    estimator="recursive_least_squares",
    k_agents_percent=10,
    estimate_parameter=True,
    maxiter=30,
    n_agents=10,
    p_value=0.05,
    loss_func='metamss_loss',
    p_ones=0.5,
    p_zeros=0.5,
)
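# A rough summary of the search hyperparameters above (our reading; see the
# MetaMSS documentation for details): n_agents candidate structures are evolved
# for maxiter iterations; p_ones/p_zeros set the probability of each regressor
# starting active or inactive in an agent; p_value is the significance level of
# the statistical test used to prune redundant regressors.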
model.fit(x_train, y_train, x_test, y_test)
yhat = model.predict(x_test, y_test, steps_ahead=None)
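# steps_ahead=None runs a free-run (infinity-steps-ahead) simulation. The same
# method accepts an integer for n-steps-ahead prediction, e.g. (a sketch):
# yhat_1 = model.predict(x_test, y_test, steps_ahead=1)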
rrse = root_relative_squared_error(y_test, yhat)
results = pd.DataFrame(
    model.results(err_precision=8, dtype="dec"),
    columns=["Regressors", "Parameters", "ERR"],
)
print(results, '\n', rrse)
ee, ex, extras, lam = model.residuals(x_test, y_test, yhat)
model.plot_result(y_test, yhat, ee, ex, n=600)
        Regressors  Parameters         ERR
 0               1   -510.5733  0.00000000
 1          y(k-1)      1.4864  0.00000000
 2          y(k-2)     -0.5617  0.00000000
 3          y(k-3)      0.0801  0.00000000
 4         x1(k-1)     22.7933  0.00000000
 5         x1(k-2)     13.1246  0.00000000
 6         x1(k-3)     -3.6797  0.00000000
 7   x1(k-1)y(k-1)     -0.1632  0.00000000
 8   x1(k-2)y(k-1)     -0.0929  0.00000000
 9   x1(k-3)y(k-1)      0.0209  0.00000000
10   x1(k-1)y(k-2)      0.0928  0.00000000
11   x1(k-2)y(k-2)      0.0471  0.00000000
12   x1(k-3)y(k-2)     -0.0053  0.00000000
13        y(k-3)^2     -0.0000  0.00000000
14   x1(k-1)y(k-3)     -0.0192  0.00000000
15   x1(k-2)y(k-3)     -0.0081  0.00000000
16       x1(k-1)^2    113.9663  0.00000000
17  x1(k-2)x1(k-1)     -1.6796  0.00000000
18  x1(k-3)x1(k-1)      3.7256  0.00000000
19       x1(k-2)^2     65.6228  0.00000000
20  x1(k-3)x1(k-2)     -0.7757  0.00000000
21       x1(k-3)^2    -18.3985  0.00000000
0.026685918916669635
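Note that the ERR column is filled with zeros: MetaMSS selects the model structure with a metaheuristic search rather than the ERR-based ranking used by FROLS-style algorithms, so the ERR values reported here carry no information. The final number is the root relative squared error (RRSE) of the free-run simulation.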

# Plotting the evolution of the agents
plt.plot(model.best_by_iter)
model.best_by_iter[-1]
0.0033329091582459066
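A slightly more readable version of the convergence plot, labeling the axes (a sketch reusing the best_by_iter attribute shown above):

plt.figure(figsize=(10, 4))
plt.plot(model.best_by_iter)
plt.xlabel('Iteration')
plt.ylabel('Best objective value')
plt.show()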

# You have access to all tested models
# model.tested_models
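For example, assuming tested_models is list-like (as the comment above suggests), you can check how many candidate structures the search evaluated (a sketch):

print(len(model.tested_models))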
from sklearn.neighbors import KNeighborsRegressor
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.linear_model import ARDRegression
from catboost import CatBoostRegressor
from sysidentpy.general_estimators import NARX
from sysidentpy.polynomial_basis import PolynomialNarmax
xlag = ylag = 10
estimators = [
    ('NARX_KNeighborsRegressor', NARX(
        base_estimator=KNeighborsRegressor(),
        xlag=xlag,
        ylag=ylag)),
    ('NARX_DecisionTreeRegressor', NARX(
        base_estimator=DecisionTreeRegressor(),
        xlag=xlag,
        ylag=ylag)),
    ('NARX_RandomForestRegressor', NARX(
        base_estimator=RandomForestRegressor(n_estimators=200),
        xlag=xlag,
        ylag=ylag)),
    ('NARX_Catboost', NARX(
        base_estimator=CatBoostRegressor(
            iterations=800,
            learning_rate=0.1,
            depth=8),
        xlag=xlag,
        ylag=ylag,
        non_degree=1,
        fit_params={'verbose': False})),
    ('NARX_ARD', NARX(
        base_estimator=ARDRegression(),
        xlag=xlag,
        ylag=ylag,
        non_degree=2)),
    ('Polynomial_NARX', PolynomialNarmax(
        non_degree=2,
        order_selection=True,
        n_info_values=50,
        extended_least_squares=False,
        ylag=ylag,
        xlag=xlag,
        info_criteria='bic',
        estimator='recursive_least_squares')),
    ('MetaMSS', MetaMSS(
        norm=-2,
        xlag=xlag,
        n_inputs=1,
        ylag=ylag,
        non_degree=2,
        estimator="recursive_least_squares",
        k_agents_percent=2,
        estimate_parameter=True,
        maxiter=30,
        n_agents=10,
        p_value=0.05,
        loss_func='metamss_loss',
        p_ones=0.5,
        p_zeros=0.5)),
]
resultados = {}
for nome_do_modelo, modelo in estimators:
    resultados[nome_do_modelo] = []
    if nome_do_modelo == 'MetaMSS':
        # MetaMSS also receives the test data, since it evaluates
        # candidate structures on it during the search (see above)
        modelo.fit(x_train, y_train, x_test, y_test)
    else:
        modelo.fit(x_train, y_train)
    yhat = modelo.predict(x_test, y_test)
    result = root_relative_squared_error(y_test, yhat)
    resultados[nome_do_modelo].append(result)
    print(nome_do_modelo, '%.3f' % np.mean(result))
for model_name, scores in sorted(resultados.items(), key=lambda x: np.mean(x[1])):
    print(model_name, np.mean(scores))
MetaMSS 0.03840745957615934
Polynomial_NARX 0.0461106373498736
NARX_ARD 0.07413356855178779
NARX_DecisionTreeRegressor 0.12749128195675755
NARX_RandomForestRegressor 0.17013726390066733
NARX_Catboost 0.17923040407121107
NARX_KNeighborsRegressor 1.833370478725381
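If you prefer, the comparison can be collected into a DataFrame for easier inspection; a sketch reusing the resultados dict built above:

summary = pd.DataFrame(
    [(name, np.mean(scores)) for name, scores in resultados.items()],
    columns=['Model', 'RRSE'],
).sort_values(by='RRSE')
print(summary)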