NeuralForecast Inference - Reading Data from a CSV File for Inference
flyfish
from ray import tune
from neuralforecast.core import NeuralForecast
from neuralforecast.auto import AutoMLP
from neuralforecast.models import NBEATS, NHITS
import torch
import torch.nn as nn
import os
import pickle
import warnings
from copy import deepcopy
from itertools import chain
from typing import Any, Dict, List, Optional, Union

import fsspec
import numpy as np
import pandas as pd

from neuralforecast.tsdataset import TimeSeriesDataset
from neuralforecast.models import (
    GRU, LSTM, RNN, TCN, DeepAR, DilatedRNN, MLP,
    NHITS, NBEATS, NBEATSx, DLinear, NLinear,
    TFT, VanillaTransformer, Informer, Autoformer, FEDformer,
    StemGNN, PatchTST, TimesNet, TimeLLM, TSMixer,
)
# Maps the checkpoint filename prefix (e.g. "automlp" from "automlp_0.ckpt")
# to the model class used to restore it.
MODEL_FILENAME_DICT = {
    "autoformer": Autoformer, "autoautoformer": Autoformer,
    "deepar": DeepAR, "autodeepar": DeepAR,
    "dlinear": DLinear, "autodlinear": DLinear,
    "nlinear": NLinear, "autonlinear": NLinear,
    "dilatedrnn": DilatedRNN, "autodilatedrnn": DilatedRNN,
    "fedformer": FEDformer, "autofedformer": FEDformer,
    "gru": GRU, "autogru": GRU,
    "informer": Informer, "autoinformer": Informer,
    "lstm": LSTM, "autolstm": LSTM,
    "mlp": MLP, "automlp": MLP,
    "nbeats": NBEATS, "autonbeats": NBEATS,
    "nbeatsx": NBEATSx, "autonbeatsx": NBEATSx,
    "nhits": NHITS, "autonhits": NHITS,
    "patchtst": PatchTST, "autopatchtst": PatchTST,
    "rnn": RNN, "autornn": RNN,
    "stemgnn": StemGNN, "autostemgnn": StemGNN,
    "tcn": TCN, "autotcn": TCN,
    "tft": TFT, "autotft": TFT,
    "timesnet": TimesNet, "autotimesnet": TimesNet,
    "vanillatransformer": VanillaTransformer, "autovanillatransformer": VanillaTransformer,
    "timellm": TimeLLM,
    "tsmixer": TSMixer, "autotsmixer": TSMixer,
}
#model_path1 = "checkpoints\\test_run\\automlp_0.ckpt"
model_path = "checkpoints\\test_run"dataset_path = "checkpoints\\test_run\\dataset.pkl"def load(path, verbose=False, **kwargs):# Standarize path without '/'if path[-1] == "/":path = path[:-1]fs, _, paths = fsspec.get_fs_token_paths(path)files = [f.split("/")[-1] for f in fs.ls(path) if fs.isfile(f)]# Load modelsmodels_ckpt = [f for f in files if f.endswith(".ckpt")]if len(models_ckpt) == 0:raise Exception("No model found in directory.")if verbose:print(10 * "-" + " Loading models " + 10 * "-")models = []try:with fsspec.open(f"{path}/alias_to_model.pkl", "rb") as f:alias_to_model = pickle.load(f)except FileNotFoundError:alias_to_model = {}for model in models_ckpt:model_name = model.split("_")[0]model_class_name = alias_to_model.get(model_name, model_name)models.append(MODEL_FILENAME_DICT[model_class_name].load_from_checkpoint(f"{path}/{model}", **kwargs))if verbose:print(f"Model {model_name} loaded.")return modelsmodels = load(model_path,verbose=True)
print(models[0])
model = models[0]
model.eval()
print(model)

df = pd.read_csv('./test.csv')
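# Hypothetical sketch of what test.csv is assumed to look like: a timestamp
# column 'ds' plus the raw target column 'OT', e.g.
#   ds,OT
#   2024-01-01 00:00:00,1.23
#   2024-01-01 01:00:00,1.31
# The lines below reshape it into the unique_id / ds / y columns that
# NeuralForecast's TimeSeriesDataset expects.
assert {'ds', 'OT'}.issubset(df.columns), "test.csv should provide 'ds' and 'OT' columns"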
df['unique_id'] = 1
df = df.rename(columns={'OT': 'y'})
df['ds'] = pd.to_datetime(df['ds'])

# TimeSeriesDataset.from_df returns a tuple; its first element is the dataset itself,
# which is why tsd[0] is passed to predict() below.
tsd = TimeSeriesDataset.from_df(df)
print("tsd:",tsd)print(tsd[0])old_test_size = model.get_test_size()
print("old_test_size:",old_test_size)
model_fcsts = model.predict(tsd[0])
print(model_fcsts)
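For comparison, the same checkpoint directory can usually be driven through the higher-level NeuralForecast API instead of reloading each checkpoint by hand. The following is only a minimal sketch, assuming checkpoints\test_run was produced by NeuralForecast.save() (which also writes the dataset.pkl referenced above) and that test.csv has the same 'ds'/'OT' columns as before:

import pandas as pd
from neuralforecast.core import NeuralForecast

# Restore every model saved in the directory (plus the stored dataset/config, if present).
nf = NeuralForecast.load(path="checkpoints\\test_run")

# Reshape the CSV into the unique_id / ds / y layout, exactly as above.
df = pd.read_csv('./test.csv')
df['unique_id'] = 1
df = df.rename(columns={'OT': 'y'})
df['ds'] = pd.to_datetime(df['ds'])

# predict(df=...) forecasts from the last input window of df and returns a
# DataFrame with one forecast column per loaded model.
forecasts = nf.predict(df=df)
print(forecasts.head())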