# NeuralForecast inference - the dataset is read from the file dataset.pkl
# author: flyfish
import os
import pickle
import warnings
from copy import deepcopy
from itertools import chain
from typing import Any, Dict, List, Optional, Union

import fsspec
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
from ray import tune

from neuralforecast.auto import AutoMLP
from neuralforecast.core import NeuralForecast
from neuralforecast.models import (
    GRU,
    LSTM,
    MLP,
    RNN,
    TCN,
    TFT,
    Autoformer,
    DeepAR,
    DilatedRNN,
    DLinear,
    FEDformer,
    Informer,
    NBEATS,
    NBEATSx,
    NHITS,
    NLinear,
    PatchTST,
    StemGNN,
    TimeLLM,
    TimesNet,
    TSMixer,
    VanillaTransformer,
)
# Maps the lowercase model-name prefix of a checkpoint filename
# (e.g. "automlp_0.ckpt" -> "automlp") to the model class used to restore it.
# Each architecture is registered under both its plain name and its
# Auto* (hyperparameter-tuned) alias.
MODEL_FILENAME_DICT = {
    "autoformer": Autoformer,
    "autoautoformer": Autoformer,
    "deepar": DeepAR,
    "autodeepar": DeepAR,
    "dlinear": DLinear,
    "autodlinear": DLinear,
    "nlinear": NLinear,
    "autonlinear": NLinear,
    "dilatedrnn": DilatedRNN,
    "autodilatedrnn": DilatedRNN,
    "fedformer": FEDformer,
    "autofedformer": FEDformer,
    "gru": GRU,
    "autogru": GRU,
    "informer": Informer,
    "autoinformer": Informer,
    "lstm": LSTM,
    "autolstm": LSTM,
    "mlp": MLP,
    "automlp": MLP,
    "nbeats": NBEATS,
    "autonbeats": NBEATS,
    "nbeatsx": NBEATSx,
    "autonbeatsx": NBEATSx,
    "nhits": NHITS,
    "autonhits": NHITS,
    "patchtst": PatchTST,
    "autopatchtst": PatchTST,
    "rnn": RNN,
    "autornn": RNN,
    "stemgnn": StemGNN,
    "autostemgnn": StemGNN,
    "tcn": TCN,
    "autotcn": TCN,
    "tft": TFT,
    "autotft": TFT,
    "timesnet": TimesNet,
    "autotimesnet": TimesNet,
    "vanillatransformer": VanillaTransformer,
    "autovanillatransformer": VanillaTransformer,
    "timellm": TimeLLM,
    # Added for consistency: every other model has an auto* alias,
    # and an AutoTimeLLM checkpoint would otherwise raise KeyError.
    "autotimellm": TimeLLM,
    "tsmixer": TSMixer,
    "autotsmixer": TSMixer,
}
#model_path1 = "checkpoints\\test_run\\automlp_0.ckpt"
model_path = "checkpoints\\test_run"dataset_path = "checkpoints\\test_run\\dataset.pkl"def load(path, verbose=False, **kwargs):# Standarize path without '/'if path[-1] == "/":path = path[:-1]fs, _, paths = fsspec.get_fs_token_paths(path)files = [f.split("/")[-1] for f in fs.ls(path) if fs.isfile(f)]# Load modelsmodels_ckpt = [f for f in files if f.endswith(".ckpt")]if len(models_ckpt) == 0:raise Exception("No model found in directory.")if verbose:print(10 * "-" + " Loading models " + 10 * "-")models = []try:with fsspec.open(f"{path}/alias_to_model.pkl", "rb") as f:alias_to_model = pickle.load(f)except FileNotFoundError:alias_to_model = {}for model in models_ckpt:model_name = model.split("_")[0]model_class_name = alias_to_model.get(model_name, model_name)models.append(MODEL_FILENAME_DICT[model_class_name].load_from_checkpoint(f"{path}/{model}", **kwargs))if verbose:print(f"Model {model_name} loaded.")return modelsmodels = load(model_path,verbose=True)
print(models[0])
model = models[0]
model.eval()


# Load dataset
def load_dataset(path, verbose=True):
    """Load the pickled dataset stored as ``dataset.pkl`` inside *path*.

    Returns the unpickled dataset object, or ``None`` when the file is
    missing (a message is printed when *verbose* is True).
    """
    try:
        with fsspec.open(f"{path}/dataset.pkl", "rb") as f:
            dataset = pickle.load(f)
            print(dataset)
        if verbose:
            print("Dataset loaded.")
    except FileNotFoundError:
        dataset = None
        if verbose:
            print("No dataset found in directory.")
    return dataset


data = pd.read_pickle(dataset_path)
print("data:", data)

trimmed_dataset = load_dataset(model_path)
print(trimmed_dataset)  # e.g. TimeSeriesDataset(n_data=96, n_groups=1)

# NOTE: in the mangled original, this assignment had been swallowed into
# the trailing comment above, which would make the predict() call below
# fail with a NameError. Restored as a real statement.
step_size = 1
model_fcsts = model.predict(trimmed_dataset, step_size=step_size)
print(model_fcsts)