sayyedAhmed committed
Commit: bbda2dc
Parent(s): 65a224a

updating model card with library as transformer

Files changed:
- lstm_predictor.py +0 -17
- predict.py +0 -77
lstm_predictor.py
DELETED
@@ -1,17 +0,0 @@
-import torch
-import torch.nn as nn
-
-class LSTMPredictor(nn.Module):
-    def __init__(self, input_dim, hidden_dim, output_dim, forecast_horizon=3, n_layers=2, dropout=0.2):
-        super().__init__()
-        self.hidden_dim = hidden_dim
-        self.n_layers = n_layers
-        self.forecast_horizon = forecast_horizon
-
-        self.lstm = nn.LSTM(input_dim, hidden_dim, n_layers, batch_first=True, dropout=dropout)
-        self.fc = nn.Linear(hidden_dim, output_dim * forecast_horizon)
-
-    def forward(self, x):
-        lstm_out, _ = self.lstm(x)
-        predictions = self.fc(lstm_out[:, -1, :])
-        return predictions.view(-1, self.forecast_horizon, predictions.shape[-1] // self.forecast_horizon)
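For context, the deleted LSTMPredictor maps a (batch, seq_length, input_dim) tensor to a (batch, forecast_horizon, output_dim) forecast: it projects the last LSTM hidden state to output_dim * forecast_horizon values and reshapes. A minimal shape check, assuming the deleted file is restored locally as lstm_predictor.py; the dimensions below are illustrative, not taken from the repository:

import torch
from lstm_predictor import LSTMPredictor  # assumes the deleted file is saved locally

# Illustrative dimensions only; the real hyperparameters live in config.json.
model = LSTMPredictor(input_dim=4, hidden_dim=64, output_dim=4, forecast_horizon=3)
x = torch.randn(8, 30, 4)   # (batch=8, seq_length=30, input_dim=4)
y = model(x)                # fc output of size 4 * 3 is reshaped inside forward()
print(y.shape)              # torch.Size([8, 3, 4]) = (batch, forecast_horizon, output_dim)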
predict.py
DELETED
@@ -1,77 +0,0 @@
-import json
-import torch
-import numpy as np
-from torch.utils.data import DataLoader, Dataset
-from lstm_predictor import LSTMPredictor
-from huggingface_hub import hf_hub_download
-
-# Dataset Class
-class TimeSeriesDataset(Dataset):
-    def __init__(self, data, seq_length):
-        self.data = data
-        self.seq_length = seq_length
-
-    def __len__(self):
-        return len(self.data) - self.seq_length
-
-    def __getitem__(self, idx):
-        return torch.tensor(self.data[idx:idx + self.seq_length], dtype=torch.float32)
-
-# Load Config
-def load_config(config_path):
-    with open(config_path, 'r') as file:
-        config = json.load(file)
-    return config
-
-# Load Model from Hugging Face
-def load_model(config):
-    # Download model from Hugging Face
-    model_file = hf_hub_download(repo_id=config["repo_id"], filename=config["model_path"])
-
-    # Load the model architecture
-    model = LSTMPredictor(
-        input_dim=config["input_dim"],
-        hidden_dim=config["hidden_dim"],
-        output_dim=config["output_dim"],
-        forecast_horizon=config["forecast_horizon"],
-        n_layers=config["n_layers"],
-        dropout=config["dropout"]
-    )
-    # Load weights
-    model.load_state_dict(torch.load(model_file, map_location=torch.device(config["device"])))
-    model.to(config["device"])
-    model.eval()
-    return model
-
-# Prediction Function
-def predict(model, dataloader, config):
-    predictions = []
-    with torch.no_grad():
-        for batch in dataloader:
-            batch = batch.to(config["device"])
-            output = model(batch)
-            predictions.append(output.cpu().numpy())
-    return np.vstack(predictions)
-
-# Main Function
-def main():
-    config_path = "config.json"  # Path to config file
-    config = load_config(config_path)
-
-    # Load test data
-    raw_data = np.load(config["data_path"])
-    dataset = TimeSeriesDataset(raw_data, seq_length=config["seq_length"])
-    dataloader = DataLoader(dataset, batch_size=config["batch_size"], shuffle=False)
-
-    # Load model
-    model = load_model(config)
-
-    # Predict
-    predictions = predict(model, dataloader, config)
-
-    # Save predictions
-    np.save(config["output_path"], predictions)
-    print(f"Predictions saved to {config['output_path']}")
-
-if __name__ == "__main__":
-    main()
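predict.py drove the whole pipeline from a single config.json. A sketch of that file, reconstructed from the keys the script reads; every value below is a placeholder assumption, not taken from the repository:

import json

# Keys match exactly what predict.py reads; all values are hypothetical.
config = {
    "repo_id": "your-username/your-repo",  # Hub repo hosting the weights (placeholder)
    "model_path": "model.pth",             # weights filename inside the repo (placeholder)
    "input_dim": 4,
    "hidden_dim": 64,
    "output_dim": 4,
    "forecast_horizon": 3,
    "n_layers": 2,
    "dropout": 0.2,
    "device": "cpu",                       # or "cuda"
    "data_path": "test_data.npy",          # .npy array of shape (timesteps, input_dim)
    "seq_length": 30,
    "batch_size": 32,
    "output_path": "predictions.npy",
}

with open("config.json", "w") as f:
    json.dump(config, f, indent=2)

With both deleted files restored and a config like this in place, running python predict.py would download the weights via hf_hub_download, run batched inference with gradients disabled, and save the stacked forecasts to output_path.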