Upload inference.py with huggingface_hub
inference.py +46 -0
inference.py
ADDED
@@ -0,0 +1,46 @@
import numpy as np
import pandas as pd
import yfinance as yf
from keras.models import load_model
from sklearn.preprocessing import MinMaxScaler

# Load the trained Keras model from the repository root
model = load_model("stock_dl_model.h5")

def predict(stock_symbol="POWERGRID.NS"):
    # Download daily price history for the requested ticker
    start = pd.to_datetime("2000-01-01")
    end = pd.to_datetime("2024-10-01")
    df = yf.download(stock_symbol, start=start, end=end)

    # 70/30 chronological split on the closing price
    data_training = pd.DataFrame(df['Close'][0:int(len(df) * 0.70)])
    data_testing = pd.DataFrame(df['Close'][int(len(df) * 0.70):int(len(df))])

    # Fit the scaler on the training split and reuse it for the prediction window
    scaler = MinMaxScaler(feature_range=(0, 1))
    scaler.fit(data_training)

    # Prepend the last 100 training days so the first test sample has a full window
    past_100_days = data_training.tail(100)
    final_df = pd.concat([past_100_days, data_testing], ignore_index=True)
    input_data = scaler.transform(final_df)

    # Build sliding 100-day windows as model input
    x_test, y_test = [], []
    for i in range(100, input_data.shape[0]):
        x_test.append(input_data[i - 100:i])
        y_test.append(input_data[i, 0])

    x_test, y_test = np.array(x_test), np.array(y_test)

    y_predicted = model.predict(x_test)

    # Rescale by the inverse of the scaler's scale factor
    # (undoes the multiplication only; the min offset is not restored)
    scale = scaler.scale_
    scale_factor = 1 / scale[0]
    y_predicted = y_predicted * scale_factor
    y_test = y_test * scale_factor

    return {
        "prediction": y_predicted.tolist()
    }

def __call__(inputs):
    # Module-level entry point: accepts a payload dict such as {"inputs": "POWERGRID.NS"}
    stock_symbol = inputs.get("inputs", "POWERGRID.NS")
    return predict(stock_symbol)
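
For a quick local sanity check, the module can be imported and its entry point called directly. The snippet below is a minimal sketch, assuming stock_dl_model.h5 sits next to inference.py and the environment has network access for the yfinance download; the payload shape {"inputs": "<ticker>"} mirrors the default handled above.

import inference

handler = getattr(inference, "__call__")   # module-level entry point defined above
payload = {"inputs": "POWERGRID.NS"}       # same default ticker as predict()
result = handler(payload)

print(len(result["prediction"]), "predicted closing values")
print(result["prediction"][-5:])           # last few rescaled predictions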