API by thiyasizwekubeka · Pull Request #7 · Explore-AI/regression-predict-api-template · GitHub
This repository was archived by the owner on Nov 23, 2023. It is now read-only.

API #7

Open

thiyasizwekubeka wants to merge 2 commits into master.

Changes from all commits
Binary file added .DS_Store
Binary file not shown.
1,362 changes: 1,362 additions & 0 deletions Sp.ipynb

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion api.py
@@ -30,7 +30,7 @@
 # Load our model into memory.
 # Please update this path to reflect your own trained model.
 static_model = load_model(
-    path_to_model='assets/trained-models/sendy_simple_lm_regression.pkl')
+    path_to_model='../trained-models/sendy_simple_lm_regression.pkl')

 print ('-'*40)
 print ('Model succesfully loaded')
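The updated path is resolved relative to the directory from which api.py is started, not relative to the file itself. A minimal sanity-check sketch, assuming the pickle was written by utils/train_model.py and that the server is launched from a directory one level below trained-models/ (both assumptions, not stated in this diff):

import os
import pickle

# Path as used in api.py after this change; it only resolves if the
# current working directory sits one level below trained-models/.
model_path = '../trained-models/sendy_simple_lm_regression.pkl'

if os.path.exists(model_path):
    with open(model_path, 'rb') as f:
        model = pickle.load(f)
    print(f"Loaded a {type(model).__name__} from {os.path.abspath(model_path)}")
else:
    print(f"No model found at {os.path.abspath(model_path)}; "
          "check the directory api.py is launched from.")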
Binary file added assets/.DS_Store
Binary file not shown.
@@ -0,0 +1,6 @@
+{
+ "cells": [],
+ "metadata": {},
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
52 changes: 52 additions & 0 deletions assets/trained-models/Untitled.ipynb
@@ -0,0 +1,52 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "'/Users/khwezilokusakubeka/Documents/regression-api-team4/assets/trained-models'"
+      ]
+     },
+     "execution_count": 1,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "pwd"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.7.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
16 changes: 13 additions & 3 deletions model.py
@@ -59,9 +59,19 @@ def _preprocess_data(data):
     # ---------------------------------------------------------------

     # ----------- Replace this code with your own preprocessing steps --------
-    predict_vector = feature_vector_df[['Pickup Lat','Pickup Long',
-                                        'Destination Lat','Destination Long']]
     # ------------------------------------------------------------------------

+    #train = pd.read_csv("Train.csv")
+    #test = pd.read_csv("Test.csv")
+
+    new = feature_vector_df.drop(['Arrival at Destination - Day of Month',
+                                  'Arrival at Destination - Weekday (Mo = 1)',
+                                  'Arrival at Destination - Time', 'Distance (KM)'],
+                                 axis=1)
+    #y = np.array(new['Time from Pickup to Arrival']).reshape(-1, 1)
+
+    #df = pd.concat([new,test])
+
+    predict_vector = new.fillna(1)

     return predict_vector
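The modified _preprocess_data keeps every incoming column except the four arrival/distance fields and fills missing values with 1. A minimal sketch of exercising that logic on a single illustrative record (the column values below are made up, not taken from the Zindi data):

import pandas as pd

# One illustrative request row; a real payload carries the full set of
# Test.csv columns merged with the riders data.
feature_vector_df = pd.DataFrame([{
    'Order No': 'Order_No_1',
    'Pickup Lat': -1.28, 'Pickup Long': 36.82,
    'Destination Lat': -1.30, 'Destination Long': 36.83,
    'Arrival at Destination - Day of Month': 9,
    'Arrival at Destination - Weekday (Mo = 1)': 5,
    'Arrival at Destination - Time': '10:39:55 AM',
    'Distance (KM)': 4,
}])

# Same steps as the PR's preprocessing: drop the arrival/distance columns,
# then fill any remaining NaNs with 1.
new = feature_vector_df.drop(['Arrival at Destination - Day of Month',
                              'Arrival at Destination - Weekday (Mo = 1)',
                              'Arrival at Destination - Time', 'Distance (KM)'],
                             axis=1)
predict_vector = new.fillna(1)
print(predict_vector.columns.tolist())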
Binary file added utils/.DS_Store
Binary file not shown.
Binary file added utils/data/.DS_Store
Binary file not shown.
7,069 changes: 7,069 additions & 0 deletions utils/data/Test.csv

Large diffs are not rendered by default.

39,223 changes: 21,202 additions & 18,021 deletions utils/data/train_data.csv → utils/data/Train.csv

Large diffs are not rendered by default.

3,182 changes: 0 additions & 3,182 deletions utils/data/test_data.csv

This file was deleted.

4 changes: 2 additions & 2 deletions utils/request.py
@@ -25,7 +25,7 @@
 # Load data from file to send as an API POST request.
 # We prepare a DataFrame with the public test set + riders data
 # from the Zindi challenge.
-test = pd.read_csv('data/test_data.csv')
+test = pd.read_csv('data/Test.csv')
 riders = pd.read_csv('data/riders.csv')
 test = test.merge(riders, how='left', on='Rider Id')

@@ -38,7 +38,7 @@
 # replace the URL below with its public IP:

 # url = 'http://{public-ip-address-of-remote-machine}:5000/api_v0.1'
-url = 'http://127.0.0.1:5000/api_v0.1'
+url = 'http://176.34.142.77:5000/api_v0.1'

 # Perform the POST request.
 print(f"Sending POST request to web server API at: {url}")
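For reference, a stand-alone sketch of the POST request this script performs, assuming the API is reachable and accepts a single JSON-serialised row (the exact payload format is defined by the rest of utils/request.py and api.py; the row construction below is illustrative):

import pandas as pd
import requests

# Build one test record merged with rider data, as the script above does.
test = pd.read_csv('data/Test.csv')
riders = pd.read_csv('data/riders.csv')
test = test.merge(riders, how='left', on='Rider Id')

# Use 127.0.0.1 for a local run, or the remote machine's public IP.
url = 'http://127.0.0.1:5000/api_v0.1'
payload = test.iloc[0].to_json()

response = requests.post(url, json=payload)
print(f"Status code: {response.status_code}")
print(f"API response: {response.json()}")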
54 changes: 44 additions & 10 deletions utils/train_model.py
@@ -10,25 +10,59 @@
 """

 # Dependencies
+#%matplotlib notebook
+import numpy as np
 import pandas as pd
 import pickle
+import matplotlib.pyplot as plt
+import seaborn as sns
+from mpl_toolkits.mplot3d import Axes3D
+from sklearn.compose import ColumnTransformer
+from sklearn.preprocessing import OneHotEncoder
+from sklearn.preprocessing import LabelEncoder
+from sklearn.model_selection import train_test_split
 from sklearn.linear_model import LinearRegression
+from sklearn.metrics import mean_squared_error
+import pickle

 # Fetch training data and preprocess for modeling
-train = pd.read_csv('data/train_data.csv')
-riders = pd.read_csv('data/riders.csv')
-train = train.merge(riders, how='left', on='Rider Id')
+train = pd.read_csv("data/Train.csv")
+test = pd.read_csv("data/Test.csv")

+new = train.drop(['Arrival at Destination - Day of Month',
+                  'Arrival at Destination - Weekday (Mo = 1)',
+                  'Arrival at Destination - Time', 'Distance (KM)'],
+                 axis=1)
+y = np.array(new['Time from Pickup to Arrival']).reshape(-1, 1)
+df = pd.concat([new,test])
+df_1 = df.fillna(1)

+df_2 = df.drop(['Order No', 'User Id', 'Vehicle Type', 'Rider Id','Placement - Weekday (Mo = 1)', 'Confirmation - Day of Month', 'Confirmation - Weekday (Mo = 1)', 'Arrival at Pickup - Day of Month', 'Arrival at Pickup - Weekday (Mo = 1)', 'Pickup - Day of Month','Pickup - Weekday (Mo = 1)', 'Placement - Time', 'Confirmation - Time', 'Arrival at Pickup - Time', 'Pickup - Time'], axis = 1)
+df_2 = df_2.fillna(1)
+df_2 = pd.get_dummies(df_2, drop_first=True)
+X = df_2[:len(train)]
+Y = df_2[len(train):]
+X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 0)

-y_train = train[['Time from Pickup to Arrival']]
-X_train = train[['Pickup Lat','Pickup Long',
-                 'Destination Lat','Destination Long']]

 # Fit model
-lm_regression = LinearRegression(normalize=True)
+regressor = LinearRegression()
 print ("Training Model...")
-lm_regression.fit(X_train, y_train)
+regressor.fit(X_train, y_train)
+pred = regressor.predict(X_test)


+def rmse(y_test, y_predict):
+    return np.sqrt(mean_squared_error(y_test, y_predict))

+rmse(y_test, pred)

+y_pred = regressor.predict(Y)
+test = test.fillna(0)

+test_1 = test[['Order No']]
+test_1['Time from Pickup to Arrival'] = y_pred

 # Pickle model for use within our API
 save_path = '../trained-models/sendy_simple_lm_regression.pkl'
 print (f"Training completed. Saving model to: {save_path}")
-pickle.dump(lm_regression, open(save_path,'wb'))
+pickle.dump(regressor, open(save_path,'wb'))
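The script computes a hold-out RMSE and assembles test_1 with an Order No column and the predicted arrival times, but neither is reported or written out. A short follow-on sketch, assuming the variables above are in scope (the output file name is an assumption, not part of this PR):

# Report the validation error rather than discarding the return value.
print(f"Validation RMSE: {rmse(y_test, pred):.3f}")

# Persist the predictions in a two-column, Zindi-style submission layout.
test_1.to_csv('sendy_submission.csv', index=False)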
Empty file added utils/untitled
Empty file.