# --- Data transformation component ---
# (Document-viewer boilerplate removed; converted header to a comment so the file parses.)
import os
import sys
from dataclasses import dataclass

import numpy as np
import pandas as pd

import keras
from keras.utils import to_categorical

from src.exception import CustomException
from src.logger import logging
@dataclass
class DataTransformationConfig:
    """Filesystem paths for the transformed train/test artifacts.

    Decorated with @dataclass for consistency with ModelTrainerConfig,
    which the original apparently intended (the fields are annotated).
    """
    train_data_path: str = os.path.join('artifacts', "train.csv")
    test_data_path: str = os.path.join('artifacts', "test.csv")
class DataTransformation:
    """Loads the Fashion-MNIST CSVs and prepares arrays for model training."""

    def __init__(self):
        self.data_transformation_config = DataTransformationConfig()

    def data_prep(self, raw, img_rows=28, img_cols=28, num_classes=10):
        """Split a label+pixels dataframe into (images, one-hot labels).

        raw: DataFrame whose first column is the integer class label and whose
        remaining img_rows*img_cols columns are pixel intensities in [0, 255].
        img_rows/img_cols/num_classes default to the Fashion-MNIST layout
        (bug fix: they were referenced as undefined globals before, and the
        method returned nothing).

        Returns (out_x, out_y): images scaled to [0, 1] with shape
        (n, img_rows, img_cols, 1), and one-hot labels of shape (n, num_classes).
        """
        num_images = raw.shape[0]
        x_as_array = raw.values[:, 1:]
        x_shaped_array = x_as_array.reshape(num_images, img_rows, img_cols, 1)
        out_x = x_shaped_array / 255
        # One-hot encode the label column (equivalent to keras to_categorical).
        labels = raw.values[:, 0].astype(int)
        out_y = np.eye(num_classes)[labels]
        return out_x, out_y

    def initiate_data_transformation(self):
        """Read the train/test CSVs and return (xtrain, ytrain, xtest, ytest).

        Bug fix: the original returned None although the __main__ driver
        unpacks four values from this call.

        Raises:
            CustomException: wrapping the original error on any failure.
        """
        try:
            logging.info("Read train and test data")
            train_df = pd.read_csv('Dataset/fashion-mnist_train.csv')
            test_df = pd.read_csv('Dataset/fashion-mnist_test.csv')
            logging.info('Read the dataset as dataframe')
            # Bug fix: the original source wrapped this call mid-keyword
            # ("exist_ / ok=True"), a SyntaxError; rejoined.
            os.makedirs(os.path.dirname(self.data_transformation_config.train_data_path),
                        exist_ok=True)
            xtrain_data, ytrain_data = self.data_prep(train_df)
            xtest_data, ytest_data = self.data_prep(test_df)
            return xtrain_data, ytrain_data, xtest_data, ytest_data
        except Exception as e:
            raise CustomException(e, sys)
# ----------------------------------------------------------
# --- Model trainer component ---
import os
import sys
from dataclasses import dataclass
from src.exception import CustomException
from src.logger import logging
import tensorflow as tf
from zipfile import ZipFile
import urllib.request as request
from keras.models import Sequential
from pathlib import Path
from tensorflow.keras.layers import Input, Dense, Dropout, Attention
from tensorflow.keras.models import Model
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
from tensorflow import keras
from contextlib import redirect_stdout
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, Conv2D, Dropout
@dataclass
class ModelTrainerConfig:
    """Configuration for the model trainer component."""
    # Destination for the serialized trained model.
    # NOTE(review): the .pkl extension suggests pickling, but the object saved
    # here is a keras model — confirm save_object handles that (keras models
    # are normally persisted via model.save(...) as .h5/.keras).
    trained_model_file_path=os.path.join("artifacts","model.pkl")
class ModelTrainer:
    """Builds, trains, and persists the Fashion-MNIST CNN classifier."""

    def __init__(self):
        self.model_trainer_config = ModelTrainerConfig()

    def initiate_model_trainer(self, xtrain_data, ytrain_data):
        """Train a small CNN and save it to the configured artifact path.

        Args:
            xtrain_data: training images shaped (n, 28, 28, 1) per input_shape.
            ytrain_data: one-hot labels (categorical_crossentropy loss).

        Raises:
            CustomException: wrapping the original error on any failure.
        """
        try:
            logging.info("Prepare Base Model..")
            self.model = Sequential()
            self.model.add(Conv2D(20, kernel_size=(3, 3),
                                  activation='relu',
                                  input_shape=(28, 28, 1)))
            self.model.add(Conv2D(20, kernel_size=(3, 3), activation='relu'))
            self.model.add(Flatten())
            self.model.add(Dense(128, activation='relu'))
            self.model.add(Dense(10, activation='softmax'))
            logging.info('Model Created..')
            self.model.compile(loss=keras.losses.categorical_crossentropy,
                               optimizer='adam',
                               metrics=['accuracy'])
            self.model.fit(xtrain_data, ytrain_data,
                           batch_size=128,
                           epochs=1,
                           validation_split=0.2)
            # NOTE(review): save_object is not imported anywhere in this file —
            # presumably `from src.utils import save_object`; confirm and add it.
            save_object(
                file_path=self.model_trainer_config.trained_model_file_path,
                obj=self.model
            )
        except Exception as e:
            # Bug fix: the exception was constructed but never raised, so
            # training failures were silently swallowed.
            raise CustomException(e, sys)
# --------------------------------------------------------------------------
# --- Model evaluation component ---
import sys
import os
import pandas as pd
from src.exception import CustomException
from src.logger import logging
import tensorflow as tf
from src.components.data_transformation import DataTransformation
from src.components.model_trainer import ModelTrainer
class ModelEvaluation:
    """Loads the trained model and scores it on held-out test data."""

    def __init__(self):
        pass

    def evaluate_model(self, xtest_data, ytest_data):
        """Predict on xtest_data, log classification metrics, return y_pred.

        Args:
            xtest_data: test images in the shape the model was trained on.
            ytest_data: test labels as integer class indices — TODO confirm;
                if the caller passes one-hot labels, apply argmax first.

        Returns:
            y_pred: predicted integer class indices.

        Raises:
            CustomException: wrapping the original error on any failure.
        """
        try:
            x_test = xtest_data
            y_test = ytest_data
            logging.info('Model Loading..')
            model_path = os.path.join("artifacts", "model.pkl")
            # NOTE(review): load_object is not imported anywhere in this file —
            # presumably `from src.utils import load_object`; confirm and add it.
            model = load_object(file_path=model_path)
            logging.info('Model Evaluating')
            # Bug fix: y_pred was used below but never computed. The model ends
            # in a 10-way softmax, so take the argmax class per sample.
            y_pred = model.predict(x_test).argmax(axis=1)
            accuracy = accuracy_score(y_test, y_pred)
            precision = precision_score(y_test, y_pred, average='micro')
            recall = recall_score(y_test, y_pred, average='micro')
            f1 = f1_score(y_test, y_pred, average='micro')
            # Bug fix: the metrics were computed and then discarded; log them.
            logging.info("accuracy=%s precision=%s recall=%s f1=%s",
                         accuracy, precision, recall, f1)
            return y_pred
        except Exception as e:
            raise CustomException(e, sys)
if __name__ == "__main__":
    # Full pipeline driver: transform data, train the CNN, then evaluate it.
    transformer = DataTransformation()
    xtrain_data, ytrain_data, xtest_data, ytest_data = (
        transformer.initiate_data_transformation()
    )
    trainer = ModelTrainer()
    print(trainer.initiate_model_trainer(xtrain_data, ytrain_data))
    evaluator = ModelEvaluation()
    print(evaluator.evaluate_model(xtest_data, ytest_data))
import sys
import os
import numpy as np
import pandas as pd
from src.exception import CustomException
from src.logger import logging
import keras
import requests
import zipfile
import io
import os
from keras.utils import to_categorical
@dataclass
class DataTransformationConfig:
    """Static paths and URL for downloading/extracting the raw dataset zip."""
    root_dir = 'artifacts/data_ingestion'
    # Bug fix: the URL literal was wrapped onto its own line after "file_url =",
    # a SyntaxError; rejoined into one assignment (implicit string concat).
    file_url = ('https://github.com/Dipeshshome/DS-AI-ML-DL--IIT-Kharagpur/'
                'raw/main/Dataset.zip')
    download_path = 'artifacts/data_ingestion/data.zip'
    extracted_dir = 'artifacts/data_ingestion'
class DataTransformation:
    """Downloads the raw dataset archive and prepares arrays for training."""

    def __init__(self):
        self.data_transformation_config = DataTransformationConfig()

    def data_prep(self, raw, img_rows=28, img_cols=28, num_classes=10):
        """Split a label+pixels dataframe into (images, one-hot labels).

        raw: DataFrame whose first column is the integer class label and whose
        remaining img_rows*img_cols columns are pixel intensities in [0, 255].
        img_rows/img_cols/num_classes default to the Fashion-MNIST layout
        (bug fix: they were referenced as undefined globals before, and the
        method returned nothing).

        Returns (out_x, out_y): images scaled to [0, 1] with shape
        (n, img_rows, img_cols, 1), and one-hot labels of shape (n, num_classes).
        """
        num_images = raw.shape[0]
        x_as_array = raw.values[:, 1:]
        x_shaped_array = x_as_array.reshape(num_images, img_rows, img_cols, 1)
        out_x = x_shaped_array / 255
        # One-hot encode the label column (equivalent to keras to_categorical).
        labels = raw.values[:, 0].astype(int)
        out_y = np.eye(num_classes)[labels]
        return out_x, out_y

    def initiate_data_transformation(self):
        """Download the dataset zip into the ingestion directory.

        Raises:
            CustomException: wrapping the original error on any failure.
        """
        try:
            logging.info("Read train and test data")
            cfg = self.data_transformation_config
            # Bug fix: the original makedirs targeted train_data_path, which is
            # commented out of this config (AttributeError) and was wrapped
            # mid-keyword; ensure the ingestion dir exists before writing.
            os.makedirs(cfg.root_dir, exist_ok=True)
            # Bug fix: file_url and download_path were referenced as bare
            # names (NameError); they are attributes of the config object.
            response = requests.get(cfg.file_url)
            with open(cfg.download_path, 'wb') as file:
                file.write(response.content)
        except Exception as e:
            raise CustomException(e, sys)
if __name__ == "__main__":
    # Smoke-test entry point for the ingestion/transformation component.
    pipeline = DataTransformation()
    # NOTE(review): this unpack requires initiate_data_transformation to return
    # a 4-tuple (xtrain, ytrain, xtest, ytest) — as written above it returns
    # None, so this line raises TypeError; confirm the intended return value.
    xtrain_data, ytrain_data, xtest_data, ytest_data = (
        pipeline.initiate_data_transformation()
    )