import os
import time
import cv2
import numpy as np
import pandas as pd
from keras.applications.vgg19 import VGG19
from keras.preprocessing import image
from keras.preprocessing.image import ImageDataGenerator
from keras.applications.vgg19 import preprocess_input
from keras.models import Model
from keras.layers import Dense, GlobalAveragePooling2D, Dropout
from sklearn.metrics import classification_report, roc_curve, auc
import matplotlib.pyplot as plt
from keras.utils import to_categorical
from keras.optimizers import Adam
from keras.losses import categorical_crossentropy

# Build the classifier: ImageNet-pretrained VGG19 convolutional base plus a
# freshly initialized two-class softmax head.
base_model = VGG19(weights='imagenet', include_top=False)

head = GlobalAveragePooling2D()(base_model.output)
head = Dense(1024, activation='relu')(head)
head = Dropout(0.5)(head)  # regularize the new dense head
predictions = Dense(2, activation='softmax')(head)  # 2 classes: cancer / normal

model = Model(inputs=base_model.input, outputs=predictions)

# Phase 1 strategy: keep every pretrained convolutional layer frozen so only
# the new head learns at first.
for conv_layer in base_model.layers:
    conv_layer.trainable = False

# Compile with defaults; a lower learning rate is used later for fine-tuning.
model.compile(optimizer=Adam(), loss=categorical_crossentropy, metrics=['accuracy'])

# Function to detect and crop chest area
def detect_and_crop_chest(image):
    """Detect a body region in a BGR image and return the cropped region.

    Falls back to returning the original image when the cascade finds
    nothing, so callers always get a valid array.

    NOTE(review): 'haarcascade_fullbody.xml' is OpenCV's pedestrian
    detector, not a radiograph ROI model — it is unlikely to fire on chest
    X-rays; confirm a dedicated ROI method is not intended here.
    """
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    chest_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_fullbody.xml')
    chests = chest_cascade.detectMultiScale(gray, 1.1, 4)
    if len(chests) == 0:
        return image  # no detection: keep the full image
    # Fix: the original returned whichever detection came first, which is
    # arbitrary. Prefer the largest region, most likely the dominant subject.
    x, y, w, h = max(chests, key=lambda box: box[2] * box[3])
    return image[y:y+h, x:x+w]

# Load and preprocess images
def load_images_from_folder(folder):
    """Load every readable image in *folder*, crop to the detected chest
    region, and resize to the 224x224 input size VGG19 expects.

    Fix: files are visited in sorted order — ``os.listdir`` order is
    filesystem-dependent, which made dataset ordering non-deterministic
    across runs/machines. Unreadable (non-image) files are skipped.
    """
    images = []
    for filename in sorted(os.listdir(folder)):
        img = cv2.imread(os.path.join(folder, filename))
        if img is None:  # cv2.imread returns None for unreadable files
            continue
        img = detect_and_crop_chest(img)
        images.append(cv2.resize(img, (224, 224)))
    return images

# Expected directory layout: <split>/cancer and <split>/normal subfolders.
train_dir = 'D:\\kaggle\\final\\data1\\train'
test_dir = 'D:\\kaggle\\final\\data1\\test'

cancer_train_dir = os.path.join(train_dir, 'cancer')
normal_train_dir = os.path.join(train_dir, 'normal')
cancer_test_dir = os.path.join(test_dir, 'cancer')
normal_test_dir = os.path.join(test_dir, 'normal')

# Load the four image sets (train/test x cancer/normal).
cancer_train_images = load_images_from_folder(cancer_train_dir)
normal_train_images = load_images_from_folder(normal_train_dir)
cancer_test_images = load_images_from_folder(cancer_test_dir)
normal_test_images = load_images_from_folder(normal_test_dir)

# Create labels: 1 = cancer, 0 = normal.
# Idiom fix: [x] * n replaces the verbose [x for _ in range(n)] form.
cancer_train_labels = [1] * len(cancer_train_images)
normal_train_labels = [0] * len(normal_train_images)

cancer_test_labels = [1] * len(cancer_test_images)
normal_test_labels = [0] * len(normal_test_images)

# Combine data — cancer samples first, then normal, matching the label lists.
X_train = np.array(cancer_train_images + normal_train_images)
y_train = np.array(cancer_train_labels + normal_train_labels)

X_test = np.array(cancer_test_images + normal_test_images)
y_test = np.array(cancer_test_labels + normal_test_labels)

# Apply VGG19's ImageNet preprocessing (channel-wise mean subtraction in BGR).
X_train = preprocess_input(X_train)
X_test = preprocess_input(X_test)

# One-hot encode for the 2-way softmax / categorical_crossentropy loss.
y_train = to_categorical(y_train, num_classes=2)
y_test = to_categorical(y_test, num_classes=2)

# On-the-fly augmentation to compensate for the small dataset.
datagen = ImageDataGenerator(
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest',
)

# Phase 1: train only the new head (the convolutional base is frozen).
start_time = time.time()
train_flow = datagen.flow(X_train, y_train, batch_size=32)
model.fit(train_flow, epochs=10, validation_data=(X_test, y_test))

# Phase 2: fine-tune — unfreeze the deepest 4 layers of the base network.
for deep_layer in base_model.layers[-4:]:
    deep_layer.trainable = True

# Recompiling is required for the new trainable flags to take effect; a
# small learning rate avoids wrecking the pretrained weights.
model.compile(optimizer=Adam(1e-5), loss=categorical_crossentropy, metrics=['accuracy'])

# Continue training with fine-tuning enabled.
model.fit(datagen.flow(X_train, y_train, batch_size=32), epochs=10, validation_data=(X_test, y_test))
end_time = time.time()

# Evaluate on the held-out test set.
y_pred = model.predict(X_test)

# Collapse softmax outputs and one-hot targets back to integer class labels.
y_pred_labels = np.argmax(y_pred, axis=1)
y_test_labels = np.argmax(y_test, axis=1)

# Per-class precision/recall/F1 summary.
print(classification_report(y_test_labels, y_pred_labels))

# Calculate and plot ROC curve and AUC.
# BUG FIX: roc_curve expects a continuous score, not hard argmax labels —
# feeding it y_pred_labels collapses the curve to a single operating point
# and understates AUC. Use the predicted probability of the positive class
# (column 1 = cancer) instead.
fpr, tpr, _ = roc_curve(y_test_labels, y_pred[:, 1])
roc_auc = auc(fpr, tpr)
plt.figure()
plt.plot(fpr, tpr, label='ROC curve (area = %0.2f)' % roc_auc)
plt.plot([0, 1], [0, 1], 'k--')  # chance diagonal for reference
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver Operating Characteristic')
plt.legend(loc="lower right")
plt.show()

# Print program running time (seconds, both training phases included)
print(f'Program running time: {end_time - start_time}')


'''
Epoch 1/10
10/10 [==============================] - 569s 57s/step - loss: 1.4086 - accuracy: 0.7789 - val_loss: 1.8321 - val_accuracy: 0.7624
Epoch 2/10
10/10 [==============================] - 526s 52s/step - loss: 0.7061 - accuracy: 0.8845 - val_loss: 1.8937 - val_accuracy: 0.8614
Epoch 3/10
10/10 [==============================] - 440s 46s/step - loss: 0.2478 - accuracy: 0.9472 - val_loss: 2.0511 - val_accuracy: 0.8515
Epoch 4/10
10/10 [==============================] - 525s 53s/step - loss: 0.2805 - accuracy: 0.9439 - val_loss: 3.2256 - val_accuracy: 0.7822
Epoch 5/10
10/10 [==============================] - 475s 46s/step - loss: 0.1677 - accuracy: 0.9703 - val_loss: 1.8724 - val_accuracy: 0.8713
Epoch 6/10
10/10 [==============================] - 483s 50s/step - loss: 0.0910 - accuracy: 0.9703 - val_loss: 2.0709 - val_accuracy: 0.8317
Epoch 7/10
10/10 [==============================] - 521s 52s/step - loss: 0.1619 - accuracy: 0.9670 - val_loss: 2.1987 - val_accuracy: 0.8416
Epoch 8/10
10/10 [==============================] - 476s 47s/step - loss: 0.0920 - accuracy: 0.9802 - val_loss: 2.8617 - val_accuracy: 0.8119
Epoch 9/10
10/10 [==============================] - 492s 48s/step - loss: 0.0961 - accuracy: 0.9901 - val_loss: 3.9417 - val_accuracy: 0.7525
Epoch 10/10
10/10 [==============================] - 436s 42s/step - loss: 0.1452 - accuracy: 0.9538 - val_loss: 3.0502 - val_accuracy: 0.7921
Epoch 1/10
10/10 [==============================] - 419s 42s/step - loss: 0.0895 - accuracy: 0.9769 - val_loss: 2.1374 - val_accuracy: 0.8515
Epoch 2/10
10/10 [==============================] - 368s 35s/step - loss: 0.0923 - accuracy: 0.9802 - val_loss: 3.1595 - val_accuracy: 0.7921
Epoch 3/10
10/10 [==============================] - 459s 47s/step - loss: 0.0126 - accuracy: 0.9934 - val_loss: 3.3136 - val_accuracy: 0.7822
Epoch 4/10
10/10 [==============================] - 521s 53s/step - loss: 0.0021 - accuracy: 1.0000 - val_loss: 3.2919 - val_accuracy: 0.7822
Epoch 5/10
10/10 [==============================] - 530s 52s/step - loss: 0.0451 - accuracy: 0.9835 - val_loss: 2.9602 - val_accuracy: 0.7921
Epoch 6/10
10/10 [==============================] - 488s 52s/step - loss: 0.0036 - accuracy: 1.0000 - val_loss: 4.3709 - val_accuracy: 0.7327
Epoch 7/10
10/10 [==============================] - 503s 53s/step - loss: 0.0408 - accuracy: 0.9868 - val_loss: 2.0652 - val_accuracy: 0.8515
Epoch 8/10
10/10 [==============================] - 511s 52s/step - loss: 0.0023 - accuracy: 1.0000 - val_loss: 3.1122 - val_accuracy: 0.7921
Epoch 9/10
10/10 [==============================] - 308s 32s/step - loss: 0.0030 - accuracy: 1.0000 - val_loss: 2.9629 - val_accuracy: 0.8020
Epoch 10/10
10/10 [==============================] - 323s 33s/step - loss: 4.1675e-04 - accuracy: 1.0000 - val_loss: 2.8798 - val_accuracy: 0.8119
4/4 [==============================] - 70s 17s/step
              precision    recall  f1-score   support

           0       1.00      0.37      0.54        30
           1       0.79      1.00      0.88        71

    accuracy                           0.81       101
   macro avg       0.89      0.68      0.71       101
weighted avg       0.85      0.81      0.78       101

Program running time: 9427.437506914139

'''