Commit 09823ea · 0 parent(s)
Initial full clean push to Hugging Face
- .gitattributes +1 -0
- .huggingface.yml +10 -0
- app.py +206 -0
- disease_info.json +130 -0
- models/cropguard_best.pt +3 -0
- models/labels.json +19 -0
- requirements.txt +9 -0
- sample_images/grape_grape_esca_(black_measles)_sample_8.jpg +0 -0
- sample_images/grape_grape_leaf_blight_(isariopsis_leaf_spot)_sample_7.jpg +0 -0
- sample_images/grape_healthy_sample_5.jpg +0 -0
- sample_images/grape_healthy_sample_6.jpg +0 -0
- sample_images/potato_healthy_sample_1.jpg +0 -0
- sample_images/potato_healthy_sample_2.jpg +0 -0
- sample_images/potato_potato_early_blight_sample_3.jpg +0 -0
- sample_images/potato_potato_early_blight_sample_4.jpg +0 -0
- sample_images/tomato_healthy_sample_10.jpg +0 -0
- sample_images/tomato_healthy_sample_9.jpg +0 -0
- sample_images/tomato_tomato_late_blight_sample_11.jpg +0 -0
- sample_images/tomato_tomato_spider_mites_two-spotted_spider_mite_sample_12.jpg +0 -0
- src/data/__init__.py +0 -0
- src/data/augment.py +44 -0
- src/data/dataset.py +73 -0
- src/data/download.py +75 -0
- src/data/preprocess.py +55 -0
- src/model/__init__.py +0 -0
- src/model/architecture.py +28 -0
- src/model/gradcam.py +63 -0
- src/model/predict.py +26 -0
- src/model/train.py +48 -0
- src/models/cropguard_best.pt +3 -0
- src/models/labels.json +19 -0
- src/sample_images/grape_grape_esca_(black_measles)_sample_8.jpg +0 -0
- src/sample_images/grape_grape_leaf_blight_(isariopsis_leaf_spot)_sample_7.jpg +0 -0
- src/sample_images/grape_healthy_sample_5.jpg +0 -0
- src/sample_images/grape_healthy_sample_6.jpg +0 -0
- src/sample_images/potato_healthy_sample_1.jpg +0 -0
- src/sample_images/potato_healthy_sample_2.jpg +0 -0
- src/sample_images/potato_potato_early_blight_sample_3.jpg +0 -0
- src/sample_images/potato_potato_early_blight_sample_4.jpg +0 -0
- src/sample_images/tomato_healthy_sample_10.jpg +0 -0
- src/sample_images/tomato_healthy_sample_9.jpg +0 -0
- src/sample_images/tomato_tomato_late_blight_sample_11.jpg +0 -0
- src/sample_images/tomato_tomato_spider_mites_two-spotted_spider_mite_sample_12.jpg +0 -0
- src/utils/__init__.py +0 -0
- src/utils/config.py +41 -0
- src/utils/metrics.py +0 -0
- src/utils/transforms.py +20 -0
- src/{__init__.py} +0 -0
.gitattributes
ADDED
@@ -0,0 +1 @@
*.pt filter=lfs diff=lfs merge=lfs -text
.huggingface.yml
ADDED
@@ -0,0 +1,10 @@
---
title: CropGuard
emoji: 🌿
colorFrom: green
colorTo: green
sdk: gradio
sdk_version: "4.24.0"
app_file: app.py
pinned: false
---
app.py
ADDED
@@ -0,0 +1,206 @@
# app.py

import gradio as gr
import torch
import numpy as np
import matplotlib.pyplot as plt
import cv2
import os
import sys
import json
from PIL import Image

# Add project root to sys.path
current_dir = os.path.dirname(os.path.abspath(__file__))
project_root = os.path.dirname(current_dir)
if project_root not in sys.path:
    sys.path.append(project_root)

# Internal modules
from src.model.architecture import build_model
from src.model.gradcam import GradCAMPlusPlus as GradCAM
from src.utils.config import BEST_MODEL_PATH

# Load disease information
with open("disease_info.json", "r") as f:
    DISEASE_INFO = json.load(f)

# Load label mapping
with open("models/labels.json", "r") as f:
    idx_to_class = json.load(f)

# Load model
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
num_classes = len(idx_to_class)

model = build_model(num_classes=num_classes, freeze_backbone=False)
model.load_state_dict(torch.load(BEST_MODEL_PATH, map_location=device))
model = model.to(device)
model.eval()

target_layer = model.features[-1]

# Sample images
SAMPLE_DIR = "sample_images"
sample_choices = sorted(os.listdir(SAMPLE_DIR))

# Utility Functions
def beautify_name(raw_classname):
    parts = raw_classname.split("___")
    if len(parts) >= 3:
        plant = parts[1].title()
        disease = parts[2].replace("_", " ").replace("(", "").replace(")", "").replace("__", " ").title()
        return plant, disease
    else:
        return "Unknown", "Unknown"

def generate_gradcam(model, input_tensor, target_layer):
    gradcam = GradCAM(model, target_layer)
    cam = gradcam.generate(input_tensor)
    return cam

def preprocess_image(image):
    from torchvision import transforms
    transform = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    ])
    return transform(image)

def predict(image):
    if image is None:
        # One value per output component (6 outputs are wired up below)
        return None, None, None, None, None, None

    model.eval()
    image_tensor = preprocess_image(image).unsqueeze(0).to(device)

    with torch.no_grad():
        output = model(image_tensor)
        probs = torch.softmax(output, dim=1).cpu().numpy()[0]

    top3_indices = probs.argsort()[-3:][::-1]
    top3_classes = [(idx_to_class[str(idx)], float(probs[idx])) for idx in top3_indices]

    pred_class, pred_prob = top3_classes[0]
    plant, disease = beautify_name(pred_class)

    # Format Top-3 nicely
    top3_text = ""
    for i, (c, p) in enumerate(top3_classes, 1):
        c_plant, c_disease = beautify_name(c)
        top3_text += f"{i}. {c_plant} - {c_disease} ({p*100:.2f}%)\n"

    # GradCAM
    cam = generate_gradcam(model, image_tensor, target_layer)

    img_np = np.array(image) / 255.0

    # Resize uploaded image to 224x224 for matching heatmap
    img_np_resized = cv2.resize(img_np, (224, 224))

    img_gray = cv2.cvtColor(np.uint8(img_np_resized * 255), cv2.COLOR_RGB2GRAY) / 255.0
    img_gray_3ch = np.stack([img_gray]*3, axis=-1)

    heatmap = cv2.applyColorMap(np.uint8(255 * cam), cv2.COLORMAP_PLASMA)
    heatmap = np.float32(heatmap) / 255

    overlay = heatmap + img_gray_3ch
    overlay = overlay / np.max(overlay)

    fig, ax = plt.subplots(figsize=(5, 5))
    ax.imshow(overlay)
    ax.axis("off")
    cbar = plt.colorbar(plt.cm.ScalarMappable(cmap='plasma'), orientation='horizontal', pad=0.05, ax=ax)
    cbar.set_ticks([0, 0.5, 1])
    cbar.set_ticklabels(['Low Focus', 'Medium Focus', 'High Focus'])
    plt.tight_layout()

    cam_path = "cam_output.png"
    fig.savefig(cam_path)
    plt.close(fig)

    # Health Status + Disease Info
    if "healthy" in pred_class.lower():
        health_status = f"{plant} - Healthy"
        identified_disease = "None"
        disease_info_text = "✅ No disease detected."
    else:
        health_status = f"{plant} - Diseased"
        identified_disease = disease
        disease_data = DISEASE_INFO.get(pred_class, {})
        disease_info_text = f"""
**Symptoms:** {disease_data.get('symptoms', 'No information available.')}

**Causes:** {disease_data.get('causes', 'No information available.')}

**Disease Cycle:** {disease_data.get('disease_cycle', 'No information available.')}

**Care & Treatment:** {disease_data.get('care_treatment', 'No information available.')}

[Learn more on Wikipedia]({disease_data.get('wiki_url', '#')})
"""

    alert = None
    if pred_prob < 0.6:
        alert = "⚠️ Low confidence in prediction! Please verify manually."

    return health_status, identified_disease, top3_text, alert, cam_path, disease_info_text

def load_sample_image(sample_name):
    img_path = os.path.join(SAMPLE_DIR, sample_name)
    img = Image.open(img_path).convert("RGB")
    return img

# Interface
title = "CropGuard: Leaf Disease Detector"
copyright_text = "© 2025 Made by [Arka Mitra](https://github.com/mitraarka27)"
instruction_text = """
Upload a clear image of a **potato**, **tomato**, or **grape** leaf.

CropGuard will predict:
- Whether the leaf is **healthy** or **diseased**.
- The likely disease (if any).
- Where the model focused its attention.

⚡ **Note**: Currently supports only **Potato, Tomato, Grape** leaves.
"""

with gr.Blocks(theme="default") as app:
    with gr.Row():
        gr.Markdown(f"<h1 style='text-align: center;'>{title}</h1>")
        gr.Markdown("<p style='text-align: center;'>© 2025 Made by <a href='https://github.com/mitraarka27' target='_blank'>Arka Mitra</a></p>")
    with gr.Row():
        with gr.Column(scale=2):
            upload = gr.Image(
                type="pil",
                sources=["upload", "webcam", "clipboard"],
                label="Upload, Capture, or Paste Leaf Image"
            )
            gr.Markdown("**OR** choose from sample images below:")
            sample_dropdown = gr.Dropdown(choices=sample_choices, label="Select a Sample Image")
            load_btn = gr.Button("Load Sample Image")
            predict_btn = gr.Button("Predict", variant="primary")
            gr.Markdown(instruction_text)
            alert_box = gr.Textbox(label="Prediction Alert", lines=2, interactive=False)
            top3_preds = gr.Textbox(label="Top-3 Predictions", lines=5, interactive=False)
        with gr.Column(scale=3):
            health_status = gr.Label(label="Plant Health Status")
            disease_name = gr.Label(label="Identified Disease (includes details)")
            disease_info = gr.Markdown()
            heatmap = gr.Image(label="Model Focus Heatmap")

    load_btn.click(
        fn=load_sample_image,
        inputs=[sample_dropdown],
        outputs=[upload]
    )

    predict_btn.click(
        fn=predict,
        inputs=[upload],
        outputs=[health_status, disease_name, top3_preds, alert_box, heatmap, disease_info]
    )

if __name__ == "__main__":
    app.launch(server_name="127.0.0.1", server_port=7860, share=True)
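A minimal local smoke test for the app above could look roughly like the sketch below. It is illustrative only and not part of this commit; it assumes it is run from the repository root so the relative model, label, and sample-image paths resolve, and importing app loads the model without launching the server because of the __main__ guard.

# smoke_test.py (hypothetical, not committed)
from PIL import Image
import app  # executes the module-level model loading in app.py

img = Image.open("sample_images/potato_healthy_sample_1.jpg").convert("RGB")
health, disease, top3, alert, cam_path, info = app.predict(img)

print(health)        # e.g. "Potato - Healthy"
print(top3)          # top-3 classes with confidences
print(alert)         # None unless top-1 confidence < 0.6
print("Grad-CAM overlay written to:", cam_path)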
disease_info.json
ADDED
@@ -0,0 +1,130 @@
{
    "grape___Grape___Black_rot": {
        "name": "Grape Black Rot",
        "symptoms": "Small, brown spots on leaves that expand and develop black margins. Infected grapes shrivel and turn into hard, black 'mummies'.",
        "causes": "Caused by the fungus Guignardia bidwellii. Wet, warm conditions promote infection.",
        "disease_cycle": "Overwinters in mummified fruits and old canes. Rain splashes spread spores to new shoots and fruits during spring and summer.",
        "care_treatment": "Prune and destroy infected material. Apply early-season fungicide sprays. Promote air circulation by proper vine spacing.",
        "wiki_url": "https://en.wikipedia.org/wiki/Black_rot_(grape_disease)"
    },
    "grape___Grape___Esca_(Black_Measles)": {
        "name": "Grape Esca (Black Measles)",
        "symptoms": "Interveinal yellowing and scorching of leaves. Berries show dark spots, shrivel, and dry on the vine.",
        "causes": "Caused by a complex of fungi including Phaeomoniella chlamydospora. Wounds from pruning can facilitate infection.",
        "disease_cycle": "Fungi infect through pruning wounds and colonize the vascular system. Symptoms worsen over several seasons.",
        "care_treatment": "Avoid large pruning wounds. Remove infected vines if severe. Some systemic fungicides may help but control is difficult.",
        "wiki_url": "https://en.wikipedia.org/wiki/Esca_(grape)"
    },
    "grape___Grape___Leaf_blight_(Isariopsis_Leaf_Spot)": {
        "name": "Grape Leaf Blight (Isariopsis Leaf Spot)",
        "symptoms": "Brown, angular lesions on grape leaves, often leading to early defoliation.",
        "causes": "Caused by the fungus Pseudocercospora vitis (formerly Isariopsis). Moist, humid environments encourage disease spread.",
        "disease_cycle": "Spores are produced on infected leaves and spread by wind and rain. New infections occur throughout the growing season.",
        "care_treatment": "Apply protective fungicides during humid periods. Remove fallen infected leaves to reduce inoculum.",
        "wiki_url": "https://www.sciencedirect.com/science/article/abs/pii/S0261219414001135"
    },
    "potato___Potato___Early_blight": {
        "name": "Potato Early Blight",
        "symptoms": "Small brown lesions with concentric rings on leaves. Older leaves yellow and drop prematurely.",
        "causes": "Caused by the fungus Alternaria solani. Stress factors like drought or poor nutrition can worsen outbreaks.",
        "disease_cycle": "Overwinters in soil or debris. Spores are dispersed by wind and rain to infect new plants.",
        "care_treatment": "Use resistant varieties. Apply fungicides early. Maintain proper plant nutrition and avoid water stress.",
        "wiki_url": "https://en.wikipedia.org/wiki/Early_blight"
    },
    "potato___Potato___Late_blight": {
        "name": "Potato Late Blight",
        "symptoms": "Large, irregularly shaped water-soaked lesions on leaves, stems, and tubers; white mold under humid conditions.",
        "causes": "Caused by the oomycete Phytophthora infestans, favored by cool and wet weather.",
        "disease_cycle": "Spores spread rapidly through rain and wind, causing epidemics in wet conditions.",
        "care_treatment": "Destroy infected plants. Apply protective fungicides before expected rain. Plant certified disease-free seed potatoes.",
        "wiki_url": "https://en.wikipedia.org/wiki/Late_blight"
    },
    "potato___Potato___healthy": {
        "name": "Healthy Potato Leaf",
        "symptoms": "No visible signs of disease. Leaves are green, firm, and uniform.",
        "causes": "N/A",
        "disease_cycle": "N/A",
        "care_treatment": "Maintain good crop hygiene, regular monitoring, and optimal nutrition.",
        "wiki_url": ""
    },
    "tomato___Tomato___Bacterial_spot": {
        "name": "Tomato Bacterial Spot",
        "symptoms": "Small, dark brown water-soaked spots on leaves, stems, and fruit. Can cause fruit blemishes and leaf loss.",
        "causes": "Caused by Xanthomonas species. Spread favored by warm, wet weather.",
        "disease_cycle": "Bacteria overwinter in plant debris and seeds. Splashing water spreads bacteria to healthy tissues.",
        "care_treatment": "Use certified disease-free seed. Apply copper-based bactericides. Practice crop rotation.",
        "wiki_url": "https://en.wikipedia.org/wiki/Bacterial_spot"
    },
    "tomato___Tomato___Early_blight": {
        "name": "Tomato Early Blight",
        "symptoms": "Brown circular lesions with concentric rings on lower leaves. Defoliation and poor fruit development follow.",
        "causes": "Caused by the fungus Alternaria solani. Humid conditions favor spread.",
        "disease_cycle": "Spores overwinter in debris and infect plants in moist conditions, spreading via wind and rain.",
        "care_treatment": "Apply fungicides preventively. Maintain plant vigor and spacing. Remove plant debris.",
        "wiki_url": "https://en.wikipedia.org/wiki/Early_blight"
    },
    "tomato___Tomato___Late_blight": {
        "name": "Tomato Late Blight",
        "symptoms": "Dark, greasy lesions on leaves and stems. Fruit may show brown rot with a white mold under humid conditions.",
        "causes": "Caused by Phytophthora infestans. Cool, wet weather accelerates outbreaks.",
        "disease_cycle": "Rapid spread by airborne spores under moist conditions.",
        "care_treatment": "Remove infected plants immediately. Use fungicide sprays during cool, rainy periods.",
        "wiki_url": "https://en.wikipedia.org/wiki/Late_blight"
    },
    "tomato___Tomato___Leaf_Mold": {
        "name": "Tomato Leaf Mold",
        "symptoms": "Yellow spots on upper leaf surfaces, olive-green mold underneath. Severe cases cause leaf drop.",
        "causes": "Caused by the fungus Passalora fulva. High humidity inside greenhouses favors the disease.",
        "disease_cycle": "Spores spread through air and water. Survives in greenhouse debris.",
        "care_treatment": "Increase air circulation. Use resistant varieties. Apply fungicides in greenhouse crops.",
        "wiki_url": "https://en.wikipedia.org/wiki/Leaf_mold"
    },
    "tomato___Tomato___Septoria_leaf_spot": {
        "name": "Tomato Septoria Leaf Spot",
        "symptoms": "Numerous small, dark spots with gray centers on leaves. Causes early defoliation and yield loss.",
        "causes": "Caused by the fungus Septoria lycopersici. Moist, humid conditions favor outbreaks.",
        "disease_cycle": "Spores are produced on infected tissue and spread by water splash.",
        "care_treatment": "Rotate crops. Remove and destroy infected material. Apply appropriate fungicides.",
        "wiki_url": "https://en.wikipedia.org/wiki/Septoria_leaf_spot"
    },
    "tomato___Tomato___Spider_mites Two-spotted_spider_mite": {
        "name": "Tomato Spider Mites (Two-Spotted)",
        "symptoms": "Fine stippling on leaves, yellowing, bronzing, and webbing in severe cases.",
        "causes": "Caused by the tiny pest Tetranychus urticae (two-spotted spider mite). Hot, dry conditions encourage outbreaks.",
        "disease_cycle": "Eggs laid on undersides of leaves hatch and develop rapidly under warm temperatures.",
        "care_treatment": "Spray with miticides or insecticidal soap. Increase humidity. Use predatory mites for biological control.",
        "wiki_url": "https://en.wikipedia.org/wiki/Tetranychus_urticae"
    },
    "tomato___Tomato___Target_Spot": {
        "name": "Tomato Target Spot",
        "symptoms": "Brown lesions with concentric rings resembling a target. Affects leaves, stems, and fruit.",
        "causes": "Caused by the fungus Corynespora cassiicola.",
        "disease_cycle": "Spores spread by wind and water, especially under humid conditions.",
        "care_treatment": "Apply fungicides preventively. Maintain good field sanitation.",
        "wiki_url": "https://www.apsnet.org/edcenter/disandpath/fungalbasidio/pdlessons/Pages/TargetSpot.aspx"
    },
    "tomato___Tomato___Tomato_Yellow_Leaf_Curl_Virus": {
        "name": "Tomato Yellow Leaf Curl Virus",
        "symptoms": "Severe leaf curling, yellowing, and stunted plant growth. Reduces fruit set and quality.",
        "causes": "Caused by Tomato yellow leaf curl virus (TYLCV), transmitted by whiteflies.",
        "disease_cycle": "Whiteflies acquire and transmit the virus while feeding.",
        "care_treatment": "Use resistant varieties. Control whiteflies with insecticides and barriers.",
        "wiki_url": "https://en.wikipedia.org/wiki/Tomato_yellow_leaf_curl_virus"
    },
    "tomato___Tomato___Tomato_mosaic_virus": {
        "name": "Tomato Mosaic Virus",
        "symptoms": "Mottled light and dark green areas on leaves, leaf curling, and reduced fruit yield.",
        "causes": "Caused by Tomato mosaic virus (ToMV), a highly stable virus spread by mechanical contact.",
        "disease_cycle": "Spread by contaminated tools, hands, and infected seeds.",
        "care_treatment": "Disinfect tools. Use virus-free seed. Remove infected plants immediately.",
        "wiki_url": "https://en.wikipedia.org/wiki/Tomato_mosaic_virus"
    },
    "tomato___Tomato___healthy": {
        "name": "Healthy Tomato Leaf",
        "symptoms": "✅ No disease detected. Leaves appear green, strong, and vigorous.",
        "causes": "N/A",
        "disease_cycle": "N/A",
        "care_treatment": "Maintain proper crop care and hygiene practices.",
        "wiki_url": ""
    }
}
models/cropguard_best.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:926993adb8aba8af6cc714d23b580bf1d4d8a690b89ab5a9904ede4d57166e76
size 9225099
models/labels.json
ADDED
@@ -0,0 +1,19 @@
{
    "0": "grape___Grape___Black_rot",
    "1": "grape___Grape___Esca_(Black_Measles)",
    "2": "grape___Grape___Leaf_blight_(Isariopsis_Leaf_Spot)",
    "3": "grape___Grape___healthy",
    "4": "potato___Potato___Early_blight",
    "5": "potato___Potato___Late_blight",
    "6": "potato___Potato___healthy",
    "7": "tomato___Tomato___Bacterial_spot",
    "8": "tomato___Tomato___Early_blight",
    "9": "tomato___Tomato___Late_blight",
    "10": "tomato___Tomato___Leaf_Mold",
    "11": "tomato___Tomato___Septoria_leaf_spot",
    "12": "tomato___Tomato___Spider_mites Two-spotted_spider_mite",
    "13": "tomato___Tomato___Target_Spot",
    "14": "tomato___Tomato___Tomato_Yellow_Leaf_Curl_Virus",
    "15": "tomato___Tomato___Tomato_mosaic_virus",
    "16": "tomato___Tomato___healthy"
}
requirements.txt
ADDED
@@ -0,0 +1,9 @@
torch>=2.0
torchvision>=0.15
scikit-learn>=1.2
numpy
pandas
matplotlib
opencv-python
gradio
Pillow
sample_images/grape_grape_esca_(black_measles)_sample_8.jpg
ADDED
sample_images/grape_grape_leaf_blight_(isariopsis_leaf_spot)_sample_7.jpg
ADDED
sample_images/grape_healthy_sample_5.jpg
ADDED
sample_images/grape_healthy_sample_6.jpg
ADDED
sample_images/potato_healthy_sample_1.jpg
ADDED
sample_images/potato_healthy_sample_2.jpg
ADDED
sample_images/potato_potato_early_blight_sample_3.jpg
ADDED
sample_images/potato_potato_early_blight_sample_4.jpg
ADDED
sample_images/tomato_healthy_sample_10.jpg
ADDED
sample_images/tomato_healthy_sample_9.jpg
ADDED
sample_images/tomato_tomato_late_blight_sample_11.jpg
ADDED
sample_images/tomato_tomato_spider_mites_two-spotted_spider_mite_sample_12.jpg
ADDED
src/data/__init__.py
ADDED
File without changes
src/data/augment.py
ADDED
@@ -0,0 +1,44 @@
import torchvision.transforms as T

class AugmentationPipeline:
    """
    Data augmentation and preprocessing transformations for CropGuard.
    """

    def __init__(self):
        # Mean and Std from ImageNet (can be adjusted later if needed)
        self.mean = [0.485, 0.456, 0.406]
        self.std = [0.229, 0.224, 0.225]

        # Define transformations
        self.train_transforms = T.Compose([
            T.RandomHorizontalFlip(p=0.5),
            T.RandomVerticalFlip(p=0.5),
            T.RandomRotation(degrees=30),
            T.ColorJitter(brightness=0.2, contrast=0.2, saturation=0.2, hue=0.1),
            T.ToTensor(),
            T.Normalize(mean=self.mean, std=self.std)
        ])

        self.val_transforms = T.Compose([
            T.ToTensor(),
            T.Normalize(mean=self.mean, std=self.std)
        ])

        self.test_transforms = T.Compose([
            T.ToTensor(),
            T.Normalize(mean=self.mean, std=self.std)
        ])

    def get_transforms(self, phase="train"):
        """
        Returns the appropriate transformation based on phase.
        """
        if phase == "train":
            return self.train_transforms
        elif phase == "val":
            return self.val_transforms
        elif phase == "test":
            return self.test_transforms
        else:
            raise ValueError(f"Unknown phase: {phase}. Use 'train', 'val', or 'test'.")
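A short usage sketch for this pipeline (the sample path is one of the images committed above; note the pipeline normalizes but does not resize, so it relies on the source images sharing one resolution):

from PIL import Image
from src.data.augment import AugmentationPipeline

pipeline = AugmentationPipeline()
train_tf = pipeline.get_transforms(phase="train")   # flips, rotation, color jitter, normalize
val_tf = pipeline.get_transforms(phase="val")       # tensor conversion + normalize only

img = Image.open("sample_images/potato_healthy_sample_1.jpg").convert("RGB")
x = train_tf(img)            # torch.Tensor of shape [3, H, W]
print(x.shape, float(x.mean()))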
src/data/dataset.py
ADDED
@@ -0,0 +1,73 @@
import os
from PIL import Image
from torch.utils.data import Dataset

class PlantVillageDataset(Dataset):
    """
    PyTorch-compatible dataset for the cleaned and split PlantVillage dataset.

    Directory structure should be:
    root/
        crop1/
            disease1/
                img1.jpg
                ...
            disease2/
                ...
        crop2/
            ...
    """

    def __init__(self, root_dir, transform=None):
        """
        Args:
            root_dir (str): Path to split directory (e.g., data/split/train)
            transform (callable, optional): Transformations to apply to images
        """
        self.root_dir = root_dir
        self.transform = transform
        self.samples = []
        self.class_to_idx = {}
        self._prepare_dataset()

    def _prepare_dataset(self):
        """
        Scan directory and build (image_path, class_index) list
        """
        class_names = []
        for crop in sorted(os.listdir(self.root_dir)):
            crop_path = os.path.join(self.root_dir, crop)
            if not os.path.isdir(crop_path):
                continue

            for disease in sorted(os.listdir(crop_path)):
                disease_path = os.path.join(crop_path, disease)
                if not os.path.isdir(disease_path):
                    continue  # Safety check

                class_name = f"{crop}___{disease}"
                if class_name not in self.class_to_idx:
                    self.class_to_idx[class_name] = len(self.class_to_idx)
                    class_names.append(class_name)

                label = self.class_to_idx[class_name]

                for fname in os.listdir(disease_path):
                    if not fname.lower().endswith((".jpg", ".jpeg", ".png")):
                        continue
                    img_path = os.path.join(disease_path, fname)
                    self.samples.append((img_path, label))

        # print(f"[INFO] {len(self.samples)} images found across {len(self.class_to_idx)} classes.")

    def __len__(self):
        return len(self.samples)

    def __getitem__(self, idx):
        img_path, label = self.samples[idx]
        image = Image.open(img_path).convert("RGB")

        if self.transform:
            image = self.transform(image)

        return image, label
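A sketch of wiring this dataset into a PyTorch DataLoader together with the augmentation pipeline above (TRAIN_DIR comes from src/utils/config.py later in this commit; batch size and worker count are illustrative):

from torch.utils.data import DataLoader
from src.data.dataset import PlantVillageDataset
from src.data.augment import AugmentationPipeline
from src.utils.config import TRAIN_DIR

aug = AugmentationPipeline()
train_ds = PlantVillageDataset(root_dir=TRAIN_DIR, transform=aug.get_transforms("train"))
train_loader = DataLoader(train_ds, batch_size=32, shuffle=True, num_workers=2)

print(f"{len(train_ds)} images across {len(train_ds.class_to_idx)} classes")
images, labels = next(iter(train_loader))   # default collate assumes uniform image sizes, as in PlantVillage
print(images.shape, labels.shape)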
src/data/download.py
ADDED
@@ -0,0 +1,75 @@
import os
import shutil
from kaggle.api.kaggle_api_extended import KaggleApi
from src.utils.config import DATA_DIR, CLEAN_DIR, RAW_DIR, TARGET_CROPS  # ✅ Import cleanly

# -------------------------------
# Functions
# -------------------------------

def download_and_extract_dataset():
    """
    Download and extract the full PlantVillage dataset.
    """
    os.makedirs(RAW_DIR, exist_ok=True)

    api = KaggleApi()
    api.authenticate()

    print("[INFO] Downloading PlantVillage dataset...")
    api.dataset_download_files('mohitsingh1804/plantvillage', path=RAW_DIR, unzip=True)
    print("[INFO] Download complete and extracted.")

def clean_and_organize_dataset():
    """
    Organize Potato, Tomato, Grape from train/ and val/ into clean/ directory.
    """
    extracted_dir = os.path.join(RAW_DIR, "plantvillage")

    train_dir = os.path.join(extracted_dir, "train")
    val_dir = os.path.join(extracted_dir, "val")

    if not os.path.exists(CLEAN_DIR):
        os.makedirs(CLEAN_DIR)
        print(f"[INFO] Created clean directory at: {CLEAN_DIR}")

    for split_dir in [train_dir, val_dir]:
        if not os.path.exists(split_dir):
            raise FileNotFoundError(f"[ERROR] {split_dir} not found.")

        for folder in os.listdir(split_dir):
            full_folder_path = os.path.join(split_dir, folder)
            if os.path.isdir(full_folder_path) and any(folder.startswith(crop) for crop in TARGET_CROPS):
                crop_name = folder.split("___")[0].lower()
                disease_folder = folder

                destination_crop_dir = os.path.join(CLEAN_DIR, crop_name)
                os.makedirs(destination_crop_dir, exist_ok=True)

                destination_disease_dir = os.path.join(destination_crop_dir, disease_folder)
                os.makedirs(destination_disease_dir, exist_ok=True)

                for img_file in os.listdir(full_folder_path):
                    src_img = os.path.join(full_folder_path, img_file)
                    dst_img = os.path.join(destination_disease_dir, img_file)
                    shutil.copy(src_img, dst_img)

    print("[INFO] Crops cleaned and organized into 'clean/' directory from train and val folders.")

    # -------------------------------
    # Remove plant_disease_raw after cleaning
    # -------------------------------
    if os.path.exists(RAW_DIR):
        shutil.rmtree(RAW_DIR)
        print(f"[INFO] Deleted raw data directory at {RAW_DIR} after cleaning.")

def check_data_integrity():
    """
    Quick check that clean/ has the crops properly.
    """
    if not os.path.exists(CLEAN_DIR):
        raise FileNotFoundError(f"[ERROR] Clean folder {CLEAN_DIR} not found!")

    for crop in os.listdir(CLEAN_DIR):
        crop_dir = os.path.join(CLEAN_DIR, crop)
        print(f"[INFO] {crop.capitalize()}: {len(os.listdir(crop_dir))} disease classes found.")
src/data/preprocess.py
ADDED
@@ -0,0 +1,55 @@
import os
import shutil
import random
from src.utils.config import DATA_DIR, SPLIT_DIR, SEED

def split_clean_data(train_ratio=0.7, val_ratio=0.15, test_ratio=0.15, seed=SEED):
    """
    Split cleaned data into train/val/test splits under data/split/.
    """
    random.seed(seed)

    clean_dir = os.path.join(DATA_DIR, "clean")

    # Remove previous split if exists
    if os.path.exists(SPLIT_DIR):
        shutil.rmtree(SPLIT_DIR)
    os.makedirs(SPLIT_DIR)

    for crop in os.listdir(clean_dir):
        crop_path = os.path.join(clean_dir, crop)

        for disease_folder in os.listdir(crop_path):
            disease_path = os.path.join(crop_path, disease_folder)
            images = os.listdir(disease_path)
            if len(images) == 0:
                print(f"[WARNING] No images found in {disease_path}, skipping.")
                continue

            random.shuffle(images)

            n_total = len(images)
            n_train = int(n_total * train_ratio)
            n_val = int(n_total * val_ratio)

            # Safety check: at least 1 sample in each split
            if n_train == 0 or n_val == 0 or (n_total - n_train - n_val) == 0:
                print(f"[WARNING] Not enough images to split {disease_path} properly, skipping.")
                continue

            splits = {
                "train": images[:n_train],
                "val": images[n_train:n_train+n_val],
                "test": images[n_train+n_val:]
            }

            for split_name, split_images in splits.items():
                target_dir = os.path.join(SPLIT_DIR, split_name, crop, disease_folder)
                os.makedirs(target_dir, exist_ok=True)

                for img_name in split_images:
                    src_img = os.path.join(disease_path, img_name)
                    dst_img = os.path.join(target_dir, img_name)
                    shutil.copy(src_img, dst_img)

    print("[INFO] Finished creating train/val/test split.")
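The download and split helpers could be chained into a one-off data-preparation step roughly as follows (a sketch; it assumes Kaggle API credentials are configured locally, and note that clean_and_organize_dataset deletes the raw download when it finishes):

from src.data.download import download_and_extract_dataset, clean_and_organize_dataset, check_data_integrity
from src.data.preprocess import split_clean_data

download_and_extract_dataset()      # fetch PlantVillage into data/plant_disease_raw/
clean_and_organize_dataset()        # keep Potato/Tomato/Grape classes under data/clean/
check_data_integrity()              # print disease-class counts per crop
split_clean_data(train_ratio=0.7, val_ratio=0.15, test_ratio=0.15)   # build data/split/{train,val,test}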
src/model/__init__.py
ADDED
File without changes
src/model/architecture.py
ADDED
@@ -0,0 +1,28 @@
import torch
import torch.nn as nn
from torchvision import models

def build_model(num_classes, freeze_backbone=True):
    """
    Build and return a MobileNetV2 model fine-tuned for our custom classes.

    Args:
        num_classes (int): Number of disease classes
        freeze_backbone (bool): If True, freeze feature extractor layers

    Returns:
        model (nn.Module)
    """
    model = models.mobilenet_v2(weights='IMAGENET1K_V1')

    if freeze_backbone:
        for param in model.features.parameters():
            param.requires_grad = False

    # Replace the classifier
    model.classifier = nn.Sequential(
        nn.Dropout(0.2),
        nn.Linear(model.last_channel, num_classes)
    )

    return model
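A quick sketch of building the classifier and checking what is trainable (17 classes matches models/labels.json in this commit):

import torch
from src.model.architecture import build_model

model = build_model(num_classes=17, freeze_backbone=True)

trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)
total = sum(p.numel() for p in model.parameters())
print(f"trainable parameters: {trainable:,} of {total:,}")   # only the new classifier head when frozen

with torch.no_grad():
    logits = model(torch.randn(1, 3, 224, 224))
print(logits.shape)   # torch.Size([1, 17])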
src/model/gradcam.py
ADDED
@@ -0,0 +1,63 @@
# src/model/gradcam.py

import torch
import torch.nn.functional as F
import numpy as np
import cv2

class GradCAMPlusPlus:
    def __init__(self, model, target_layer):
        self.model = model
        self.model.eval()
        self.target_layer = target_layer

        self.gradients = None
        self.activations = None

        # Hook to capture activations and gradients
        target_layer.register_forward_hook(self._save_activations)
        target_layer.register_full_backward_hook(self._save_gradients)

    def _save_activations(self, module, input, output):
        self.activations = output.detach()

    def _save_gradients(self, module, grad_input, grad_output):
        self.gradients = grad_output[0].detach()

    def generate(self, input_tensor, class_idx=None):
        # Forward pass
        output = self.model(input_tensor)
        if class_idx is None:
            class_idx = output.argmax(dim=1).item()

        # Zero gradients
        self.model.zero_grad()

        # Backward pass
        loss = output[0, class_idx]
        loss.backward(retain_graph=True)

        # GradCAM++ calculation
        grads = self.gradients  # [batch, channels, height, width]
        activations = self.activations

        grads_power_2 = grads ** 2
        grads_power_3 = grads ** 3

        sum_grads = torch.sum(grads, dim=(2, 3), keepdim=True)

        eps = 1e-8  # Avoid divide-by-zero
        alpha_numer = grads_power_2
        alpha_denom = 2 * grads_power_2 + sum_grads * grads_power_3
        alpha_denom = torch.where(alpha_denom != 0.0, alpha_denom, torch.ones_like(alpha_denom))
        alphas = alpha_numer / alpha_denom

        weights = (alphas * F.relu(grads)).sum(dim=(2, 3), keepdim=True)

        cam = (weights * activations).sum(dim=1).squeeze()

        cam = F.relu(cam)
        cam = cam.cpu().numpy()
        cam = cv2.resize(cam, (input_tensor.shape[2], input_tensor.shape[3]))
        cam = (cam - np.min(cam)) / (np.max(cam) - np.min(cam) + eps)
        return cam
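A sketch of applying the Grad-CAM++ helper to one preprocessed image, mirroring what app.py does (get_transforms comes from src/utils/transforms.py later in this commit; the backward pass needs gradients, so the call is not wrapped in torch.no_grad()):

import torch
from PIL import Image
from src.model.architecture import build_model
from src.model.gradcam import GradCAMPlusPlus
from src.utils.config import BEST_MODEL_PATH
from src.utils.transforms import get_transforms

device = torch.device("cpu")
model = build_model(num_classes=17, freeze_backbone=False)
model.load_state_dict(torch.load(BEST_MODEL_PATH, map_location=device))
model.eval()

img = Image.open("sample_images/tomato_tomato_late_blight_sample_11.jpg").convert("RGB")
x = get_transforms("val")(img).unsqueeze(0)                 # [1, 3, 224, 224]

cam = GradCAMPlusPlus(model, target_layer=model.features[-1]).generate(x)
print(cam.shape, cam.min(), cam.max())                      # 224x224 heatmap scaled to [0, 1]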
src/model/predict.py
ADDED
@@ -0,0 +1,26 @@
import torch
from PIL import Image

def predict_single_image(model, image_path, transform, class_idx_to_name, device):
    """
    Predict the class of a single image.

    Args:
        model: Trained model
        image_path (str): Path to the image
        transform: Transformations to apply
        class_idx_to_name (dict): Mapping from class index to class name
        device: torch.device
    """
    model.eval()

    img = Image.open(image_path).convert("RGB")
    img = transform(img).unsqueeze(0)  # Add batch dimension
    img = img.to(device)

    with torch.no_grad():
        output = model(img)
        _, pred = torch.max(output, 1)

    predicted_class = class_idx_to_name[pred.item()]
    return predicted_class
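A usage sketch for this helper (note that predict_single_image indexes the mapping with an int, while models/labels.json stores string keys, so the keys are converted first):

import json
import torch
from src.model.architecture import build_model
from src.model.predict import predict_single_image
from src.utils.config import BEST_MODEL_PATH, LABELS_MAP_PATH
from src.utils.transforms import get_transforms

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

with open(LABELS_MAP_PATH) as f:
    idx_to_name = {int(k): v for k, v in json.load(f).items()}

model = build_model(num_classes=len(idx_to_name), freeze_backbone=False).to(device)
model.load_state_dict(torch.load(BEST_MODEL_PATH, map_location=device))

pred = predict_single_image(model, "sample_images/grape_healthy_sample_5.jpg",
                            get_transforms("test"), idx_to_name, device)
print(pred)   # e.g. "grape___Grape___healthy"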
src/model/train.py
ADDED
@@ -0,0 +1,48 @@
import torch
import torch.nn as nn

def train_one_epoch(model, dataloader, optimizer, criterion, device):
    model.train()
    running_loss = 0.0
    correct = 0
    total = 0

    for images, labels in dataloader:
        images, labels = images.to(device), labels.to(device)

        optimizer.zero_grad()
        outputs = model(images)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()

        running_loss += loss.item()
        _, preds = outputs.max(1)
        correct += preds.eq(labels).sum().item()
        total += labels.size(0)

    epoch_loss = running_loss / len(dataloader)
    epoch_acc = correct / total
    return epoch_loss, epoch_acc

def validate_one_epoch(model, dataloader, criterion, device):
    model.eval()
    running_loss = 0.0
    correct = 0
    total = 0

    with torch.no_grad():
        for images, labels in dataloader:
            images, labels = images.to(device), labels.to(device)

            outputs = model(images)
            loss = criterion(outputs, labels)

            running_loss += loss.item()
            _, preds = outputs.max(1)
            correct += preds.eq(labels).sum().item()
            total += labels.size(0)

    epoch_loss = running_loss / len(dataloader)
    epoch_acc = correct / total
    return epoch_loss, epoch_acc
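A condensed sketch of driving these two helpers from a training script (hyperparameters are illustrative, not necessarily those used to produce the released checkpoint):

import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from src.data.augment import AugmentationPipeline
from src.data.dataset import PlantVillageDataset
from src.model.architecture import build_model
from src.model.train import train_one_epoch, validate_one_epoch
from src.utils.config import TRAIN_DIR, VAL_DIR, BEST_MODEL_PATH

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
aug = AugmentationPipeline()
train_loader = DataLoader(PlantVillageDataset(TRAIN_DIR, aug.get_transforms("train")),
                          batch_size=32, shuffle=True)
val_loader = DataLoader(PlantVillageDataset(VAL_DIR, aug.get_transforms("val")), batch_size=32)

model = build_model(num_classes=17, freeze_backbone=True).to(device)
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam((p for p in model.parameters() if p.requires_grad), lr=1e-3)

best_val_acc = 0.0
for epoch in range(10):
    train_loss, train_acc = train_one_epoch(model, train_loader, optimizer, criterion, device)
    val_loss, val_acc = validate_one_epoch(model, val_loader, criterion, device)
    print(f"epoch {epoch}: train acc {train_acc:.3f}, val acc {val_acc:.3f}")
    if val_acc > best_val_acc:                     # keep only the best checkpoint
        best_val_acc = val_acc
        torch.save(model.state_dict(), BEST_MODEL_PATH)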
src/models/cropguard_best.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:926993adb8aba8af6cc714d23b580bf1d4d8a690b89ab5a9904ede4d57166e76
size 9225099
src/models/labels.json
ADDED
@@ -0,0 +1,19 @@
{
    "0": "grape___Grape___Black_rot",
    "1": "grape___Grape___Esca_(Black_Measles)",
    "2": "grape___Grape___Leaf_blight_(Isariopsis_Leaf_Spot)",
    "3": "grape___Grape___healthy",
    "4": "potato___Potato___Early_blight",
    "5": "potato___Potato___Late_blight",
    "6": "potato___Potato___healthy",
    "7": "tomato___Tomato___Bacterial_spot",
    "8": "tomato___Tomato___Early_blight",
    "9": "tomato___Tomato___Late_blight",
    "10": "tomato___Tomato___Leaf_Mold",
    "11": "tomato___Tomato___Septoria_leaf_spot",
    "12": "tomato___Tomato___Spider_mites Two-spotted_spider_mite",
    "13": "tomato___Tomato___Target_Spot",
    "14": "tomato___Tomato___Tomato_Yellow_Leaf_Curl_Virus",
    "15": "tomato___Tomato___Tomato_mosaic_virus",
    "16": "tomato___Tomato___healthy"
}
src/sample_images/grape_grape_esca_(black_measles)_sample_8.jpg
ADDED
src/sample_images/grape_grape_leaf_blight_(isariopsis_leaf_spot)_sample_7.jpg
ADDED
src/sample_images/grape_healthy_sample_5.jpg
ADDED
src/sample_images/grape_healthy_sample_6.jpg
ADDED
src/sample_images/potato_healthy_sample_1.jpg
ADDED
src/sample_images/potato_healthy_sample_2.jpg
ADDED
src/sample_images/potato_potato_early_blight_sample_3.jpg
ADDED
src/sample_images/potato_potato_early_blight_sample_4.jpg
ADDED
src/sample_images/tomato_healthy_sample_10.jpg
ADDED
src/sample_images/tomato_healthy_sample_9.jpg
ADDED
src/sample_images/tomato_tomato_late_blight_sample_11.jpg
ADDED
src/sample_images/tomato_tomato_spider_mites_two-spotted_spider_mite_sample_12.jpg
ADDED
src/utils/__init__.py
ADDED
File without changes
src/utils/config.py
ADDED
@@ -0,0 +1,41 @@
import os

# -------------------------------
# Paths
# -------------------------------

# Dynamically find the real project root (CropGuard/)
CURRENT_FILE = os.path.abspath(__file__)
SRC_DIR = os.path.dirname(os.path.dirname(CURRENT_FILE))  # src/
PROJECT_ROOT = os.path.dirname(SRC_DIR)                    # CropGuard/

DATA_DIR = os.path.join(PROJECT_ROOT, "data")
CLEAN_DIR = os.path.join(DATA_DIR, "clean")
SPLIT_DIR = os.path.join(DATA_DIR, "split")

TRAIN_DIR = os.path.join(SPLIT_DIR, "train")
VAL_DIR = os.path.join(SPLIT_DIR, "val")
TEST_DIR = os.path.join(SPLIT_DIR, "test")

MODEL_DIR = os.path.join(PROJECT_ROOT, "models")
BEST_MODEL_PATH = os.path.join(MODEL_DIR, "cropguard_best.pt")
LABELS_MAP_PATH = os.path.join(MODEL_DIR, "labels.json")

# Target folders for download.py
RAW_DIR = os.path.join(DATA_DIR, "plant_disease_raw")
CLEAN_DIR = os.path.join(DATA_DIR, "clean")

# Target crops
TARGET_CROPS = ["Potato___", "Tomato___", "Grape___"]

# Binary classification mapping (0=healthy, 1=sick)
BINARY_CLASSES = {
    "healthy": 0,
    "sick": 1
}

# -------------------------------
# Random Seed
# -------------------------------

SEED = 42
src/utils/metrics.py
ADDED
File without changes
src/utils/transforms.py
ADDED
@@ -0,0 +1,20 @@
import torchvision.transforms as T

def get_transforms(phase="train"):
    if phase == "train":
        return T.Compose([
            T.RandomResizedCrop(224),
            T.RandomHorizontalFlip(),
            T.RandomRotation(20),
            T.ColorJitter(brightness=0.2, contrast=0.2, saturation=0.2),
            T.ToTensor(),
            T.Normalize(mean=[0.485, 0.456, 0.406],
                        std=[0.229, 0.224, 0.225]),
        ])
    else:
        return T.Compose([
            T.Resize((224, 224)),
            T.ToTensor(),
            T.Normalize(mean=[0.485, 0.456, 0.406],
                        std=[0.229, 0.224, 0.225]),
        ])
src/{__init__.py}
ADDED
File without changes