from google.colab import drive
drive.mount('/content/drive')

%cd /content/drive/MyDrive/yolov12-main
!pip install thop

import os

# Base directory (change this to your own project path)
base_dir = r"/content/drive/MyDrive/yolov12-main"

# Dataset base name
base_name = "Kahramanmaras_EQ"  # Example: "Kahramanmaras_EQ"

# List of splits
splits = ["mirrored", "mirrored_cross1", "mirrored_cross2", "mirrored_cross3", "mirrored_cross4"]

# Image file extensions to include
image_exts = ('.png', '.jpg', '.jpeg')

for split in splits:
    dataset_folder = f"{base_name}_{split}"
    images_dir = os.path.join(base_dir, dataset_folder, "images")

    # Define input directories
    train_image_dir = os.path.join(images_dir, "train")
    valid_image_dir = os.path.join(images_dir, "valid")
    test_image_dir = os.path.join(images_dir, "test")

    # Define output txt paths
    train_txt_path = os.path.join(base_dir, dataset_folder, "train.txt")
    valid_txt_path = os.path.join(base_dir, dataset_folder, "valid.txt")
    test_txt_path = os.path.join(base_dir, dataset_folder, "test.txt")

    # Function to write a file list
    def write_txt_file(image_dir, txt_path):
        with open(txt_path, 'w') as txt_file:
            for image_file in sorted(os.listdir(image_dir)):
                if image_file.lower().endswith(image_exts):
                    image_path = os.path.join(image_dir, image_file)
                    txt_file.write(f"{image_path.replace(os.sep, '/')}\n")

    # Generate txt files
    write_txt_file(train_image_dir, train_txt_path)
    write_txt_file(valid_image_dir, valid_txt_path)
    write_txt_file(test_image_dir, test_txt_path)

    print(f"{dataset_folder}: train.txt, valid.txt, test.txt files successfully created.")

import os

# Base directory (change this to your own project path)
# Example: "/home/user/yolov12-main" or "C:/Users/Student/yolov12-main"
base_dir = r"/content/drive/MyDrive/yolov12-main"

# Output directory for YAML files
output_dir = os.path.join(base_dir, "ultralytics", "data")

# Dataset base name (set this to your dataset prefix)
base_name = "Kahramanmaras_EQ"  # Example: "Kahramanmaras_EQ"

# List of dataset splits
splits = ["mirrored", "mirrored_cross1", "mirrored_cross2", "mirrored_cross3", "mirrored_cross4"]

# Class information
nc = 3
names = ["non-collapsed building", "collapsed building", "tent"]

# Create YAML files for each split
for split in splits:
    dataset_folder = f"{base_name}_{split}"
    dataset_path = os.path.join(base_dir, dataset_folder)

    # Content for the train YAML
    train_yaml = f"""# {base_name} Dataset
train: {dataset_path}/train.txt
val: {dataset_path}/valid.txt

nc: {nc}
names:
  - {names[0]}
  - {names[1]}
  - {names[2]}
"""

    # Content for the test YAML
    test_yaml = f"""# {base_name} Test Dataset
train: {dataset_path}/train.txt
val: {dataset_path}/valid.txt
test: {dataset_path}/test.txt

nc: {nc}
names:
  - {names[0]}
  - {names[1]}
  - {names[2]}
"""

    # Define file paths
    train_yaml_path = os.path.join(output_dir, f"{dataset_folder}.yaml")
    test_yaml_path = os.path.join(output_dir, f"{dataset_folder}_test.yaml")

    # Write YAML files
    with open(train_yaml_path, "w") as f:
        f.write(train_yaml)
    with open(test_yaml_path, "w") as f:
        f.write(test_yaml)

    print(f"Saved: {train_yaml_path}")
    print(f"Saved: {test_yaml_path}")
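# --- Optional sanity check (added illustration, not part of the original listing) ---
# A minimal sketch to confirm that the generated YAML files parse and that the
# paths listed in train.txt exist before training starts. It reuses base_dir,
# base_name and splits from the cells above and assumes PyYAML is available
# (it ships with Colab and is also an Ultralytics dependency).
import os
import yaml

for split in splits:
    dataset_folder = f"{base_name}_{split}"
    yaml_path = os.path.join(base_dir, "ultralytics", "data", f"{dataset_folder}.yaml")
    with open(yaml_path) as f:
        cfg = yaml.safe_load(f)  # raises an error if the YAML is malformed
    print(f"{dataset_folder}: nc={cfg['nc']}, names={cfg['names']}")

    # Check that the first few image paths listed in train.txt actually exist
    with open(cfg["train"]) as f:
        sample_paths = [line.strip() for line in f][:5]
    missing = [p for p in sample_paths if not os.path.exists(p)]
    print(f"  checked {len(sample_paths)} sample paths, missing: {len(missing)}")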
!pip install ultralytics

from ultralytics import YOLO

if __name__ == "__main__":
    # Load the YOLOv12 model from its config
    # Example: YOLOv12 → "ultralytics/cfg/models/v12/yolov12.yaml"
    model = YOLO(r"/content/drive/MyDrive/yolov12-main/ultralytics/cfg/models/v12/yolov12.yaml")

    # Start training
    model.train(
        data=r"/content/drive/MyDrive/yolov12-main/ultralytics/data/Kahramanmaras_EQ_mirrored.yaml",  # replace with your dataset YAML
        epochs=150,   # number of training epochs (increase for better performance, decrease for quick testing)
        batch=8,      # batch size (increase if you have more GPU memory; decrease if you get out-of-memory errors)
        imgsz=1280,   # input image size
        device="0",   # GPU device (e.g., "0"); to use CPU, change this to "cpu"
        workers=0,    # set to 0 on Windows to avoid multiprocessing errors
        save=True     # save the trained model
    )
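# --- Optional test-set evaluation (added illustration, not part of the original listing) ---
# The *_test.yaml files generated above are not used during training; this sketch shows
# how they could be used to score the trained model on the held-out test split.
# The weights path assumes Ultralytics' default output location
# (runs/detect/train/weights/best.pt); adjust it to your actual run folder.
from ultralytics import YOLO

trained = YOLO(r"/content/drive/MyDrive/yolov12-main/runs/detect/train/weights/best.pt")

metrics = trained.val(
    data=r"/content/drive/MyDrive/yolov12-main/ultralytics/data/Kahramanmaras_EQ_mirrored_test.yaml",
    split="test",   # evaluate on the 'test:' entry of the YAML instead of 'val:'
    imgsz=1280,     # match the training image size
    device="0"
)

print(f"mAP50-95: {metrics.box.map:.4f}")
print(f"mAP50:    {metrics.box.map50:.4f}")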