From 58e043744795c694f4e4cbff290ca1ec2c9fc9cb Mon Sep 17 00:00:00 2001
From: nathanhubens
Date: Thu, 27 Jun 2024 13:31:54 +0200
Subject: [PATCH] update YOLO

---
 nbs/11_tutorial.YOLOV8.ipynb | 4983 +++++-----------------------------
 1 file changed, 723 insertions(+), 4260 deletions(-)

diff --git a/nbs/11_tutorial.YOLOV8.ipynb b/nbs/11_tutorial.YOLOV8.ipynb
index 606dbee..045259e 100644
--- a/nbs/11_tutorial.YOLOV8.ipynb
+++ b/nbs/11_tutorial.YOLOV8.ipynb
@@ -21,6 +21,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "#| include: false\n",
+    "\n",
     "import argparse\n",
     "import math\n",
     "import os\n",
@@ -64,6 +66,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "#| include: false\n",
+    "\n",
     "def infer_shortcut(bottleneck):\n",
     "    c1 = bottleneck.cv1.conv.in_channels\n",
     "    c2 = bottleneck.cv2.conv.out_channels\n",
@@ -244,6 +248,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "#| include: false\n",
+    "\n",
     "def prune(args):\n",
     "    # load trained yolov8 model\n",
     "    model = YOLO(args.model)\n",
@@ -253,6 +259,7 @@
     "    \n",
     "    pruning_cfg['data'] = \"coco128.yaml\"\n",
     "    pruning_cfg['epochs'] = 10\n",
+    "    pruning_cfg['verbose'] = False\n",
     "    \n",
     "    model.model.train()\n",
     "    replace_c2f_with_c2f_v2(model.model)\n",
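A note for readers of this diff: the cells hidden above with `#| include: false` hold the torch-pruning setup that the rendered tutorial only summarizes. A minimal sketch of the loop that `prune()` implements (the sparsity, step count, and layer filter below are illustrative, not the notebook's exact settings):

```python
import torch
import torch_pruning as tp
from ultralytics import YOLO

model = YOLO("yolov8l.pt")           # trained model to prune
model.model.train()                  # the notebook also rewrites C2f blocks
                                     # into pruning-friendly C2f_v2 first
example_inputs = torch.randn(1, 3, 640, 640)

# detection heads are left untouched
ignored_layers = [m for m in model.model.modules()
                  if m.__class__.__name__ == "Detect"]

pruner = tp.pruner.MagnitudePruner(
    model.model,
    example_inputs,
    importance=tp.importance.MagnitudeImportance(p=2),  # L2 channel norm
    iterative_steps=16,     # illustrative: prune a little, fine-tune, repeat
    ch_sparsity=0.5,        # illustrative overall channel-sparsity target
    ignored_layers=ignored_layers,
)

for step in range(16):
    pruner.step()           # drop the lowest-importance channel groups
    # ... fine-tune with model.train(**pruning_cfg), then re-validate ...
```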
@@ -386,87 +394,17 @@
    "name": "stderr",
    "output_type": "stream",
    "text": [
-    "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n",
+    "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
     "YOLOv8l summary (fused): 285 layers, 43668288 parameters, 0 gradients, 165.2 GFLOPs\n",
-    "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n",
-    "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████\n",
+    "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
+    "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929      0.731      0.768      0.828      0.659\n",
-    "                person        128        254      0.848      0.776      0.856      0.675\n",
-    [... 69 per-class rows (bicycle … teddy bear) ...]
-    "            toothbrush        128          5      0.875          1      0.995      0.833\n",
     "Speed: 0.1ms preprocess, 7.7ms inference, 0.0ms loss, 0.6ms postprocess per image\n",
     "Results saved to \u001b[1mruns/detect/val59\u001b[0m\n",
     "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
     "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=None, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/train49\n",
     "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n",
     "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n"
    ]
   },
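The long `task=detect, mode=train, ...` line is the trainer echoing back its full argument dictionary; the source hunk above only overrides three entries. Assuming the ultralytics 8.0.x module layout, the cfg handling reduces to something like this sketch:

```python
from ultralytics import YOLO
from ultralytics.yolo.utils import yaml_load
from ultralytics.yolo.utils.checks import check_yaml

pruning_cfg = yaml_load(check_yaml("default.yaml"))  # full default arg dict
pruning_cfg['data'] = "coco128.yaml"
pruning_cfg['epochs'] = 10
pruning_cfg['verbose'] = False  # the override this patch adds: it silences
                                # the per-class tables in every later cell

model = YOLO("yolov8l.pt")
model.train(**pruning_cfg)      # prints the task=detect, mode=train, ... echo
```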
   {
    "name": "stderr",
    "output_type": "stream",
    "text": [
     "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n",
     "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
     "Plotting labels to runs/detect/train49/labels.jpg... \n",
     "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n",
     "Image sizes 640 train, 640 val\n",
     "Using 8 dataloader workers\n",
     "Logging results to \u001b[1mruns/detect/train49\u001b[0m\n",
     "Starting training for 10 epochs...\n",
     "Closing dataloader mosaic\n",
     "\n",
     "      Epoch    GPU_mem   box_loss   cls_loss   dfl_loss  Instances       Size\n",
     "       1/10      14.4G     0.8537     0.7447      1.082        122        640: 100%|██████████| 8/8 [00:02\n",
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929      0.776      0.741      0.832      0.667\n",
     "\n",
     [... epoch blocks 2/10 through 9/10, same format, mAP50-95 rising 0.697 → 0.754 ...]
     "      Epoch    GPU_mem   box_loss   cls_loss   dfl_loss  Instances       Size\n",
     "      10/10      12.7G     0.7445     0.4764     0.9944        170        640: 100%|██████████| 8/8 [00:02\n",
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929      0.905      0.839      0.905      0.754\n",
     "\n",
     "10 epochs completed in 0.027 hours.\n",
     "Optimizer stripped from runs/detect/train49/weights/last.pt, 175.3MB\n",
     "Optimizer stripped from runs/detect/train49/weights/best.pt, 175.3MB\n",
     "\n",
     "Validating runs/detect/train49/weights/best.pt...\n",
     "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
     "YOLOv8l summary (fused): 285 layers, 43668288 parameters, 0 gradients, 165.2 GFLOPs\n",
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929      0.904      0.841      0.905      0.755\n",
     "                person        128        254      0.953      0.752        0.9      0.727\n",
     [... 69 per-class rows (bicycle … teddy bear) ...]
     "            toothbrush        128          5      0.956          1      0.995      0.895\n",
" suitcase 128 4 1 0.926 0.995 0.724\n", - " frisbee 128 5 0.937 0.8 0.886 0.799\n", - " skis 128 1 0.79 1 0.995 0.995\n", - " snowboard 128 7 0.963 0.857 0.87 0.794\n", - " sports ball 128 6 0.754 0.514 0.762 0.423\n", - " kite 128 10 1 0.462 0.711 0.317\n", - " baseball bat 128 4 1 1 0.995 0.727\n", - " baseball glove 128 7 0.849 0.429 0.563 0.36\n", - " skateboard 128 5 1 0.771 0.938 0.648\n", - " tennis racket 128 7 1 0.686 0.723 0.552\n", - " bottle 128 18 0.959 0.611 0.864 0.65\n", - " wine glass 128 16 0.908 0.615 0.909 0.617\n", - " cup 128 36 0.955 0.917 0.973 0.768\n", - " fork 128 6 0.922 0.833 0.909 0.685\n", - " knife 128 16 0.926 0.783 0.908 0.717\n", - " spoon 128 22 0.949 0.773 0.845 0.634\n", - " bowl 128 28 0.957 0.796 0.936 0.782\n", - " banana 128 1 0.749 1 0.995 0.895\n", - " sandwich 128 2 0.833 1 0.995 0.995\n", - " orange 128 4 0.791 1 0.995 0.859\n", - " broccoli 128 11 0.844 0.545 0.643 0.458\n", - " carrot 128 24 0.954 0.867 0.955 0.754\n", - " hot dog 128 2 0.834 1 0.995 0.995\n", - " pizza 128 5 0.921 1 0.995 0.914\n", - " donut 128 14 0.892 1 0.99 0.925\n", - " cake 128 4 0.91 1 0.995 0.947\n", - " chair 128 35 0.947 0.686 0.92 0.67\n", - " couch 128 6 0.699 0.833 0.903 0.651\n", - " potted plant 128 14 0.872 1 0.995 0.838\n", - " bed 128 3 0.881 1 0.995 0.941\n", - " dining table 128 13 0.848 0.858 0.926 0.843\n", - " toilet 128 2 0.845 1 0.995 0.995\n", - " tv 128 2 0.852 1 0.995 0.947\n", - " laptop 128 3 1 0.95 0.995 0.93\n", - " mouse 128 2 0.745 0.5 0.828 0.483\n", - " remote 128 8 1 0.701 0.765 0.675\n", - " cell phone 128 8 0.892 0.625 0.664 0.485\n", - " microwave 128 3 0.892 1 0.995 0.995\n", - " oven 128 5 0.787 0.8 0.759 0.66\n", - " sink 128 6 0.769 0.565 0.737 0.572\n", - " refrigerator 128 5 0.82 1 0.995 0.89\n", - " book 128 29 0.863 0.414 0.726 0.519\n", - " clock 128 9 1 0.997 0.995 0.915\n", - " vase 128 2 0.878 1 0.995 0.921\n", - " scissors 128 1 0.686 1 0.995 0.398\n", - " teddy bear 128 21 0.917 0.905 0.948 0.772\n", - " toothbrush 128 5 0.956 1 0.995 0.895\n", - "Speed: 0.1ms preprocess, 4.8ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/train46\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Speed: 0.1ms preprocess, 4.2ms inference, 0.0ms loss, 0.3ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/train49\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 43668288 parameters, 0 gradients, 165.2 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929      0.917      0.823      0.901      0.754\n",
     "                person        128        254      0.964      0.731      0.902      0.724\n",
     [... 69 per-class rows (bicycle … teddy bear) ...]
     "            toothbrush        128          5      0.958          1      0.995      0.895\n",
     "Speed: 0.2ms preprocess, 10.8ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
     "Results saved to \u001b[1mruns/detect/baseline_val184\u001b[0m\n"
    ]
   },
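The two validations above establish the reference point: the first pass scores the stock YOLOv8l, the second (`baseline_val`, batch=1) re-scores it after the C2f rewrite. A sketch of how the baseline gets recorded, using the names from the `prune()` cell (all inputs are illustrative):

```python
from copy import deepcopy
import torch
import torch_pruning as tp
from ultralytics import YOLO

validation_model = YOLO("yolov8l.pt")    # a second wrapper used only for val
pruning_cfg['name'] = "baseline_val"
pruning_cfg['batch'] = 1
validation_model.model.model = deepcopy(model.model.model)
metric = validation_model.val(**pruning_cfg)
init_map = metric.box.map                # 0.754 in the log above

example_inputs = torch.randn(1, 3, 640, 640)
base_macs, base_params = tp.utils.count_ops_and_params(model.model, example_inputs)
```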
"name": "stderr", "output_type": "stream", "text": [ - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -733,84 +528,13 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 43325836 parameters, 74176 gradients, 163.3 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.883 0.849 0.903 0.743\n", - " person 128 254 0.95 0.756 0.891 0.713\n", - " bicycle 128 6 0.963 0.667 0.733 0.597\n", - " car 128 46 1 0.41 0.697 0.353\n", - " motorcycle 128 5 0.913 1 0.995 0.905\n", - " airplane 128 6 0.926 1 0.995 0.916\n", - " bus 128 7 0.906 1 0.995 0.835\n", - " train 128 3 0.865 1 0.995 0.952\n", - " truck 128 12 0.943 0.583 0.759 0.514\n", - " boat 128 6 1 0.66 0.851 0.645\n", - " traffic light 128 14 1 0.441 0.587 0.351\n", - " stop sign 128 2 0.814 1 0.995 0.995\n", - " bench 128 9 0.965 0.889 0.975 0.807\n", - " bird 128 16 1 0.957 0.995 0.805\n", - " cat 128 4 0.882 1 0.995 0.888\n", - " dog 128 9 0.939 1 0.995 0.939\n", - " horse 128 2 0.854 1 0.995 0.81\n", - " elephant 128 17 1 0.911 0.949 0.806\n", - " bear 128 1 0.716 1 0.995 0.895\n", - " zebra 128 4 0.893 1 0.995 0.995\n", - " giraffe 128 9 0.931 1 0.995 0.895\n", - " backpack 128 6 0.83 0.833 0.84 0.731\n", - " umbrella 128 18 0.899 1 0.992 0.8\n", - " handbag 128 19 0.868 0.684 0.787 0.61\n", - " tie 128 7 0.964 0.857 0.862 0.757\n", - " suitcase 128 4 0.945 1 0.995 0.699\n", - " frisbee 128 5 0.902 0.8 0.821 0.716\n", - " skis 128 1 0.726 1 0.995 0.895\n", - " snowboard 128 7 0.945 0.857 0.868 0.794\n", - " sports ball 128 6 1 0.646 0.74 0.357\n", - " kite 128 10 1 0.46 0.704 0.332\n", - " baseball bat 128 4 0.715 0.75 0.752 0.703\n", - " baseball glove 128 7 0.838 0.429 0.607 0.379\n", - " skateboard 128 5 1 0.91 0.995 0.649\n", - " tennis racket 128 7 1 0.711 0.723 0.514\n", - " bottle 128 18 1 0.667 0.941 0.684\n", - " wine glass 128 16 0.917 0.687 0.906 0.618\n", - " cup 128 36 0.944 0.933 0.974 0.74\n", - " fork 128 6 1 0.769 0.838 0.638\n", - " knife 128 16 0.874 0.875 0.908 0.72\n", - " spoon 128 22 0.872 0.773 0.808 0.634\n", - " bowl 128 28 0.958 0.786 0.901 0.755\n", - " banana 128 1 0.702 1 0.995 0.995\n", - " sandwich 128 2 0.797 1 0.995 0.995\n", - " orange 128 4 0.818 1 0.995 0.823\n", - " broccoli 128 11 0.839 0.477 0.652 0.505\n", - " carrot 128 24 0.915 0.892 0.951 0.762\n", - " hot dog 128 2 0.801 1 0.995 0.995\n", - " pizza 128 5 0.902 1 0.995 0.842\n", - " donut 128 14 0.888 1 0.99 0.927\n", - " cake 128 4 0.888 1 0.995 0.91\n", - " chair 128 35 0.908 0.771 0.929 0.652\n", - " couch 128 6 0.734 1 0.955 0.75\n", - " potted plant 128 14 0.881 1 0.995 0.804\n", - " bed 128 3 0.855 1 0.995 0.909\n", - " dining table 128 13 0.924 0.846 0.922 0.847\n", - " toilet 128 2 0.838 1 0.995 0.947\n", - " tv 128 2 0.81 1 0.995 0.946\n", - " laptop 128 3 1 0.92 0.995 0.866\n", - " mouse 128 2 1 0.764 0.995 0.452\n", - " remote 128 8 0.896 0.625 0.736 0.645\n", - " cell phone 128 8 0.763 0.625 0.713 0.504\n", - " microwave 128 3 0.861 1 
     "Speed: 0.2ms preprocess, 12.4ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
     "Results saved to \u001b[1mruns/detect/step_0_pre_val131\u001b[0m\n",
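`step_0_pre_val` is the model right after the first pruning step, before any recovery fine-tuning: about 342k parameters gone (43,668,288 to 43,325,836) for a pre-recovery mAP of 0.743. When the overall sparsity target is split across several such steps, the per-step ratio has to compound to the target; a worked example of that arithmetic (both constants are illustrative):

```python
import math

target_prune_rate = 0.5   # illustrative overall channel sparsity
iterative_steps = 16      # illustrative number of prune/fine-tune rounds

# fraction of the remaining channels to drop at each step
per_step = 1 - math.pow(1 - target_prune_rate, 1 / iterative_steps)

# the steps compound back to the overall target
remaining = (1 - per_step) ** iterative_steps
assert abs((1 - remaining) - target_prune_rate) < 1e-9
print(f"{per_step:.4f} of channels pruned per step")  # ~0.0424
```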
     "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
     "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, name=step_0_finetune, [... remaining arguments identical to the run above ...], save_dir=runs/detect/step_0_finetune103\n",
     "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n",
     "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n"
    ]
   },
   {
    "name": "stderr",
    "output_type": "stream",
    "text": [
     "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n",
     "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
     "Plotting labels to runs/detect/step_0_finetune103/labels.jpg... \n",
     "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n",
     "Image sizes 640 train, 640 val\n",
     "Using 8 dataloader workers\n",
     "Logging results to \u001b[1mruns/detect/step_0_finetune103\u001b[0m\n",
     "Starting training for 10 epochs...\n",
     "Closing dataloader mosaic\n",
     "\n",
     "      Epoch    GPU_mem   box_loss   cls_loss   dfl_loss  Instances       Size\n",
     "       1/10      13.6G     0.7161     0.4777     0.9953        122        640: 100%|██████████| 8/8 [00:03\n",
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929       0.92      0.841      0.907       0.75\n",
     "\n",
     [... epoch blocks 2/10 through 9/10, same format, mAP50-95 rising 0.765 → 0.794 ...]
     "      Epoch    GPU_mem   box_loss   cls_loss   dfl_loss  Instances       Size\n",
     "      10/10      13.2G     0.6705     0.4277     0.9541        170        640: 100%|██████████| 8/8 [00:02\n",
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929      0.899       0.87      0.927      0.793\n",
     "\n",
     "10 epochs completed in 0.038 hours.\n",
     "Optimizer stripped from runs/detect/step_0_finetune103/weights/last.pt, 173.9MB\n",
     "Optimizer stripped from runs/detect/step_0_finetune103/weights/best.pt, 173.9MB\n",
     "\n",
     "Validating runs/detect/step_0_finetune103/weights/best.pt...\n",
     "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
     "YOLOv8l summary (fused): 285 layers, 43325836 parameters, 0 gradients, 163.3 GFLOPs\n",
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929      0.895       0.87      0.925      0.793\n",
     "                person        128        254      0.948      0.792      0.922      0.754\n",
     [... 69 per-class rows (bicycle … teddy bear) ...]
     "            toothbrush        128          5      0.813          1      0.995      0.907\n",
     "Speed: 0.1ms preprocess, 5.1ms inference, 0.0ms loss, 0.2ms postprocess per image\n",
     "Results saved to \u001b[1mruns/detect/step_0_finetune103\u001b[0m\n",
     "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n"
    ]
   },
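Each `step_*_post_val` run follows the pattern from the `prune()` cell: the fine-tuned weights are reloaded from `trainer.best`, then the pruned module object is swapped in so `val()` runs the pruned graph rather than the architecture stored in the checkpoint. Roughly:

```python
from copy import deepcopy
from ultralytics import YOLO

validation_model = YOLO(model.trainer.best)     # best fine-tuned checkpoint
validation_model.model = deepcopy(model.model)  # graft in the pruned module
pruning_cfg['name'] = "step_0_post_val"         # run name is illustrative
pruning_cfg['batch'] = 1
metric = validation_model.val(**pruning_cfg)
current_map = metric.box.map
print(f"After fine tuning mAP={current_map}")
```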
   {
    "name": "stderr",
    "output_type": "stream",
    "text": [
     "YOLOv8l summary (fused): 285 layers, 43325836 parameters, 0 gradients, 163.3 GFLOPs\n",
     "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929      0.897      0.863      0.922      0.787\n",
     "                person        128        254      0.961      0.787      0.921      0.754\n",
     [... 69 per-class rows (bicycle … teddy bear) ...]
     "            toothbrush        128          5      0.809          1      0.995      0.924\n",
     "Speed: 0.2ms preprocess, 12.4ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
     "Results saved to \u001b[1mruns/detect/step_0_post_val75\u001b[0m\n",
     "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n"
    ]
   },
   {
    "name": "stderr",
    "output_type": "stream",
    "text": [
     "YOLOv8l summary (fused): 285 layers, 43081939 parameters, 74176 gradients, 162.7 GFLOPs\n",
     "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929      0.913      0.872      0.929      0.788\n",
     "                person        128        254      0.956      0.787      0.917       0.75\n",
     [... 69 per-class rows (bicycle … teddy bear) ...]
     "            toothbrush        128          5       0.88          1      0.995      0.924\n",
     "Speed: 0.1ms preprocess, 12.5ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
     "Results saved to \u001b[1mruns/detect/step_1_pre_val66\u001b[0m\n",
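The fused summaries track the cumulative effect so far: 43,668,288 to 43,325,836 to 43,081,939 parameters, and 165.2 to 163.3 to 162.7 GFLOPs, with mAP at or above the 0.754 baseline. A sketch of how such a loop can measure progress and bail out, reusing names from the sketches above (the stopping rule is an assumption, it is not shown in this diff):

```python
import torch_pruning as tp

# cost after the latest pruning step (example_inputs/base_macs from earlier)
pruned_macs, pruned_params = tp.utils.count_ops_and_params(model.model, example_inputs)
print(f"speed-up vs baseline: {base_macs / pruned_macs:.3f}x, "
      f"params: {pruned_params / 1e6:.2f}M")

max_map_drop = 0.2  # illustrative tolerance
if current_map < init_map * (1 - max_map_drop):
    print("Pruning stopped early: mAP dropped below the allowed margin")
```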
     "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
     "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, name=step_1_finetune, [... remaining arguments identical to the run above ...], save_dir=runs/detect/step_1_finetune62\n",
     "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n",
     "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n"
    ]
   },
   {
    "name": "stderr",
    "output_type": "stream",
    "text": [
     "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n",
     "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
     "Plotting labels to runs/detect/step_1_finetune62/labels.jpg... \n",
     "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n",
     "Image sizes 640 train, 640 val\n",
     "Using 8 dataloader workers\n",
     "Logging results to \u001b[1mruns/detect/step_1_finetune62\u001b[0m\n",
     "Starting training for 10 epochs...\n",
     "Closing dataloader mosaic\n",
     "\n",
     "      Epoch    GPU_mem   box_loss   cls_loss   dfl_loss  Instances       Size\n",
     "       1/10      13.3G     0.5906     0.3832     0.9224        122        640: 100%|██████████| 8/8 [00:02\n",
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929      0.903      0.868      0.925      0.797\n",
     "\n",
     [... epoch blocks 2/10 through 9/10, same format, mAP50-95 moving 0.796 → 0.804 ...]
     "      Epoch    GPU_mem   box_loss   cls_loss   dfl_loss  Instances       Size\n",
     "      10/10      13.2G     0.6393      0.408     0.9322        170        640: 100%|██████████| 8/8 [00:02\n",
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
     "                   all        128        929      0.911      0.888      0.937      0.806\n",
     "\n",
     "10 epochs completed in 0.025 hours.\n",
     "Optimizer stripped from runs/detect/step_1_finetune62/weights/last.pt, 173.0MB\n",
     "Optimizer stripped from runs/detect/step_1_finetune62/weights/best.pt, 173.0MB\n",
     "\n",
     "Validating runs/detect/step_1_finetune62/weights/best.pt...\n",
     "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
     "YOLOv8l summary (fused): 285 layers, 43081939 parameters, 0 gradients, 162.7 GFLOPs\n",
     "                 Class     Images  Instances          Box(P          R      mAP50  mAP50-95): 100%|██████████|\n",
Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.911 0.888 0.937 0.806\n", - " person 128 254 0.96 0.843 0.93 0.771\n", - " bicycle 128 6 0.893 0.667 0.811 0.63\n", - " car 128 46 0.861 0.565 0.705 0.407\n", - " motorcycle 128 5 0.919 1 0.995 0.96\n", - " airplane 128 6 0.936 1 0.995 0.966\n", - " bus 128 7 0.955 1 0.995 0.849\n", - " train 128 3 0.883 1 0.995 0.931\n", - " truck 128 12 1 0.651 0.825 0.655\n", - " boat 128 6 1 0.784 0.931 0.665\n", - " traffic light 128 14 0.804 0.571 0.652 0.385\n", - " stop sign 128 2 0.855 1 0.995 0.895\n", - " bench 128 9 1 0.98 0.995 0.892\n", - " bird 128 16 0.978 1 0.995 0.905\n", - " cat 128 4 0.913 1 0.995 0.995\n", - " dog 128 9 0.957 1 0.995 0.949\n", - " horse 128 2 0.839 1 0.995 0.995\n", - " elephant 128 17 0.975 1 0.995 0.898\n", - " bear 128 1 0.743 1 0.995 0.995\n", - " zebra 128 4 0.902 1 0.995 0.965\n", - " giraffe 128 9 0.958 1 0.995 0.963\n", - " backpack 128 6 0.891 0.833 0.842 0.742\n", - " umbrella 128 18 0.946 0.97 0.992 0.835\n", - " handbag 128 19 0.927 0.669 0.855 0.635\n", - " tie 128 7 0.936 0.857 0.865 0.759\n", - " suitcase 128 4 1 0.938 0.995 0.769\n", - " frisbee 128 5 0.919 0.8 0.895 0.787\n", - " skis 128 1 0.756 1 0.995 0.895\n", - " snowboard 128 7 0.945 0.857 0.937 0.823\n", - " sports ball 128 6 1 0.65 0.761 0.434\n", - " kite 128 10 0.922 0.7 0.928 0.468\n", - " baseball bat 128 4 1 0.962 0.995 0.717\n", - " baseball glove 128 7 1 0.496 0.596 0.367\n", - " skateboard 128 5 0.819 1 0.995 0.744\n", - " tennis racket 128 7 0.968 0.714 0.723 0.531\n", - " bottle 128 18 1 0.74 0.94 0.698\n", - " wine glass 128 16 0.805 0.775 0.927 0.655\n", - " cup 128 36 0.943 0.918 0.98 0.781\n", - " fork 128 6 1 0.935 0.995 0.849\n", - " knife 128 16 0.865 0.812 0.91 0.715\n", - " spoon 128 22 0.946 0.864 0.904 0.698\n", - " bowl 128 28 0.957 0.795 0.949 0.819\n", - " banana 128 1 0.729 1 0.995 0.995\n", - " sandwich 128 2 0.822 1 0.995 0.995\n", - " orange 128 4 0.913 1 0.995 0.869\n", - " broccoli 128 11 1 0.727 0.841 0.679\n", - " carrot 128 24 1 0.936 0.984 0.792\n", - " hot dog 128 2 0.827 1 0.995 0.995\n", - " pizza 128 5 0.915 1 0.995 0.995\n", - " donut 128 14 0.909 1 0.995 0.967\n", - " cake 128 4 0.901 1 0.995 0.967\n", - " chair 128 35 0.958 0.857 0.935 0.747\n", - " couch 128 6 0.944 1 0.995 0.899\n", - " potted plant 128 14 0.946 1 0.995 0.865\n", - " bed 128 3 0.878 1 0.995 0.954\n", - " dining table 128 13 1 0.995 0.995 0.897\n", - " toilet 128 2 0.835 1 0.995 0.995\n", - " tv 128 2 0.849 1 0.995 0.948\n", - " laptop 128 3 0.881 1 0.995 0.93\n", - " mouse 128 2 1 0.773 0.995 0.704\n", - " remote 128 8 1 0.745 0.761 0.684\n", - " cell phone 128 8 0.835 0.634 0.799 0.604\n", - " microwave 128 3 0.865 1 0.995 0.895\n", - " oven 128 5 0.893 0.8 0.808 0.731\n", - " sink 128 6 0.79 0.632 0.924 0.695\n", - " refrigerator 128 5 0.929 1 0.995 0.867\n", - " book 128 29 0.815 0.606 0.826 0.62\n", - " clock 128 9 0.886 1 0.995 0.899\n", - " vase 128 2 0.83 1 0.995 0.921\n", - " scissors 128 1 0.741 1 0.995 0.995\n", - " teddy bear 128 21 0.977 1 0.995 0.842\n", - " toothbrush 128 5 0.918 1 0.995 0.879\n", - "Speed: 0.1ms preprocess, 5.7ms inference, 0.0ms loss, 0.3ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_1_finetune61\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.1ms preprocess, 5.1ms inference, 0.0ms loss, 0.3ms postprocess per image\n", + "Results saved to 
\u001b[1mruns/detect/step_1_finetune62\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -1341,83 +781,12 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 43081939 parameters, 0 gradients, 162.7 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.907 0.888 0.937 0.804\n", - " person 128 254 0.951 0.849 0.933 0.772\n", - " bicycle 128 6 0.89 0.667 0.811 0.658\n", - " car 128 46 0.85 0.565 0.71 0.422\n", - " motorcycle 128 5 0.917 1 0.995 0.932\n", - " airplane 128 6 0.934 1 0.995 0.965\n", - " bus 128 7 0.968 1 0.995 0.836\n", - " train 128 3 0.877 1 0.995 0.888\n", - " truck 128 12 1 0.647 0.836 0.663\n", - " boat 128 6 1 0.819 0.922 0.66\n", - " traffic light 128 14 0.897 0.571 0.644 0.386\n", - " stop sign 128 2 0.851 1 0.995 0.895\n", - " bench 128 9 1 0.954 0.995 0.899\n", - " bird 128 16 0.978 1 0.995 0.898\n", - " cat 128 4 0.901 1 0.995 0.972\n", - " dog 128 9 0.956 1 0.995 0.948\n", - " horse 128 2 0.833 1 0.995 0.995\n", - " elephant 128 17 0.975 1 0.995 0.91\n", - " bear 128 1 0.738 1 0.995 0.995\n", - " zebra 128 4 0.9 1 0.995 0.995\n", - " giraffe 128 9 0.957 1 0.995 0.976\n", - " backpack 128 6 0.871 0.833 0.842 0.756\n", - " umbrella 128 18 0.946 0.968 0.992 0.819\n", - " handbag 128 19 0.927 0.668 0.854 0.65\n", - " tie 128 7 0.93 0.857 0.865 0.772\n", - " suitcase 128 4 1 0.969 0.995 0.748\n", - " frisbee 128 5 0.92 0.8 0.821 0.797\n", - " skis 128 1 0.75 1 0.995 0.895\n", - " snowboard 128 7 0.944 0.857 0.953 0.859\n", - " sports ball 128 6 1 0.622 0.771 0.45\n", - " kite 128 10 0.924 0.7 0.932 0.473\n", - " baseball bat 128 4 1 0.97 0.995 0.694\n", - " baseball glove 128 7 1 0.499 0.696 0.366\n", - " skateboard 128 5 0.82 1 0.962 0.719\n", - " tennis racket 128 7 0.937 0.714 0.722 0.519\n", - " bottle 128 18 1 0.747 0.935 0.683\n", - " wine glass 128 16 0.797 0.738 0.924 0.655\n", - " cup 128 36 0.942 0.896 0.979 0.797\n", - " fork 128 6 1 0.961 0.995 0.793\n", - " knife 128 16 0.813 0.814 0.928 0.747\n", - " spoon 128 22 0.902 0.864 0.903 0.689\n", - " bowl 128 28 0.957 0.79 0.91 0.81\n", - " banana 128 1 0.719 1 0.995 0.995\n", - " sandwich 128 2 0.821 1 0.995 0.995\n", - " orange 128 4 0.914 1 0.995 0.877\n", - " broccoli 128 11 1 0.694 0.852 0.69\n", - " carrot 128 24 0.991 0.958 0.982 0.793\n", - " hot dog 128 2 0.82 1 0.995 0.995\n", - " pizza 128 5 0.913 1 0.995 0.91\n", - " donut 128 14 0.909 1 0.995 0.968\n", - " cake 128 4 0.896 1 0.995 0.932\n", - " chair 128 35 0.968 0.864 0.935 0.722\n", - " couch 128 6 0.94 1 0.995 0.859\n", - " potted plant 128 14 0.95 1 0.995 0.862\n", - " bed 128 3 0.871 1 0.995 0.953\n", - " dining table 128 13 0.928 1 0.99 0.903\n", - " toilet 128 2 0.838 1 0.995 0.922\n", - " tv 128 2 0.844 1 0.995 0.947\n", - " laptop 128 3 0.875 1 0.995 0.93\n", - " mouse 128 2 1 0.771 0.995 0.657\n", - " remote 128 8 1 0.72 0.761 0.707\n", - " cell phone 128 8 0.84 0.659 0.797 0.618\n", - " microwave 128 3 0.859 1 0.995 0.964\n", - " oven 128 5 0.897 0.8 0.805 0.727\n", - " sink 128 6 0.781 0.604 0.924 0.71\n", - " refrigerator 128 5 
0.949 1 0.995 0.937\n", - " book 128 29 0.815 0.607 0.802 0.609\n", - " clock 128 9 0.908 1 0.995 0.899\n", - " vase 128 2 0.826 1 0.995 0.921\n", - " scissors 128 1 0.737 1 0.995 0.995\n", - " teddy bear 128 21 0.942 1 0.995 0.838\n", - " toothbrush 128 5 0.913 1 0.995 0.901\n", - "Speed: 0.2ms preprocess, 13.2ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_1_post_val47\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.1ms preprocess, 12.5ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_1_post_val48\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -1439,84 +808,13 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 42712366 parameters, 74176 gradients, 161.3 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.923 0.871 0.933 0.794\n", - " person 128 254 0.967 0.809 0.928 0.765\n", - " bicycle 128 6 0.912 0.667 0.855 0.633\n", - " car 128 46 0.911 0.446 0.687 0.396\n", - " motorcycle 128 5 0.943 1 0.995 0.918\n", - " airplane 128 6 0.939 1 0.995 0.965\n", - " bus 128 7 0.987 1 0.995 0.861\n", - " train 128 3 0.887 1 0.995 0.899\n", - " truck 128 12 1 0.629 0.829 0.621\n", - " boat 128 6 1 0.753 0.883 0.657\n", - " traffic light 128 14 0.902 0.571 0.619 0.388\n", - " stop sign 128 2 0.865 1 0.995 0.849\n", - " bench 128 9 1 0.947 0.995 0.894\n", - " bird 128 16 0.983 1 0.995 0.869\n", - " cat 128 4 0.911 1 0.995 0.974\n", - " dog 128 9 0.96 1 0.995 0.949\n", - " horse 128 2 0.845 1 0.995 0.995\n", - " elephant 128 17 0.978 1 0.995 0.912\n", - " bear 128 1 0.753 1 0.995 0.895\n", - " zebra 128 4 0.907 1 0.995 0.977\n", - " giraffe 128 9 0.962 1 0.995 0.976\n", - " backpack 128 6 0.897 0.833 0.843 0.768\n", - " umbrella 128 18 0.956 0.944 0.99 0.851\n", - " handbag 128 19 0.929 0.688 0.855 0.642\n", - " tie 128 7 0.94 0.857 0.865 0.735\n", - " suitcase 128 4 1 0.963 0.995 0.768\n", - " frisbee 128 5 0.925 0.8 0.906 0.784\n", - " skis 128 1 0.77 1 0.995 0.995\n", - " snowboard 128 7 0.958 0.857 0.944 0.858\n", - " sports ball 128 6 1 0.59 0.728 0.443\n", - " kite 128 10 0.964 0.7 0.902 0.47\n", - " baseball bat 128 4 0.825 0.75 0.756 0.681\n", - " baseball glove 128 7 1 0.493 0.737 0.377\n", - " skateboard 128 5 1 0.994 0.995 0.722\n", - " tennis racket 128 7 1 0.71 0.723 0.542\n", - " bottle 128 18 1 0.665 0.924 0.652\n", - " wine glass 128 16 0.776 0.649 0.913 0.664\n", - " cup 128 36 0.97 0.907 0.969 0.773\n", - " fork 128 6 1 0.897 0.995 0.763\n", - " knife 128 16 0.926 0.787 0.881 0.703\n", - " spoon 128 22 1 0.848 0.906 0.712\n", - " bowl 128 28 0.964 0.786 0.903 0.799\n", - " banana 128 1 0.738 1 0.995 0.995\n", - " sandwich 128 2 0.832 1 0.995 0.995\n", - " orange 128 4 0.925 1 0.995 0.877\n", - " broccoli 128 11 1 0.641 0.842 0.679\n", - " carrot 128 24 1 0.943 0.984 0.795\n", - " hot dog 128 2 0.829 1 0.995 0.995\n", - " pizza 128 5 0.919 1 0.995 0.938\n", - " donut 128 14 0.911 1 0.995 
0.969\n", - " cake 128 4 0.904 1 0.995 0.923\n", - " chair 128 35 0.967 0.837 0.946 0.723\n", - " couch 128 6 0.952 1 0.995 0.845\n", - " potted plant 128 14 0.939 1 0.995 0.858\n", - " bed 128 3 0.935 1 0.995 0.953\n", - " dining table 128 13 0.926 0.967 0.984 0.895\n", - " toilet 128 2 0.853 1 0.995 0.922\n", - " tv 128 2 0.863 1 0.995 0.995\n", - " laptop 128 3 0.902 1 0.995 0.93\n", - " mouse 128 2 1 0.762 0.995 0.635\n", - " remote 128 8 1 0.733 0.76 0.674\n", - " cell phone 128 8 0.906 0.625 0.898 0.653\n", - " microwave 128 3 0.877 1 0.995 0.931\n", - " oven 128 5 0.795 0.8 0.807 0.71\n", - " sink 128 6 1 0.446 0.87 0.626\n", - " refrigerator 128 5 0.933 1 0.995 0.902\n", - " book 128 29 0.801 0.556 0.783 0.598\n", - " clock 128 9 0.946 1 0.995 0.908\n", - " vase 128 2 0.842 1 0.995 0.921\n", - " scissors 128 1 0.752 1 0.995 0.697\n", - " teddy bear 128 21 0.973 1 0.995 0.846\n", - " toothbrush 128 5 0.898 1 0.995 0.895\n", - "Speed: 0.2ms preprocess, 13.2ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_2_pre_val49\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_2_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_2_finetune47\n", + "Speed: 0.2ms preprocess, 12.5ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_2_pre_val50\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", + "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_2_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, 
source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_2_finetune48\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -1535,149 +833,78 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_2_finetune47/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_2_finetune48/labels.jpg... 
\n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_2_finetune47\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_2_finetune48\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 11.9G 0.548 0.3528 0.9023 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 13.4G 0.548 0.3528 0.9023 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.923 0.871 0.935 0.801\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 11.8G 0.4746 0.3015 0.8763 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 13.3G 0.4746 0.3015 0.8763 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.93 0.877 0.94 0.806\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 11.8G 0.5379 0.3445 0.9065 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 13.2G 0.5379 0.3445 0.9065 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.94 0.871 0.942 0.812\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 11.8G 0.5157 0.3339 0.9019 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 13.3G 0.5157 0.3339 0.9019 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.93 0.877 0.938 0.811\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 5/10 11.8G 0.5169 0.3404 0.8804 96 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 5/10 13.3G 0.5169 0.3404 0.8804 96 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.921 0.883 0.939 0.81\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 6/10 11.8G 0.5339 0.3559 0.9031 120 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 6/10 13.3G 0.5339 0.3559 0.9031 120 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.929 0.878 0.94 0.811\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 7/10 11.9G 0.5597 0.3561 0.8945 69 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 7/10 13.3G 0.5597 0.3561 0.8945 69 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.939 0.876 0.941 0.813\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 8/10 11.8G 0.5733 0.3857 0.9149 141 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 8/10 13.3G 0.5733 0.3857 0.9149 
141 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.936 0.879 0.941 0.818\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 9/10 11.8G 0.5769 0.3717 0.9222 104 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 9/10 13.3G 0.5769 0.3717 0.9222 104 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.941 0.882 0.941 0.821\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 10/10 11.8G 0.6167 0.3871 0.9151 170 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 10/10 13.2G 0.6167 0.3871 0.9151 170 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.939 0.882 0.941 0.821\n", "\n", - "10 epochs completed in 0.025 hours.\n", - "Optimizer stripped from runs/detect/step_2_finetune47/weights/last.pt, 171.5MB\n", - "Optimizer stripped from runs/detect/step_2_finetune47/weights/best.pt, 171.5MB\n", + "10 epochs completed in 0.024 hours.\n", + "Optimizer stripped from runs/detect/step_2_finetune48/weights/last.pt, 171.5MB\n", + "Optimizer stripped from runs/detect/step_2_finetune48/weights/best.pt, 171.5MB\n", "\n", - "Validating runs/detect/step_2_finetune47/weights/best.pt...\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Validating runs/detect/step_2_finetune48/weights/best.pt...\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 42712366 parameters, 0 gradients, 161.3 GFLOPs\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.94 0.882 0.942 0.822\n", - " person 128 254 0.976 0.795 0.925 0.77\n", - " bicycle 128 6 0.934 0.667 0.832 0.633\n", - " car 128 46 1 0.388 0.709 0.414\n", - " motorcycle 128 5 0.936 1 0.995 0.974\n", - " airplane 128 6 0.967 1 0.995 0.952\n", - " bus 128 7 1 0.924 0.995 0.866\n", - " train 128 3 0.925 1 0.995 0.995\n", - " truck 128 12 0.98 0.583 0.807 0.598\n", - " boat 128 6 0.973 0.833 0.84 0.689\n", - " traffic light 128 14 1 0.567 0.663 0.418\n", - " stop sign 128 2 0.891 1 0.995 0.924\n", - " bench 128 9 0.998 1 0.995 0.884\n", - " bird 128 16 0.984 1 0.995 0.893\n", - " cat 128 4 0.947 1 0.995 0.931\n", - " dog 128 9 0.966 1 0.995 0.967\n", - " horse 128 2 0.885 1 0.995 0.995\n", - " elephant 128 17 0.987 1 0.995 0.923\n", - " bear 128 1 0.805 1 0.995 0.995\n", - " zebra 128 4 0.927 1 0.995 0.995\n", - " giraffe 128 9 0.972 1 0.995 0.977\n", - " backpack 128 6 0.869 0.833 0.845 0.779\n", - " umbrella 128 18 0.994 1 0.995 0.854\n", - " handbag 128 19 0.974 0.737 0.887 0.693\n", - " tie 128 7 0.959 0.857 0.869 0.762\n", - " suitcase 128 4 0.958 1 0.995 0.774\n", - " frisbee 128 5 1 0.847 0.995 0.821\n", - " skis 128 1 0.811 1 0.995 0.995\n", - " snowboard 128 7 0.952 0.857 0.876 0.793\n", - " sports ball 128 6 0.985 0.667 0.803 0.483\n", - " kite 128 10 0.952 0.7 0.916 0.532\n", - " baseball bat 128 4 1 0.901 0.995 0.781\n", - " baseball glove 128 7 1 0.482 0.617 0.408\n", - " skateboard 128 5 0.923 1 0.995 0.803\n", - " tennis racket 128 7 0.986 0.714 0.728 0.521\n", - " bottle 128 18 1 0.598 0.975 0.676\n", - " wine 
glass 128 16 0.802 0.688 0.872 0.643\n", - " cup 128 36 0.971 0.919 0.984 0.826\n", - " fork 128 6 0.916 1 0.995 0.866\n", - " knife 128 16 0.926 0.786 0.943 0.699\n", - " spoon 128 22 1 0.755 0.907 0.731\n", - " bowl 128 28 1 0.819 0.959 0.829\n", - " banana 128 1 0.807 1 0.995 0.995\n", - " sandwich 128 2 0.869 1 0.995 0.995\n", - " orange 128 4 0.924 1 0.995 0.905\n", - " broccoli 128 11 1 0.72 0.841 0.67\n", - " carrot 128 24 0.94 0.958 0.98 0.79\n", - " hot dog 128 2 0.869 1 0.995 0.995\n", - " pizza 128 5 0.939 1 0.995 0.978\n", - " donut 128 14 0.927 1 0.995 0.973\n", - " cake 128 4 0.932 1 0.995 0.995\n", - " chair 128 35 1 0.774 0.976 0.759\n", - " couch 128 6 0.953 1 0.995 0.883\n", - " potted plant 128 14 0.967 1 0.995 0.879\n", - " bed 128 3 0.904 1 0.995 0.995\n", - " dining table 128 13 0.927 0.923 0.99 0.872\n", - " toilet 128 2 0.876 1 0.995 0.995\n", - " tv 128 2 0.872 1 0.995 0.995\n", - " laptop 128 3 0.921 1 0.995 0.952\n", - " mouse 128 2 1 0.759 0.995 0.655\n", - " remote 128 8 0.992 0.75 0.761 0.675\n", - " cell phone 128 8 1 0.813 0.971 0.658\n", - " microwave 128 3 0.909 1 0.995 0.915\n", - " oven 128 5 0.759 0.8 0.725 0.66\n", - " sink 128 6 1 0.7 0.995 0.756\n", - " refrigerator 128 5 0.95 1 0.995 0.95\n", - " book 128 29 0.919 0.483 0.861 0.644\n", - " clock 128 9 0.943 1 0.995 0.922\n", - " vase 128 2 0.876 1 0.995 0.995\n", - " scissors 128 1 0.802 1 0.995 0.995\n", - " teddy bear 128 21 0.982 1 0.995 0.898\n", - " toothbrush 128 5 0.957 1 0.995 0.931\n", - "Speed: 0.1ms preprocess, 5.6ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_2_finetune47\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.1ms preprocess, 4.9ms inference, 0.0ms loss, 0.2ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_2_finetune48\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -1694,82 +921,12 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 42712366 parameters, 0 gradients, 161.3 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 
126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.92 0.888 0.943 0.813\n", - " person 128 254 0.972 0.823 0.933 0.773\n", - " bicycle 128 6 0.913 0.667 0.847 0.625\n", - " car 128 46 1 0.435 0.728 0.42\n", - " motorcycle 128 5 0.923 1 0.995 0.926\n", - " airplane 128 6 0.95 1 0.995 0.977\n", - " bus 128 7 1 0.923 0.995 0.885\n", - " train 128 3 0.905 1 0.995 0.73\n", - " truck 128 12 0.884 0.583 0.81 0.627\n", - " boat 128 6 0.962 0.833 0.84 0.697\n", - " traffic light 128 14 0.971 0.571 0.656 0.414\n", - " stop sign 128 2 0.867 1 0.995 0.924\n", - " bench 128 9 1 0.998 0.995 0.874\n", - " bird 128 16 0.982 1 0.995 0.91\n", - " cat 128 4 0.936 1 0.995 0.931\n", - " dog 128 9 0.958 1 0.995 0.966\n", - " horse 128 2 0.863 1 0.995 0.995\n", - " elephant 128 17 0.987 1 0.995 0.928\n", - " bear 128 1 0.768 1 0.995 0.995\n", - " zebra 128 4 0.912 1 0.995 0.995\n", - " giraffe 128 9 0.962 1 0.995 0.977\n", - " backpack 128 6 0.804 0.833 0.845 0.763\n", - " umbrella 128 18 0.99 1 0.995 0.845\n", - " handbag 128 19 1 0.734 0.876 0.685\n", - " tie 128 7 0.947 0.857 0.867 0.79\n", - " suitcase 128 4 0.939 1 0.995 0.792\n", - " frisbee 128 5 1 0.866 0.995 0.836\n", - " skis 128 1 0.773 1 0.995 0.995\n", - " snowboard 128 7 0.816 0.857 0.876 0.805\n", - " sports ball 128 6 0.976 0.667 0.805 0.456\n", - " kite 128 10 0.941 0.7 0.937 0.525\n", - " baseball bat 128 4 1 0.953 0.995 0.735\n", - " baseball glove 128 7 1 0.494 0.726 0.432\n", - " skateboard 128 5 0.918 1 0.995 0.773\n", - " tennis racket 128 7 0.955 0.714 0.729 0.531\n", - " bottle 128 18 1 0.731 0.962 0.646\n", - " wine glass 128 16 0.871 0.843 0.92 0.672\n", - " cup 128 36 0.942 0.895 0.973 0.811\n", - " fork 128 6 0.848 1 0.995 0.802\n", - " knife 128 16 0.816 0.831 0.933 0.665\n", - " spoon 128 22 1 0.757 0.911 0.708\n", - " bowl 128 28 1 0.834 0.936 0.833\n", - " banana 128 1 0.764 1 0.995 0.995\n", - " sandwich 128 2 0.846 1 0.995 0.995\n", - " orange 128 4 0.906 1 0.995 0.895\n", - " broccoli 128 11 0.891 0.748 0.829 0.673\n", - " carrot 128 24 0.904 0.958 0.984 0.806\n", - " hot dog 128 2 0.842 1 0.995 0.995\n", - " pizza 128 5 0.923 1 0.995 0.973\n", - " donut 128 14 0.926 1 0.995 0.972\n", - " cake 128 4 0.909 1 0.995 0.954\n", - " chair 128 35 0.986 0.8 0.963 0.721\n", - " couch 128 6 0.935 1 0.995 0.858\n", - " potted plant 128 14 0.958 1 0.995 0.895\n", - " bed 128 3 0.885 1 0.995 0.995\n", - " dining table 128 13 1 0.921 0.995 0.866\n", - " toilet 128 2 0.851 1 0.995 0.922\n", - " tv 128 2 0.845 1 0.995 0.995\n", - " laptop 128 3 0.887 1 0.995 0.995\n", - " mouse 128 2 1 0.813 0.995 0.655\n", - " remote 128 8 1 0.719 0.761 0.675\n", - " cell phone 128 8 0.88 0.75 0.945 0.681\n", - " microwave 128 3 0.882 1 0.995 0.931\n", - " oven 128 5 0.733 0.8 0.724 0.629\n", - " sink 128 6 1 0.679 0.995 0.748\n", - " refrigerator 128 5 0.937 1 0.995 0.949\n", - " book 128 29 0.838 0.517 0.839 0.644\n", - " clock 128 9 0.954 1 0.995 0.916\n", - " vase 128 2 0.85 1 0.995 0.995\n", - " scissors 128 1 0.766 1 0.995 0.995\n", - " teddy bear 128 21 0.979 1 0.995 0.878\n", - " toothbrush 128 5 1 0.945 0.995 0.877\n", - "Speed: 0.2ms preprocess, 13.2ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_2_post_val41\u001b[0m\n" + "Speed: 0.1ms preprocess, 12.5ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_2_post_val42\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 
Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -1790,86 +947,14 @@ "name": "stderr", "output_type": "stream", "text": [ - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", "YOLOv8l summary (fused): 285 layers, 42094706 parameters, 74176 gradients, 158.8 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.94 0.864 0.936 0.804\n", - " person 128 254 0.985 0.763 0.918 0.761\n", - " bicycle 128 6 0.963 0.667 0.843 0.647\n", - " car 128 46 1 0.329 0.711 0.413\n", - " motorcycle 128 5 0.936 1 0.995 0.927\n", - " airplane 128 6 0.969 1 0.995 0.91\n", - " bus 128 7 1 0.892 0.995 0.886\n", - " train 128 3 0.921 1 0.995 0.834\n", - " truck 128 12 0.989 0.583 0.749 0.604\n", - " boat 128 6 0.985 0.833 0.883 0.691\n", - " traffic light 128 14 1 0.538 0.616 0.396\n", - " stop sign 128 2 0.892 1 0.995 0.895\n", - " bench 128 9 0.981 1 0.995 0.873\n", - " bird 128 16 0.987 1 0.995 0.884\n", - " cat 128 4 0.959 1 0.995 0.929\n", - " dog 128 9 0.968 1 0.995 0.967\n", - " horse 128 2 0.896 1 0.995 0.995\n", - " elephant 128 17 1 1 0.995 0.923\n", - " bear 128 1 0.805 1 0.995 0.895\n", - " zebra 128 4 0.929 1 0.995 0.995\n", - " giraffe 128 9 0.973 1 0.995 0.962\n", - " backpack 128 6 0.935 0.833 0.844 0.748\n", - " umbrella 128 18 1 0.979 0.995 0.847\n", - " handbag 128 19 0.981 0.737 0.86 0.673\n", - " tie 128 7 0.963 0.857 0.869 0.765\n", - " suitcase 128 4 0.957 1 0.995 0.79\n", - " frisbee 128 5 1 0.822 0.995 0.803\n", - " skis 128 1 0.812 1 0.995 0.895\n", - " snowboard 128 7 0.958 0.857 0.879 0.793\n", - " sports ball 128 6 1 0.599 0.756 0.459\n", - " kite 128 10 1 0.728 0.906 0.509\n", - " baseball bat 128 4 1 0.883 0.995 0.758\n", - " baseball glove 128 7 0.906 0.429 0.562 0.362\n", - " skateboard 128 5 0.984 1 0.995 0.715\n", - " tennis racket 128 7 1 0.687 0.728 0.525\n", - " bottle 128 18 1 0.598 0.969 0.659\n", - " wine glass 128 16 0.831 0.613 0.888 0.664\n", - " cup 128 36 0.956 0.889 0.976 0.802\n", - " fork 128 6 0.837 0.856 0.972 0.818\n", - " knife 128 16 0.858 0.757 0.907 0.652\n", - " spoon 128 22 1 0.758 0.829 0.679\n", - " bowl 128 28 0.976 0.786 0.948 0.83\n", - " banana 128 1 0.81 1 0.995 0.995\n", - " sandwich 128 2 0.874 1 0.995 0.995\n", - " orange 128 4 0.926 1 0.995 0.867\n", - " broccoli 128 11 1 0.711 0.868 0.657\n", - " carrot 128 24 0.916 0.913 0.972 0.777\n", - " hot dog 128 2 0.867 1 0.995 0.995\n", - " pizza 128 5 0.938 1 0.995 0.943\n", - " donut 128 14 0.945 1 0.995 0.965\n", - " cake 128 4 0.927 1 0.995 0.953\n", - " chair 128 35 1 0.794 0.975 0.718\n", - " couch 128 6 0.894 1 0.995 0.858\n", - " potted plant 128 14 0.977 1 0.995 0.878\n", - " bed 128 3 0.902 1 0.995 0.995\n", - " dining table 128 13 1 0.899 0.995 0.873\n", - " toilet 128 2 0.881 1 0.995 0.923\n", - " tv 128 2 0.874 1 0.995 0.995\n", - " laptop 128 3 0.842 1 0.995 0.963\n", - " mouse 128 2 1 0.705 0.995 0.654\n", - " remote 128 8 1 0.59 0.762 0.688\n", - " cell phone 128 8 0.92 0.75 0.927 0.635\n", - " microwave 128 3 0.902 1 0.995 0.964\n", - " oven 128 5 0.756 0.8 0.725 0.66\n", - " sink 128 6 1 0.472 
0.995 0.744\n", - " refrigerator 128 5 0.95 1 0.995 0.952\n", - " book 128 29 0.933 0.477 0.85 0.632\n", - " clock 128 9 0.962 1 0.995 0.925\n", - " vase 128 2 0.88 1 0.995 0.995\n", - " scissors 128 1 0.802 1 0.995 0.995\n", - " teddy bear 128 21 0.965 1 0.995 0.864\n", - " toothbrush 128 5 1 0.959 0.995 0.918\n", - "Speed: 0.2ms preprocess, 13.4ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_3_pre_val35\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_3_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_3_finetune35\n", + "Speed: 0.1ms preprocess, 12.7ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_3_pre_val36\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", + "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_3_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, 
shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_3_finetune36\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -1888,148 +973,78 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_3_finetune35/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_3_finetune36/labels.jpg... \n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_3_finetune35\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_3_finetune36\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 11.9G 0.5395 0.3534 0.897 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 13.9G 0.5395 0.3534 0.897 122 640: 100%|██████████| 8/8 [00:42\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.932 0.875 0.937 0.808\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 11.8G 0.4523 0.2943 0.8601 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 13.2G 0.4523 0.2943 0.8601 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.942 0.875 0.942 0.816\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 11.7G 0.5011 0.3242 0.885 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 13.2G 0.5011 0.3242 0.885 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.94 0.879 0.94 0.816\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 11.7G 0.4896 0.3239 0.881 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 13.2G 0.4896 0.3239 0.881 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.945 0.873 0.941 0.818\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 5/10 11.7G 0.4877 0.3266 0.8653 96 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 5/10 13.2G 0.4877 0.3266 0.8653 96 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances 
Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.924 0.883 0.939 0.819\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 6/10 11.7G 0.5175 0.341 0.8913 120 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 6/10 13.2G 0.5175 0.341 0.8913 120 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.942 0.879 0.943 0.824\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 7/10 11.7G 0.5484 0.3518 0.8896 69 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 7/10 13.2G 0.5484 0.3518 0.8896 69 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.942 0.882 0.944 0.825\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 8/10 11.7G 0.5657 0.3636 0.901 141 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 8/10 13.2G 0.5657 0.3636 0.901 141 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.947 0.877 0.944 0.827\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 9/10 11.7G 0.5557 0.3553 0.908 104 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 9/10 13.2G 0.5557 0.3553 0.908 104 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.932 0.888 0.945 0.827\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 10/10 11.7G 0.6072 0.381 0.9066 170 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 10/10 13.2G 0.6072 0.381 0.9066 170 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.938 0.885 0.945 0.827\n", "\n", - "10 epochs completed in 0.028 hours.\n", - "Optimizer stripped from runs/detect/step_3_finetune35/weights/last.pt, 169.0MB\n", - "Optimizer stripped from runs/detect/step_3_finetune35/weights/best.pt, 169.0MB\n", + "10 epochs completed in 0.037 hours.\n", + "Optimizer stripped from runs/detect/step_3_finetune36/weights/last.pt, 169.0MB\n", + "Optimizer stripped from runs/detect/step_3_finetune36/weights/best.pt, 169.0MB\n", "\n", - "Validating runs/detect/step_3_finetune35/weights/best.pt...\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Validating runs/detect/step_3_finetune36/weights/best.pt...\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 42094706 parameters, 0 gradients, 158.8 GFLOPs\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.947 0.877 0.944 0.827\n", - " person 128 254 0.954 0.82 0.933 0.777\n", - " bicycle 128 6 1 0.584 0.84 0.615\n", - " car 128 46 0.997 0.5 0.724 0.439\n", - " motorcycle 128 5 0.944 1 0.995 0.939\n", - " airplane 128 6 0.958 1 0.995 0.973\n", - " bus 128 7 1 0.947 0.995 0.882\n", - " train 128 3 0.92 1 0.995 0.995\n", - " truck 128 12 1 0.655 0.792 0.648\n", - " boat 128 6 0.983 0.833 0.972 0.805\n", - 
" traffic light 128 14 0.749 0.429 0.635 0.425\n", - " stop sign 128 2 0.912 1 0.995 0.927\n", - " bench 128 9 1 0.941 0.995 0.942\n", - " bird 128 16 0.99 1 0.995 0.884\n", - " cat 128 4 0.939 1 0.995 0.995\n", - " dog 128 9 0.972 1 0.995 0.977\n", - " horse 128 2 0.893 1 0.995 0.895\n", - " elephant 128 17 0.983 1 0.995 0.907\n", - " bear 128 1 0.834 1 0.995 0.995\n", - " zebra 128 4 0.937 1 0.995 0.995\n", - " giraffe 128 9 0.97 1 0.995 0.928\n", - " backpack 128 6 0.953 0.833 0.85 0.764\n", - " umbrella 128 18 0.979 0.944 0.992 0.875\n", - " handbag 128 19 1 0.761 0.893 0.685\n", - " tie 128 7 0.962 0.857 0.873 0.784\n", - " suitcase 128 4 1 0.957 0.995 0.827\n", - " frisbee 128 5 1 0.843 0.995 0.824\n", - " skis 128 1 0.844 1 0.995 0.995\n", - " snowboard 128 7 0.953 0.857 0.881 0.793\n", - " sports ball 128 6 0.962 0.667 0.801 0.49\n", - " kite 128 10 1 0.717 0.986 0.592\n", - " baseball bat 128 4 1 0.858 0.995 0.73\n", - " baseball glove 128 7 1 0.471 0.601 0.369\n", - " skateboard 128 5 0.983 1 0.995 0.824\n", - " tennis racket 128 7 0.971 0.714 0.725 0.552\n", - " bottle 128 18 0.961 0.778 0.95 0.689\n", - " wine glass 128 16 0.819 0.562 0.851 0.632\n", - " cup 128 36 0.942 0.899 0.982 0.826\n", - " fork 128 6 0.962 1 0.995 0.899\n", - " knife 128 16 0.99 0.812 0.89 0.716\n", - " spoon 128 22 0.957 0.864 0.909 0.707\n", - " bowl 128 28 0.96 0.857 0.955 0.79\n", - " banana 128 1 0.833 1 0.995 0.995\n", - " sandwich 128 2 0.888 1 0.995 0.995\n", - " orange 128 4 0.949 1 0.995 0.923\n", - " broccoli 128 11 1 0.597 0.884 0.658\n", - " carrot 128 24 1 0.897 0.985 0.813\n", - " hot dog 128 2 0.888 1 0.995 0.995\n", - " pizza 128 5 0.952 1 0.995 0.977\n", - " donut 128 14 0.958 1 0.995 0.985\n", - " cake 128 4 0.939 1 0.995 0.995\n", - " chair 128 35 1 0.79 0.963 0.754\n", - " couch 128 6 0.886 1 0.995 0.9\n", - " potted plant 128 14 0.979 1 0.995 0.875\n", - " bed 128 3 0.918 1 0.995 0.941\n", - " dining table 128 13 0.953 1 0.995 0.875\n", - " toilet 128 2 0.904 1 0.995 0.995\n", - " tv 128 2 0.897 1 0.995 0.995\n", - " laptop 128 3 0.676 0.702 0.83 0.798\n", - " mouse 128 2 1 0.668 0.995 0.708\n", - " remote 128 8 0.962 0.75 0.761 0.687\n", - " cell phone 128 8 1 0.667 0.982 0.627\n", - " microwave 128 3 1 1 0.995 0.973\n", - " oven 128 5 1 0.901 0.995 0.775\n", - " sink 128 6 1 0.773 0.955 0.781\n", - " refrigerator 128 5 0.952 1 0.995 0.959\n", - " book 128 29 1 0.527 0.867 0.677\n", - " clock 128 9 0.938 1 0.995 0.914\n", - " vase 128 2 0.893 1 0.995 0.948\n", - " scissors 128 1 0.832 1 0.995 0.995\n", - " teddy bear 128 21 0.991 1 0.995 0.914\n", - " toothbrush 128 5 0.948 1 0.995 0.965\n", - "Speed: 0.1ms preprocess, 5.4ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_3_finetune35\u001b[0m\n" + "Speed: 0.1ms preprocess, 4.8ms inference, 0.0ms loss, 0.2ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_3_finetune36\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -2045,85 +1060,13 @@ "name": "stderr", "output_type": "stream", "text": [ - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", "YOLOv8l summary (fused): 285 layers, 42094706 parameters, 0 gradients, 158.8 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 
126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.945 0.875 0.943 0.824\n", - " person 128 254 0.959 0.829 0.942 0.78\n", - " bicycle 128 6 1 0.622 0.812 0.628\n", - " car 128 46 1 0.495 0.723 0.443\n", - " motorcycle 128 5 0.945 1 0.995 0.954\n", - " airplane 128 6 0.958 1 0.995 0.973\n", - " bus 128 7 1 0.934 0.995 0.882\n", - " train 128 3 0.923 1 0.995 0.865\n", - " truck 128 12 1 0.651 0.79 0.649\n", - " boat 128 6 0.99 0.833 0.972 0.782\n", - " traffic light 128 14 0.794 0.429 0.624 0.425\n", - " stop sign 128 2 0.914 1 0.995 0.927\n", - " bench 128 9 1 0.945 0.995 0.906\n", - " bird 128 16 0.99 1 0.995 0.871\n", - " cat 128 4 0.935 1 0.995 0.995\n", - " dog 128 9 0.973 1 0.995 0.977\n", - " horse 128 2 0.899 1 0.995 0.948\n", - " elephant 128 17 0.989 1 0.995 0.905\n", - " bear 128 1 0.835 1 0.995 0.995\n", - " zebra 128 4 0.938 1 0.995 0.995\n", - " giraffe 128 9 0.967 1 0.995 0.947\n", - " backpack 128 6 0.951 0.833 0.849 0.764\n", - " umbrella 128 18 0.997 0.944 0.992 0.866\n", - " handbag 128 19 1 0.722 0.838 0.667\n", - " tie 128 7 0.963 0.857 0.871 0.783\n", - " suitcase 128 4 1 0.961 0.995 0.825\n", - " frisbee 128 5 1 0.837 0.995 0.836\n", - " skis 128 1 0.845 1 0.995 0.995\n", - " snowboard 128 7 0.954 0.857 0.879 0.801\n", - " sports ball 128 6 0.981 0.667 0.786 0.5\n", - " kite 128 10 1 0.717 0.977 0.581\n", - " baseball bat 128 4 1 0.852 0.995 0.747\n", - " baseball glove 128 7 0.925 0.429 0.571 0.379\n", - " skateboard 128 5 0.979 1 0.995 0.803\n", - " tennis racket 128 7 0.973 0.714 0.726 0.539\n", - " bottle 128 18 0.983 0.778 0.963 0.677\n", - " wine glass 128 16 0.868 0.562 0.89 0.65\n", - " cup 128 36 0.94 0.866 0.978 0.817\n", - " fork 128 6 0.855 1 0.995 0.853\n", - " knife 128 16 0.987 0.812 0.888 0.727\n", - " spoon 128 22 0.959 0.864 0.909 0.712\n", - " bowl 128 28 0.959 0.84 0.955 0.814\n", - " banana 128 1 0.834 1 0.995 0.995\n", - " sandwich 128 2 0.863 1 0.995 0.995\n", - " orange 128 4 0.95 1 0.995 0.923\n", - " broccoli 128 11 1 0.647 0.883 0.645\n", - " carrot 128 24 1 0.899 0.985 0.825\n", - " hot dog 128 2 0.888 1 0.995 0.995\n", - " pizza 128 5 0.954 1 0.995 0.959\n", - " donut 128 14 0.952 1 0.995 0.966\n", - " cake 128 4 0.94 1 0.995 0.995\n", - " chair 128 35 1 0.812 0.966 0.747\n", - " couch 128 6 0.921 1 0.995 0.921\n", - " potted plant 128 14 0.924 1 0.986 0.869\n", - " bed 128 3 0.921 1 0.995 0.942\n", - " dining table 128 13 0.958 1 0.995 0.906\n", - " toilet 128 2 0.902 1 0.995 0.995\n", - " tv 128 2 0.892 1 0.995 0.995\n", - " laptop 128 3 0.674 0.697 0.83 0.798\n", - " mouse 128 2 1 0.657 0.995 0.708\n", - " remote 128 8 0.974 0.75 0.761 0.699\n", - " cell phone 128 8 1 0.641 0.995 0.667\n", - " microwave 128 3 0.926 1 0.995 0.967\n", - " oven 128 5 1 0.924 0.995 0.752\n", - " sink 128 6 1 0.759 0.955 0.714\n", - " refrigerator 128 5 0.954 1 0.995 0.958\n", - " book 128 29 0.938 0.518 0.854 0.669\n", - " clock 128 9 0.932 1 0.995 0.909\n", - " vase 128 2 0.895 1 0.995 0.995\n", - " scissors 128 1 0.831 1 0.995 0.995\n", - " teddy bear 128 21 0.983 1 0.995 0.915\n", - " toothbrush 128 5 0.959 1 0.995 0.919\n", - "Speed: 0.2ms preprocess, 13.4ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to 
\u001b[1mruns/detect/step_3_post_val33\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.2ms preprocess, 12.7ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_3_post_val34\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -2145,84 +1088,13 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 40919781 parameters, 74176 gradients, 154.4 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.913 0.876 0.935 0.792\n", - " person 128 254 0.95 0.822 0.925 0.746\n", - " bicycle 128 6 1 0.475 0.735 0.545\n", - " car 128 46 0.972 0.565 0.706 0.385\n", - " motorcycle 128 5 0.912 1 0.995 0.94\n", - " airplane 128 6 0.947 1 0.995 0.922\n", - " bus 128 7 1 0.974 0.995 0.869\n", - " train 128 3 0.881 1 0.995 0.895\n", - " truck 128 12 1 0.645 0.801 0.641\n", - " boat 128 6 1 0.818 0.931 0.806\n", - " traffic light 128 14 0.6 0.324 0.41 0.263\n", - " stop sign 128 2 1 1 0.995 0.933\n", - " bench 128 9 0.859 0.889 0.975 0.858\n", - " bird 128 16 1 0.993 0.995 0.867\n", - " cat 128 4 0.9 1 0.995 0.937\n", - " dog 128 9 0.952 1 0.995 0.956\n", - " horse 128 2 0.81 1 0.995 0.949\n", - " elephant 128 17 0.97 1 0.995 0.902\n", - " bear 128 1 0.738 1 0.995 0.995\n", - " zebra 128 4 0.899 1 0.995 0.995\n", - " giraffe 128 9 0.946 1 0.995 0.911\n", - " backpack 128 6 0.916 0.833 0.895 0.775\n", - " umbrella 128 18 0.953 0.944 0.992 0.836\n", - " handbag 128 19 0.888 0.737 0.833 0.659\n", - " tie 128 7 0.963 0.857 0.87 0.773\n", - " suitcase 128 4 0.932 1 0.995 0.831\n", - " frisbee 128 5 1 0.876 0.995 0.747\n", - " skis 128 1 0.743 1 0.995 0.995\n", - " snowboard 128 7 0.882 0.857 0.87 0.743\n", - " sports ball 128 6 0.914 0.667 0.727 0.211\n", - " kite 128 10 0.979 0.7 0.968 0.49\n", - " baseball bat 128 4 1 0.94 0.995 0.732\n", - " baseball glove 128 7 1 0.498 0.598 0.361\n", - " skateboard 128 5 0.829 1 0.995 0.805\n", - " tennis racket 128 7 0.919 0.714 0.724 0.53\n", - " bottle 128 18 0.925 0.682 0.878 0.533\n", - " wine glass 128 16 0.867 0.625 0.938 0.662\n", - " cup 128 36 0.936 0.815 0.945 0.763\n", - " fork 128 6 0.856 0.833 0.972 0.76\n", - " knife 128 16 0.826 0.75 0.884 0.686\n", - " spoon 128 22 0.97 0.864 0.906 0.651\n", - " bowl 128 28 0.989 0.857 0.934 0.791\n", - " banana 128 1 0.739 1 0.995 0.895\n", - " sandwich 128 2 0.827 1 0.995 0.995\n", - " orange 128 4 0.928 1 0.995 0.837\n", - " broccoli 128 11 1 0.637 0.885 0.676\n", - " carrot 128 24 0.953 0.846 0.97 0.796\n", - " hot dog 128 2 0.823 1 0.995 0.995\n", - " pizza 128 5 0.922 1 0.995 0.959\n", - " donut 128 14 0.927 1 0.995 0.948\n", - " cake 128 4 0.908 1 0.995 0.924\n", - " chair 128 35 1 0.822 0.961 0.715\n", - " couch 128 6 0.941 1 0.995 0.912\n", - " potted plant 128 14 0.93 1 0.995 0.823\n", - " bed 128 3 0.872 1 0.995 0.94\n", - " dining table 128 13 0.938 0.923 0.986 0.894\n", - " toilet 128 2 1 0.976 0.995 0.995\n", - " tv 128 2 0.835 1 0.995 0.922\n", - " laptop 128 3 0.717 0.868 0.913 0.856\n", 
- " mouse 128 2 1 0.853 0.995 0.617\n", - " remote 128 8 0.913 0.75 0.762 0.674\n", - " cell phone 128 8 0.842 0.672 0.898 0.624\n", - " microwave 128 3 0.875 1 0.995 0.911\n", - " oven 128 5 1 0.946 0.995 0.823\n", - " sink 128 6 1 0.812 0.955 0.662\n", - " refrigerator 128 5 0.925 1 0.995 0.915\n", - " book 128 29 0.917 0.517 0.86 0.629\n", - " clock 128 9 0.959 1 0.995 0.908\n", - " vase 128 2 0.857 1 0.995 0.948\n", - " scissors 128 1 0.733 1 0.995 0.995\n", - " teddy bear 128 21 0.976 1 0.995 0.877\n", - " toothbrush 128 5 0.922 1 0.995 0.915\n", - "Speed: 0.2ms preprocess, 13.1ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_4_pre_val31\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_4_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_4_finetune31\n", + "Speed: 0.2ms preprocess, 12.6ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_4_pre_val32\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", + "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_4_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, 
momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_4_finetune32\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -2241,149 +1113,78 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_4_finetune31/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_4_finetune32/labels.jpg... \n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_4_finetune31\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_4_finetune32\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 11.8G 0.573 0.3665 0.9011 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 13.8G 0.573 0.3665 0.9011 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.926 0.881 0.94 0.803\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 11.7G 0.4596 0.2974 0.8554 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 13.4G 0.4596 0.2974 0.8554 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.932 0.879 0.943 0.815\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 11.6G 0.5037 0.3324 0.8775 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 13.2G 0.5037 0.3324 0.8775 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.94 0.875 0.94 0.815\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 11.6G 0.4863 0.313 0.8774 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 13.3G 0.4863 0.313 0.8774 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.954 0.872 0.943 0.813\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", 
- " 5/10 11.6G 0.4905 0.3254 0.8586 96 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 5/10 13.2G 0.4905 0.3254 0.8586 96 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.942 0.87 0.939 0.812\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 6/10 11.5G 0.5016 0.3312 0.8863 120 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 6/10 13.2G 0.5016 0.3312 0.8863 120 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.914 0.888 0.939 0.812\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 7/10 11.6G 0.5446 0.3534 0.8808 69 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 7/10 13.2G 0.5446 0.3534 0.8808 69 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.937 0.874 0.942 0.818\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 8/10 11.6G 0.554 0.3697 0.8957 141 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 8/10 13.2G 0.554 0.3697 0.8957 141 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.927 0.885 0.942 0.821\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 9/10 11.6G 0.5756 0.359 0.9062 104 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 9/10 13.2G 0.5756 0.359 0.9062 104 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.942 0.889 0.945 0.825\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 10/10 11.6G 0.6089 0.3807 0.9001 170 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 10/10 13.2G 0.6089 0.3807 0.9001 170 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.942 0.884 0.945 0.826\n", "\n", - "10 epochs completed in 0.025 hours.\n", - "Optimizer stripped from runs/detect/step_4_finetune31/weights/last.pt, 164.3MB\n", - "Optimizer stripped from runs/detect/step_4_finetune31/weights/best.pt, 164.3MB\n", + "10 epochs completed in 0.035 hours.\n", + "Optimizer stripped from runs/detect/step_4_finetune32/weights/last.pt, 164.3MB\n", + "Optimizer stripped from runs/detect/step_4_finetune32/weights/best.pt, 164.3MB\n", "\n", - "Validating runs/detect/step_4_finetune31/weights/best.pt...\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Validating runs/detect/step_4_finetune32/weights/best.pt...\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 40919781 parameters, 0 gradients, 154.4 GFLOPs\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.942 0.884 0.945 0.827\n", - " person 128 254 0.975 0.827 0.944 0.782\n", - " bicycle 128 6 1 0.621 0.755 0.539\n", - " car 128 46 0.957 0.485 0.723 
0.404\n", - " motorcycle 128 5 0.938 1 0.995 0.965\n", - " airplane 128 6 0.952 1 0.995 0.95\n", - " bus 128 7 1 0.943 0.995 0.848\n", - " train 128 3 0.913 1 0.995 0.931\n", - " truck 128 12 1 0.573 0.831 0.656\n", - " boat 128 6 0.931 0.833 0.955 0.821\n", - " traffic light 128 14 0.973 0.571 0.665 0.365\n", - " stop sign 128 2 0.881 1 0.995 0.924\n", - " bench 128 9 0.974 1 0.995 0.918\n", - " bird 128 16 0.984 1 0.995 0.893\n", - " cat 128 4 0.927 1 0.995 0.995\n", - " dog 128 9 0.966 1 0.995 0.986\n", - " horse 128 2 0.874 1 0.995 0.995\n", - " elephant 128 17 0.98 1 0.995 0.923\n", - " bear 128 1 0.805 1 0.995 0.995\n", - " zebra 128 4 0.939 1 0.995 0.995\n", - " giraffe 128 9 0.971 1 0.995 0.973\n", - " backpack 128 6 0.931 0.833 0.847 0.776\n", - " umbrella 128 18 1 0.923 0.995 0.887\n", - " handbag 128 19 0.946 0.789 0.888 0.714\n", - " tie 128 7 1 0.759 0.871 0.767\n", - " suitcase 128 4 1 0.903 0.995 0.818\n", - " frisbee 128 5 0.947 0.8 0.938 0.782\n", - " skis 128 1 0.809 1 0.995 0.895\n", - " snowboard 128 7 0.955 0.857 0.872 0.789\n", - " sports ball 128 6 0.954 0.667 0.786 0.433\n", - " kite 128 10 1 0.705 0.995 0.55\n", - " baseball bat 128 4 1 0.948 0.995 0.72\n", - " baseball glove 128 7 0.908 0.429 0.448 0.375\n", - " skateboard 128 5 0.951 1 0.995 0.795\n", - " tennis racket 128 7 0.961 0.714 0.726 0.56\n", - " bottle 128 18 1 0.625 0.949 0.743\n", - " wine glass 128 16 0.92 0.721 0.951 0.705\n", - " cup 128 36 1 0.877 0.975 0.825\n", - " fork 128 6 1 0.966 0.995 0.848\n", - " knife 128 16 0.924 0.762 0.934 0.737\n", - " spoon 128 22 1 0.839 0.909 0.697\n", - " bowl 128 28 1 0.866 0.95 0.839\n", - " banana 128 1 0.807 1 0.995 0.995\n", - " sandwich 128 2 0.861 1 0.995 0.995\n", - " orange 128 4 0.921 1 0.995 0.905\n", - " broccoli 128 11 0.951 0.636 0.876 0.679\n", - " carrot 128 24 0.962 0.917 0.974 0.807\n", - " hot dog 128 2 0.869 1 0.995 0.995\n", - " pizza 128 5 0.939 1 0.995 0.995\n", - " donut 128 14 0.927 1 0.995 0.971\n", - " cake 128 4 0.933 1 0.995 0.962\n", - " chair 128 35 1 0.722 0.966 0.761\n", - " couch 128 6 0.947 1 0.995 0.95\n", - " potted plant 128 14 0.977 1 0.995 0.895\n", - " bed 128 3 0.91 1 0.995 0.943\n", - " dining table 128 13 0.98 1 0.995 0.879\n", - " toilet 128 2 0.885 1 0.995 0.995\n", - " tv 128 2 0.884 1 0.995 0.995\n", - " laptop 128 3 0.926 1 0.995 0.964\n", - " mouse 128 2 1 0.655 0.995 0.703\n", - " remote 128 8 0.976 0.75 0.763 0.684\n", - " cell phone 128 8 1 0.892 0.995 0.771\n", - " microwave 128 3 0.907 1 0.995 0.895\n", - " oven 128 5 0.947 1 0.995 0.865\n", - " sink 128 6 0.826 0.797 0.948 0.759\n", - " refrigerator 128 5 0.944 1 0.995 0.948\n", - " book 128 29 0.942 0.56 0.864 0.612\n", - " clock 128 9 0.965 1 0.995 0.929\n", - " vase 128 2 0.873 1 0.995 0.995\n", - " scissors 128 1 0.804 1 0.995 0.995\n", - " teddy bear 128 21 0.928 1 0.995 0.89\n", - " toothbrush 128 5 0.943 1 0.995 0.876\n", - "Speed: 0.1ms preprocess, 5.4ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_4_finetune31\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.1ms preprocess, 4.9ms inference, 0.0ms loss, 0.3ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_4_finetune32\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -2400,82 +1201,12 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 40919781 
parameters, 0 gradients, 154.4 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.929 0.891 0.944 0.82\n", - " person 128 254 0.955 0.843 0.944 0.782\n", - " bicycle 128 6 1 0.624 0.731 0.531\n", - " car 128 46 0.924 0.529 0.715 0.4\n", - " motorcycle 128 5 0.929 1 0.995 0.944\n", - " airplane 128 6 0.946 1 0.995 0.95\n", - " bus 128 7 1 0.94 0.995 0.82\n", - " train 128 3 0.912 1 0.995 0.883\n", - " truck 128 12 1 0.666 0.837 0.686\n", - " boat 128 6 0.919 0.833 0.972 0.8\n", - " traffic light 128 14 0.954 0.571 0.653 0.361\n", - " stop sign 128 2 0.864 1 0.995 0.949\n", - " bench 128 9 0.966 1 0.995 0.917\n", - " bird 128 16 0.979 1 0.995 0.881\n", - " cat 128 4 0.916 1 0.995 0.995\n", - " dog 128 9 0.961 1 0.995 0.986\n", - " horse 128 2 0.854 1 0.995 0.995\n", - " elephant 128 17 0.975 1 0.995 0.929\n", - " bear 128 1 0.779 1 0.995 0.895\n", - " zebra 128 4 0.927 1 0.995 0.995\n", - " giraffe 128 9 0.963 1 0.995 0.933\n", - " backpack 128 6 0.912 0.833 0.846 0.791\n", - " umbrella 128 18 1 0.935 0.992 0.869\n", - " handbag 128 19 0.939 0.815 0.888 0.729\n", - " tie 128 7 1 0.818 0.873 0.752\n", - " suitcase 128 4 1 0.951 0.995 0.758\n", - " frisbee 128 5 0.936 0.8 0.962 0.776\n", - " skis 128 1 0.783 1 0.995 0.995\n", - " snowboard 128 7 0.946 0.857 0.873 0.796\n", - " sports ball 128 6 0.935 0.667 0.775 0.412\n", - " kite 128 10 1 0.716 0.995 0.529\n", - " baseball bat 128 4 1 0.98 0.995 0.685\n", - " baseball glove 128 7 0.887 0.429 0.448 0.39\n", - " skateboard 128 5 0.932 1 0.995 0.768\n", - " tennis racket 128 7 0.931 0.714 0.726 0.575\n", - " bottle 128 18 1 0.667 0.943 0.699\n", - " wine glass 128 16 0.926 0.785 0.964 0.697\n", - " cup 128 36 0.969 0.857 0.966 0.816\n", - " fork 128 6 0.938 1 0.995 0.773\n", - " knife 128 16 0.929 0.819 0.945 0.738\n", - " spoon 128 22 1 0.854 0.909 0.695\n", - " bowl 128 28 0.983 0.893 0.946 0.822\n", - " banana 128 1 0.783 1 0.995 0.995\n", - " sandwich 128 2 0.822 1 0.995 0.995\n", - " orange 128 4 0.91 1 0.995 0.895\n", - " broccoli 128 11 1 0.687 0.886 0.691\n", - " carrot 128 24 0.915 0.917 0.97 0.793\n", - " hot dog 128 2 0.842 1 0.995 0.995\n", - " pizza 128 5 0.931 1 0.995 0.959\n", - " donut 128 14 0.922 1 0.995 0.97\n", - " cake 128 4 0.921 1 0.995 0.995\n", - " chair 128 35 1 0.716 0.957 0.739\n", - " couch 128 6 0.944 1 0.995 0.95\n", - " potted plant 128 14 0.974 1 0.995 0.864\n", - " bed 128 3 0.897 1 0.995 0.955\n", - " dining table 128 13 0.971 1 0.995 0.857\n", - " toilet 128 2 0.866 1 0.995 0.995\n", - " tv 128 2 0.864 1 0.995 0.995\n", - " laptop 128 3 0.883 1 0.995 0.914\n", - " mouse 128 2 1 0.692 0.995 0.802\n", - " remote 128 8 1 0.72 0.766 0.711\n", - " cell phone 128 8 0.941 0.75 0.967 0.711\n", - " microwave 128 3 0.888 1 0.995 0.915\n", - " oven 128 5 0.926 1 0.995 0.864\n", - " sink 128 6 0.822 0.776 0.924 0.709\n", - " refrigerator 128 5 0.933 1 0.995 0.977\n", - " book 128 29 0.894 0.582 0.868 0.616\n", - " clock 128 9 0.951 1 0.995 0.931\n", - " vase 128 2 0.857 1 0.995 0.995\n", - " scissors 128 1 0.776 1 0.995 0.995\n", - " teddy bear 128 21 0.925 1 0.995 0.89\n", - " toothbrush 128 5 0.932 1 0.995 
0.863\n", - "Speed: 0.2ms preprocess, 13.3ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_4_post_val30\u001b[0m\n" + "Speed: 0.1ms preprocess, 12.5ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_4_post_val31\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -2496,86 +1227,14 @@ "name": "stderr", "output_type": "stream", "text": [ - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", "YOLOv8l summary (fused): 285 layers, 39455305 parameters, 74176 gradients, 149.4 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.916 0.864 0.929 0.789\n", - " person 128 254 0.962 0.807 0.93 0.743\n", - " bicycle 128 6 1 0.597 0.806 0.511\n", - " car 128 46 1 0.378 0.643 0.362\n", - " motorcycle 128 5 1 0.918 0.995 0.925\n", - " airplane 128 6 0.934 1 0.995 0.896\n", - " bus 128 7 1 0.811 0.995 0.839\n", - " train 128 3 0.848 1 0.995 0.889\n", - " truck 128 12 0.925 0.5 0.68 0.583\n", - " boat 128 6 0.912 0.833 0.942 0.753\n", - " traffic light 128 14 0.945 0.429 0.56 0.282\n", - " stop sign 128 2 0.825 1 0.995 0.895\n", - " bench 128 9 0.979 1 0.995 0.886\n", - " bird 128 16 0.96 1 0.995 0.809\n", - " cat 128 4 0.892 1 0.995 0.908\n", - " dog 128 9 0.954 1 0.995 0.942\n", - " horse 128 2 0.709 1 0.995 0.995\n", - " elephant 128 17 0.988 1 0.995 0.917\n", - " bear 128 1 0.727 1 0.995 0.995\n", - " zebra 128 4 0.907 1 0.995 0.978\n", - " giraffe 128 9 0.941 1 0.995 0.908\n", - " backpack 128 6 0.883 0.833 0.846 0.748\n", - " umbrella 128 18 0.963 1 0.995 0.846\n", - " handbag 128 19 0.94 0.821 0.848 0.699\n", - " tie 128 7 0.897 0.714 0.732 0.648\n", - " suitcase 128 4 0.947 1 0.995 0.848\n", - " frisbee 128 5 0.921 0.8 0.823 0.744\n", - " skis 128 1 0.725 1 0.995 0.895\n", - " snowboard 128 7 0.931 0.857 0.872 0.72\n", - " sports ball 128 6 1 0.477 0.64 0.405\n", - " kite 128 10 1 0.765 0.922 0.505\n", - " baseball bat 128 4 1 0.912 0.995 0.684\n", - " baseball glove 128 7 0.928 0.429 0.453 0.345\n", - " skateboard 128 5 0.871 1 0.995 0.708\n", - " tennis racket 128 7 0.929 0.714 0.729 0.597\n", - " bottle 128 18 0.985 0.722 0.938 0.64\n", - " wine glass 128 16 0.87 0.836 0.961 0.677\n", - " cup 128 36 0.936 0.818 0.91 0.74\n", - " fork 128 6 1 0.931 0.995 0.72\n", - " knife 128 16 0.919 0.812 0.921 0.634\n", - " spoon 128 22 0.942 0.732 0.86 0.675\n", - " bowl 128 28 1 0.857 0.939 0.785\n", - " banana 128 1 0.733 1 0.995 0.995\n", - " sandwich 128 2 0.806 1 0.995 0.995\n", - " orange 128 4 0.899 1 0.995 0.86\n", - " broccoli 128 11 1 0.548 0.874 0.635\n", - " carrot 128 24 0.987 0.917 0.978 0.768\n", - " hot dog 128 2 0.79 1 0.995 0.995\n", - " pizza 128 5 0.913 1 0.995 0.978\n", - " donut 128 14 0.91 1 0.99 0.942\n", - " cake 128 4 0.907 1 0.995 0.949\n", - " chair 128 35 0.979 0.743 0.925 0.701\n", - " couch 128 6 0.929 1 0.995 0.834\n", - " potted plant 128 14 1 0.972 0.995 0.861\n", - " bed 128 3 0.876 1 0.995 0.953\n", - " dining table 128 13 0.943 0.923 
0.99 0.833\n", - " toilet 128 2 0.825 1 0.995 0.949\n", - " tv 128 2 0.882 1 0.995 0.948\n", - " laptop 128 3 1 0.977 0.995 0.931\n", - " mouse 128 2 1 0.717 0.995 0.702\n", - " remote 128 8 0.837 0.5 0.729 0.681\n", - " cell phone 128 8 1 0.704 0.899 0.663\n", - " microwave 128 3 0.859 1 0.995 0.914\n", - " oven 128 5 0.904 1 0.995 0.896\n", - " sink 128 6 0.834 0.837 0.948 0.704\n", - " refrigerator 128 5 0.978 1 0.995 0.893\n", - " book 128 29 0.862 0.648 0.863 0.616\n", - " clock 128 9 1 0.519 0.995 0.88\n", - " vase 128 2 0.817 1 0.995 0.946\n", - " scissors 128 1 0.726 1 0.995 0.995\n", - " teddy bear 128 21 0.927 1 0.995 0.852\n", - " toothbrush 128 5 0.918 1 0.995 0.879\n", - "Speed: 0.2ms preprocess, 13.5ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_5_pre_val30\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_5_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_5_finetune30\n", + "Speed: 0.2ms preprocess, 13.0ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_5_pre_val31\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", + "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_5_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, 
keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_5_finetune31\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -2594,148 +1253,78 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_5_finetune30/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_5_finetune31/labels.jpg... \n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_5_finetune30\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_5_finetune31\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 12.1G 0.5773 0.3687 0.8973 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 13.5G 0.5773 0.3687 0.8973 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.904 0.881 0.935 0.801\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 11.8G 0.4697 0.3058 0.8551 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 13G 0.4697 0.3058 0.8551 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.906 0.893 0.941 0.807\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 11.6G 0.5138 0.3263 0.8829 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 12.9G 0.5138 0.3263 0.8829 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.92 0.889 0.942 0.811\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 11.6G 0.4938 0.3293 0.8823 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 13G 0.4938 0.3293 0.8823 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 
100%|██████████|\n", " all 128 929 0.928 0.883 0.944 0.813\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 5/10 11.6G 0.5047 0.3398 0.8637 96 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 5/10 12.9G 0.5047 0.3398 0.8637 96 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.934 0.88 0.943 0.819\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 6/10 11.7G 0.5144 0.3483 0.886 120 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 6/10 13G 0.5144 0.3483 0.886 120 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.942 0.869 0.94 0.815\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 7/10 11.6G 0.539 0.3521 0.879 69 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 7/10 13G 0.539 0.3521 0.879 69 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.943 0.865 0.941 0.814\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 8/10 11.6G 0.5493 0.3683 0.895 141 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 8/10 13G 0.5493 0.3683 0.895 141 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.934 0.875 0.939 0.817\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 9/10 11.6G 0.581 0.3607 0.9096 104 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 9/10 13G 0.581 0.3607 0.9096 104 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.939 0.876 0.94 0.82\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 10/10 11.6G 0.6293 0.3874 0.9156 170 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 10/10 12.9G 0.6293 0.3874 0.9156 170 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.938 0.886 0.946 0.822\n", "\n", - "10 epochs completed in 0.036 hours.\n", - "Optimizer stripped from runs/detect/step_5_finetune30/weights/last.pt, 158.5MB\n", - "Optimizer stripped from runs/detect/step_5_finetune30/weights/best.pt, 158.5MB\n", + "10 epochs completed in 0.035 hours.\n", + "Optimizer stripped from runs/detect/step_5_finetune31/weights/last.pt, 158.5MB\n", + "Optimizer stripped from runs/detect/step_5_finetune31/weights/best.pt, 158.5MB\n", "\n", - "Validating runs/detect/step_5_finetune30/weights/best.pt...\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Validating runs/detect/step_5_finetune31/weights/best.pt...\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 39455305 parameters, 0 gradients, 149.4 GFLOPs\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.938 0.886 0.946 0.822\n", - " person 
128 254 0.986 0.815 0.947 0.783\n", - " bicycle 128 6 0.997 0.667 0.793 0.571\n", - " car 128 46 0.917 0.482 0.751 0.427\n", - " motorcycle 128 5 0.946 1 0.995 0.929\n", - " airplane 128 6 0.956 1 0.995 0.929\n", - " bus 128 7 0.996 1 0.995 0.897\n", - " train 128 3 0.932 1 0.995 0.93\n", - " truck 128 12 0.973 0.583 0.796 0.654\n", - " boat 128 6 0.947 0.833 0.972 0.785\n", - " traffic light 128 14 0.875 0.5 0.646 0.348\n", - " stop sign 128 2 0.905 1 0.995 0.924\n", - " bench 128 9 0.952 1 0.995 0.903\n", - " bird 128 16 0.98 1 0.995 0.884\n", - " cat 128 4 0.926 1 0.995 0.974\n", - " dog 128 9 0.966 1 0.995 0.867\n", - " horse 128 2 0.739 1 0.995 0.949\n", - " elephant 128 17 0.979 1 0.995 0.879\n", - " bear 128 1 0.811 1 0.995 0.995\n", - " zebra 128 4 0.932 1 0.995 0.995\n", - " giraffe 128 9 0.964 1 0.995 0.942\n", - " backpack 128 6 0.941 0.833 0.845 0.793\n", - " umbrella 128 18 0.967 1 0.995 0.837\n", - " handbag 128 19 0.936 0.766 0.845 0.695\n", - " tie 128 7 0.962 0.857 0.873 0.794\n", - " suitcase 128 4 0.982 1 0.995 0.827\n", - " frisbee 128 5 0.995 0.8 0.962 0.802\n", - " skis 128 1 0.841 1 0.995 0.895\n", - " snowboard 128 7 0.974 0.857 0.93 0.81\n", - " sports ball 128 6 0.91 0.667 0.8 0.418\n", - " kite 128 10 1 0.599 0.995 0.58\n", - " baseball bat 128 4 1 0.889 0.995 0.731\n", - " baseball glove 128 7 0.713 0.429 0.486 0.337\n", - " skateboard 128 5 1 0.941 0.995 0.809\n", - " tennis racket 128 7 0.919 0.714 0.724 0.584\n", - " bottle 128 18 1 0.594 0.925 0.7\n", - " wine glass 128 16 0.841 0.665 0.938 0.667\n", - " cup 128 36 1 0.91 0.966 0.824\n", - " fork 128 6 1 0.869 0.995 0.809\n", - " knife 128 16 1 0.788 0.964 0.73\n", - " spoon 128 22 1 0.895 0.91 0.76\n", - " bowl 128 28 1 0.849 0.942 0.826\n", - " banana 128 1 0.826 1 0.995 0.995\n", - " sandwich 128 2 0.878 1 0.995 0.995\n", - " orange 128 4 0.943 1 0.995 0.862\n", - " broccoli 128 11 0.963 0.727 0.889 0.672\n", - " carrot 128 24 1 0.948 0.988 0.823\n", - " hot dog 128 2 0.878 1 0.995 0.995\n", - " pizza 128 5 0.94 1 0.995 0.995\n", - " donut 128 14 0.899 1 0.995 0.985\n", - " cake 128 4 0.936 1 0.995 0.974\n", - " chair 128 35 1 0.788 0.966 0.763\n", - " couch 128 6 0.941 1 0.995 0.898\n", - " potted plant 128 14 0.983 1 0.995 0.953\n", - " bed 128 3 0.91 1 0.995 0.942\n", - " dining table 128 13 0.925 0.952 0.979 0.883\n", - " toilet 128 2 0.888 1 0.995 0.995\n", - " tv 128 2 0.894 1 0.995 0.995\n", - " laptop 128 3 0.841 1 0.995 0.963\n", - " mouse 128 2 1 0.811 0.995 0.686\n", - " remote 128 8 1 0.684 0.763 0.684\n", - " cell phone 128 8 1 0.824 0.944 0.693\n", - " microwave 128 3 0.912 1 0.995 0.91\n", - " oven 128 5 0.952 1 0.995 0.923\n", - " sink 128 6 0.91 0.833 0.972 0.798\n", - " refrigerator 128 5 0.944 1 0.995 0.938\n", - " book 128 29 1 0.551 0.867 0.597\n", - " clock 128 9 0.967 1 0.995 0.855\n", - " vase 128 2 0.883 1 0.995 0.995\n", - " scissors 128 1 0.809 1 0.995 0.995\n", - " teddy bear 128 21 0.988 1 0.995 0.907\n", - " toothbrush 128 5 0.949 1 0.995 0.926\n", - "Speed: 0.1ms preprocess, 5.6ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_5_finetune30\u001b[0m\n" + "Speed: 0.1ms preprocess, 4.8ms inference, 0.0ms loss, 0.2ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_5_finetune31\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -2751,85 +1340,13 @@ "name": "stderr", "output_type": "stream", "text": [ - "Ultralytics YOLOv8.0.132 🚀 
Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", "YOLOv8l summary (fused): 285 layers, 39455305 parameters, 0 gradients, 149.4 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.928 0.891 0.943 0.82\n", - " person 128 254 0.977 0.826 0.945 0.791\n", - " bicycle 128 6 0.989 0.667 0.735 0.552\n", - " car 128 46 0.922 0.516 0.776 0.434\n", - " motorcycle 128 5 0.941 1 0.995 0.925\n", - " airplane 128 6 0.952 1 0.995 0.946\n", - " bus 128 7 1 0.986 0.995 0.895\n", - " train 128 3 0.923 1 0.995 0.768\n", - " truck 128 12 0.957 0.5 0.716 0.585\n", - " boat 128 6 0.942 0.833 0.972 0.773\n", - " traffic light 128 14 0.876 0.506 0.588 0.35\n", - " stop sign 128 2 0.892 1 0.995 0.923\n", - " bench 128 9 0.944 1 0.995 0.897\n", - " bird 128 16 0.982 1 0.995 0.888\n", - " cat 128 4 0.92 1 0.995 0.973\n", - " dog 128 9 0.963 1 0.995 0.855\n", - " horse 128 2 0.685 1 0.995 0.995\n", - " elephant 128 17 0.979 1 0.995 0.892\n", - " bear 128 1 0.797 1 0.995 0.995\n", - " zebra 128 4 0.927 1 0.995 0.995\n", - " giraffe 128 9 0.96 1 0.995 0.95\n", - " backpack 128 6 0.934 0.833 0.845 0.763\n", - " umbrella 128 18 0.928 1 0.995 0.829\n", - " handbag 128 19 0.936 0.767 0.844 0.689\n", - " tie 128 7 0.962 0.857 0.869 0.79\n", - " suitcase 128 4 0.993 1 0.995 0.848\n", - " frisbee 128 5 1 0.81 0.995 0.805\n", - " skis 128 1 0.828 1 0.995 0.895\n", - " snowboard 128 7 0.951 0.857 0.916 0.814\n", - " sports ball 128 6 0.91 0.667 0.82 0.391\n", - " kite 128 10 1 0.697 0.986 0.593\n", - " baseball bat 128 4 1 0.907 0.995 0.796\n", - " baseball glove 128 7 0.706 0.429 0.442 0.35\n", - " skateboard 128 5 0.913 1 0.995 0.84\n", - " tennis racket 128 7 0.905 0.714 0.725 0.585\n", - " bottle 128 18 1 0.606 0.925 0.661\n", - " wine glass 128 16 0.859 0.765 0.941 0.676\n", - " cup 128 36 0.915 0.917 0.948 0.824\n", - " fork 128 6 0.988 1 0.995 0.754\n", - " knife 128 16 1 0.811 0.961 0.711\n", - " spoon 128 22 1 0.906 0.909 0.778\n", - " bowl 128 28 1 0.857 0.943 0.832\n", - " banana 128 1 0.812 1 0.995 0.995\n", - " sandwich 128 2 0.868 1 0.995 0.995\n", - " orange 128 4 0.94 1 0.995 0.863\n", - " broccoli 128 11 1 0.737 0.886 0.674\n", - " carrot 128 24 1 0.957 0.989 0.838\n", - " hot dog 128 2 0.867 1 0.995 0.995\n", - " pizza 128 5 0.936 1 0.995 0.96\n", - " donut 128 14 0.869 1 0.995 0.985\n", - " cake 128 4 0.931 1 0.995 0.995\n", - " chair 128 35 1 0.84 0.971 0.763\n", - " couch 128 6 0.944 1 0.995 0.923\n", - " potted plant 128 14 0.954 1 0.995 0.927\n", - " bed 128 3 0.902 1 0.995 0.942\n", - " dining table 128 13 0.919 1 0.974 0.884\n", - " toilet 128 2 0.877 1 0.995 0.995\n", - " tv 128 2 0.876 1 0.995 0.995\n", - " laptop 128 3 0.901 1 0.995 0.914\n", - " mouse 128 2 1 0.76 0.995 0.713\n", - " remote 128 8 0.98 0.625 0.747 0.672\n", - " cell phone 128 8 1 0.745 0.955 0.698\n", - " microwave 128 3 0.851 1 0.995 0.963\n", - " oven 128 5 0.949 1 0.995 0.894\n", - " sink 128 6 0.901 0.833 0.972 0.779\n", - " refrigerator 128 5 0.938 1 0.995 0.951\n", - " book 128 29 0.892 0.552 0.834 0.61\n", - " clock 128 9 0.962 1 0.995 0.864\n", - " vase 128 2 0.876 1 0.995 0.995\n", - " 
scissors 128 1 0.796 1 0.995 0.995\n", - " teddy bear 128 21 0.985 1 0.995 0.9\n", - " toothbrush 128 5 0.942 1 0.995 0.919\n", - "Speed: 0.2ms preprocess, 13.6ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_5_post_val30\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.2ms preprocess, 12.9ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_5_post_val31\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -2851,84 +1368,13 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 37708749 parameters, 74176 gradients, 143.2 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.904 0.848 0.923 0.76\n", - " person 128 254 0.953 0.804 0.915 0.71\n", - " bicycle 128 6 1 0.478 0.766 0.546\n", - " car 128 46 0.908 0.428 0.68 0.381\n", - " motorcycle 128 5 1 0.944 0.995 0.884\n", - " airplane 128 6 0.93 1 0.995 0.933\n", - " bus 128 7 1 0.923 0.995 0.817\n", - " train 128 3 0.932 1 0.995 0.732\n", - " truck 128 12 1 0.43 0.753 0.542\n", - " boat 128 6 0.932 0.833 0.931 0.721\n", - " traffic light 128 14 0.65 0.429 0.419 0.311\n", - " stop sign 128 2 0.825 1 0.995 0.895\n", - " bench 128 9 0.863 1 0.995 0.769\n", - " bird 128 16 0.963 1 0.995 0.78\n", - " cat 128 4 0.892 1 0.995 0.888\n", - " dog 128 9 0.957 1 0.995 0.846\n", - " horse 128 2 0.686 1 0.995 0.895\n", - " elephant 128 17 0.975 1 0.995 0.872\n", - " bear 128 1 0.526 1 0.995 0.995\n", - " zebra 128 4 0.887 1 0.995 0.974\n", - " giraffe 128 9 0.955 1 0.995 0.943\n", - " backpack 128 6 0.88 0.833 0.877 0.736\n", - " umbrella 128 18 0.9 0.944 0.99 0.801\n", - " handbag 128 19 0.936 0.766 0.842 0.619\n", - " tie 128 7 0.926 0.857 0.871 0.768\n", - " suitcase 128 4 0.91 1 0.995 0.789\n", - " frisbee 128 5 1 0.738 0.92 0.752\n", - " skis 128 1 0.731 1 0.995 0.796\n", - " snowboard 128 7 0.939 0.857 0.874 0.671\n", - " sports ball 128 6 0.881 0.667 0.762 0.41\n", - " kite 128 10 1 0.648 0.935 0.593\n", - " baseball bat 128 4 0.987 1 0.995 0.718\n", - " baseball glove 128 7 0.882 0.429 0.443 0.342\n", - " skateboard 128 5 0.885 0.6 0.82 0.623\n", - " tennis racket 128 7 0.968 0.714 0.724 0.535\n", - " bottle 128 18 0.933 0.769 0.944 0.59\n", - " wine glass 128 16 0.776 1 0.951 0.658\n", - " cup 128 36 0.875 0.944 0.954 0.737\n", - " fork 128 6 1 0.563 0.942 0.684\n", - " knife 128 16 0.776 0.649 0.815 0.526\n", - " spoon 128 22 0.91 0.773 0.854 0.658\n", - " bowl 128 28 0.883 0.857 0.898 0.74\n", - " banana 128 1 0.711 1 0.995 0.895\n", - " sandwich 128 2 0.806 1 0.995 0.947\n", - " orange 128 4 1 0.637 0.995 0.87\n", - " broccoli 128 11 1 0.272 0.758 0.527\n", - " carrot 128 24 0.925 0.516 0.824 0.611\n", - " hot dog 128 2 0.8 1 0.995 0.995\n", - " pizza 128 5 0.902 1 0.995 0.92\n", - " donut 128 14 0.952 1 0.995 0.947\n", - " cake 128 4 1 0.997 0.995 0.895\n", - " chair 128 35 0.916 0.8 0.961 0.746\n", - " couch 128 6 0.875 1 0.995 0.857\n", - " potted plant 128 
14 1 0.95 0.995 0.858\n", - " bed 128 3 0.827 1 0.995 0.908\n", - " dining table 128 13 0.86 0.943 0.944 0.835\n", - " toilet 128 2 0.838 1 0.995 0.947\n", - " tv 128 2 0.959 1 0.995 0.948\n", - " laptop 128 3 1 0.981 0.995 0.885\n", - " mouse 128 2 1 0.741 0.995 0.616\n", - " remote 128 8 1 0.624 0.735 0.633\n", - " cell phone 128 8 1 0.673 0.891 0.602\n", - " microwave 128 3 0.84 1 0.995 0.895\n", - " oven 128 5 0.955 1 0.995 0.855\n", - " sink 128 6 0.876 0.833 0.972 0.687\n", - " refrigerator 128 5 1 0.984 0.995 0.906\n", - " book 128 29 0.817 0.552 0.833 0.511\n", - " clock 128 9 1 0.884 0.947 0.808\n", - " vase 128 2 0.892 1 0.995 0.995\n", - " scissors 128 1 0.684 1 0.995 0.995\n", - " teddy bear 128 21 1 0.935 0.995 0.866\n", - " toothbrush 128 5 0.919 1 0.995 0.851\n", - "Speed: 0.2ms preprocess, 11.4ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_6_pre_val27\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_6_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_6_finetune27\n", + "Speed: 0.2ms preprocess, 10.9ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_6_pre_val28\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", + "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_6_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, 
augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_6_finetune28\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -2947,149 +1393,78 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_6_finetune27/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_6_finetune28/labels.jpg... \n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_6_finetune27\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_6_finetune28\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 11.3G 0.6267 0.3973 0.9214 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 13.3G 0.6267 0.3973 0.9214 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.909 0.859 0.932 0.782\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 11.2G 0.5114 0.3263 0.8662 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 12.8G 0.5114 0.3263 0.8662 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.932 0.874 0.939 0.803\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 11.1G 0.5466 0.3434 0.8876 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 12.7G 0.5466 0.3434 0.8876 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.931 0.875 0.939 0.808\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 11.1G 0.5238 0.3378 0.8878 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 12.7G 0.5238 
0.3378 0.8878 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.939 0.874 0.939 0.81\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 5/10 10.8G 0.5198 0.3522 0.8708 96 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 5/10 12.7G 0.5198 0.3522 0.8708 96 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.947 0.87 0.94 0.808\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 6/10 11G 0.5276 0.352 0.8876 120 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 6/10 12.8G 0.5276 0.352 0.8876 120 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.933 0.879 0.939 0.81\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 7/10 10.8G 0.5516 0.3594 0.8792 69 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 7/10 12.8G 0.5516 0.3594 0.8792 69 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.931 0.882 0.943 0.81\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 8/10 10.8G 0.5795 0.3736 0.9109 141 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 8/10 12.8G 0.5795 0.3736 0.9109 141 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.933 0.893 0.949 0.817\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 9/10 11.1G 0.597 0.3801 0.9136 104 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 9/10 12.8G 0.597 0.3801 0.9136 104 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.932 0.89 0.948 0.815\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 10/10 11.1G 0.6406 0.3889 0.9059 170 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 10/10 12.7G 0.6406 0.3889 0.9059 170 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.928 0.899 0.948 0.816\n", "\n", - "10 epochs completed in 0.024 hours.\n", - "Optimizer stripped from runs/detect/step_6_finetune27/weights/last.pt, 151.5MB\n", - "Optimizer stripped from runs/detect/step_6_finetune27/weights/best.pt, 151.5MB\n", + "10 epochs completed in 0.034 hours.\n", + "Optimizer stripped from runs/detect/step_6_finetune28/weights/last.pt, 151.5MB\n", + "Optimizer stripped from runs/detect/step_6_finetune28/weights/best.pt, 151.5MB\n", "\n", - "Validating runs/detect/step_6_finetune27/weights/best.pt...\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Validating runs/detect/step_6_finetune28/weights/best.pt...\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 37708749 parameters, 0 gradients, 143.2 GFLOPs\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " Class 
Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.933 0.889 0.948 0.817\n", - " person 128 254 0.973 0.837 0.949 0.777\n", - " bicycle 128 6 0.819 0.5 0.728 0.528\n", - " car 128 46 0.961 0.538 0.729 0.411\n", - " motorcycle 128 5 0.934 1 0.995 0.953\n", - " airplane 128 6 0.955 1 0.995 0.944\n", - " bus 128 7 0.988 1 0.995 0.887\n", - " train 128 3 0.91 1 0.995 0.966\n", - " truck 128 12 1 0.575 0.833 0.619\n", - " boat 128 6 0.941 0.833 0.972 0.814\n", - " traffic light 128 14 0.841 0.5 0.57 0.35\n", - " stop sign 128 2 0.879 1 0.995 0.949\n", - " bench 128 9 0.973 1 0.995 0.949\n", - " bird 128 16 0.969 1 0.995 0.807\n", - " cat 128 4 0.934 1 0.995 0.967\n", - " dog 128 9 0.963 1 0.995 0.916\n", - " horse 128 2 0.869 1 0.995 0.949\n", - " elephant 128 17 1 0.981 0.995 0.9\n", - " bear 128 1 0.8 1 0.995 0.995\n", - " zebra 128 4 0.927 1 0.995 0.976\n", - " giraffe 128 9 0.962 1 0.995 0.951\n", - " backpack 128 6 0.931 0.833 0.847 0.775\n", - " umbrella 128 18 0.977 1 0.995 0.851\n", - " handbag 128 19 0.874 0.73 0.871 0.681\n", - " tie 128 7 0.95 0.857 0.875 0.73\n", - " suitcase 128 4 0.956 1 0.995 0.827\n", - " frisbee 128 5 1 0.859 0.995 0.815\n", - " skis 128 1 0.811 1 0.995 0.895\n", - " snowboard 128 7 0.952 0.857 0.953 0.749\n", - " sports ball 128 6 0.97 0.667 0.823 0.378\n", - " kite 128 10 0.999 0.9 0.962 0.619\n", - " baseball bat 128 4 1 0.69 0.995 0.746\n", - " baseball glove 128 7 1 0.484 0.606 0.412\n", - " skateboard 128 5 0.962 1 0.995 0.77\n", - " tennis racket 128 7 0.911 0.714 0.723 0.589\n", - " bottle 128 18 1 0.705 0.951 0.631\n", - " wine glass 128 16 1 0.649 0.916 0.651\n", - " cup 128 36 0.971 0.916 0.966 0.814\n", - " fork 128 6 1 0.984 0.995 0.765\n", - " knife 128 16 1 0.763 0.931 0.693\n", - " spoon 128 22 0.952 0.9 0.939 0.745\n", - " bowl 128 28 1 0.818 0.944 0.817\n", - " banana 128 1 0.801 1 0.995 0.995\n", - " sandwich 128 2 0.807 1 0.995 0.995\n", - " orange 128 4 0.793 1 0.995 0.904\n", - " broccoli 128 11 0.93 0.818 0.898 0.614\n", - " carrot 128 24 0.955 0.875 0.98 0.773\n", - " hot dog 128 2 0.868 1 0.995 0.995\n", - " pizza 128 5 0.937 1 0.995 0.978\n", - " donut 128 14 0.924 1 0.995 0.98\n", - " cake 128 4 0.925 1 0.995 0.938\n", - " chair 128 35 0.967 0.835 0.964 0.765\n", - " couch 128 6 0.938 1 0.995 0.939\n", - " potted plant 128 14 0.92 1 0.962 0.836\n", - " bed 128 3 0.901 1 0.995 0.995\n", - " dining table 128 13 1 0.912 0.995 0.866\n", - " toilet 128 2 0.892 1 0.995 0.995\n", - " tv 128 2 0.872 1 0.995 0.995\n", - " laptop 128 3 1 0.961 0.995 0.964\n", - " mouse 128 2 1 0.715 0.995 0.71\n", - " remote 128 8 0.841 0.625 0.799 0.67\n", - " cell phone 128 8 1 0.783 0.982 0.705\n", - " microwave 128 3 0.909 1 0.995 0.935\n", - " oven 128 5 0.957 1 0.995 0.893\n", - " sink 128 6 1 0.943 0.995 0.79\n", - " refrigerator 128 5 0.936 1 0.995 0.923\n", - " book 128 29 0.81 0.588 0.869 0.6\n", - " clock 128 9 0.968 1 0.995 0.926\n", - " vase 128 2 0.86 1 0.995 0.995\n", - " scissors 128 1 0.808 1 0.995 0.895\n", - " teddy bear 128 21 0.988 1 0.995 0.917\n", - " toothbrush 128 5 0.942 1 0.995 0.929\n", - "Speed: 0.1ms preprocess, 5.4ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_6_finetune27\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.1ms preprocess, 4.8ms inference, 0.0ms loss, 0.2ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_6_finetune28\u001b[0m\n", + 
"Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -3106,82 +1481,11 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 37708749 parameters, 0 gradients, 143.2 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.937 0.878 0.946 0.808\n", - " person 128 254 0.977 0.832 0.948 0.787\n", - " bicycle 128 6 0.849 0.5 0.704 0.523\n", - " car 128 46 0.96 0.516 0.731 0.405\n", - " motorcycle 128 5 0.943 1 0.995 0.928\n", - " airplane 128 6 0.964 1 0.995 0.928\n", - " bus 128 7 1 0.978 0.995 0.869\n", - " train 128 3 0.909 1 0.995 0.893\n", - " truck 128 12 0.887 0.583 0.734 0.606\n", - " boat 128 6 0.959 0.833 0.972 0.814\n", - " traffic light 128 14 0.855 0.5 0.538 0.362\n", - " stop sign 128 2 0.894 1 0.995 0.95\n", - " bench 128 9 0.987 1 0.995 0.938\n", - " bird 128 16 0.965 1 0.995 0.802\n", - " cat 128 4 0.942 1 0.995 0.948\n", - " dog 128 9 0.971 1 0.995 0.881\n", - " horse 128 2 0.888 1 0.995 0.995\n", - " elephant 128 17 1 0.972 0.995 0.928\n", - " bear 128 1 0.821 1 0.995 0.995\n", - " zebra 128 4 0.937 1 0.995 0.974\n", - " giraffe 128 9 0.966 1 0.995 0.93\n", - " backpack 128 6 0.94 0.833 0.845 0.751\n", - " umbrella 128 18 0.99 1 0.995 0.829\n", - " handbag 128 19 0.869 0.684 0.874 0.68\n", - " tie 128 7 0.959 0.857 0.873 0.724\n", - " suitcase 128 4 0.961 1 0.995 0.831\n", - " frisbee 128 5 0.95 0.8 0.962 0.805\n", - " skis 128 1 0.832 1 0.995 0.895\n", - " snowboard 128 7 0.959 0.857 0.953 0.771\n", - " sports ball 128 6 1 0.686 0.85 0.338\n", - " kite 128 10 1 0.882 0.962 0.637\n", - " baseball bat 128 4 1 0.657 0.995 0.749\n", - " baseball glove 128 7 1 0.476 0.604 0.391\n", - " skateboard 128 5 0.95 1 0.995 0.792\n", - " tennis racket 128 7 0.923 0.714 0.721 0.585\n", - " bottle 128 18 1 0.681 0.957 0.607\n", - " wine glass 128 16 1 0.593 0.949 0.664\n", - " cup 128 36 0.969 0.873 0.978 0.815\n", - " fork 128 6 0.987 1 0.995 0.751\n", - " knife 128 16 1 0.713 0.923 0.665\n", - " spoon 128 22 0.928 0.864 0.929 0.724\n", - " bowl 128 28 1 0.813 0.942 0.8\n", - " banana 128 1 0.821 1 0.995 0.995\n", - " sandwich 128 2 0.859 1 0.995 0.995\n", - " orange 128 4 0.817 1 0.995 0.904\n", - " broccoli 128 11 0.834 0.818 0.889 0.635\n", - " carrot 128 24 0.971 0.875 0.981 0.796\n", - " hot dog 128 2 0.88 1 0.995 0.995\n", - " pizza 128 5 0.944 1 0.995 0.931\n", - " donut 128 14 0.922 1 0.995 0.987\n", - " cake 128 4 0.934 1 0.995 0.905\n", - " chair 128 35 0.965 0.785 0.973 0.768\n", - " couch 128 6 0.94 1 0.995 0.939\n", - " potted plant 128 14 0.923 1 0.981 0.868\n", - " bed 128 3 0.912 1 0.995 0.943\n", - " dining table 128 13 1 0.903 0.99 0.863\n", - " toilet 128 2 0.906 1 0.995 0.995\n", - " tv 128 2 0.886 1 0.995 0.922\n", - " laptop 128 3 1 0.863 0.995 0.931\n", - " mouse 128 2 1 0.632 0.995 0.664\n", - " remote 128 8 0.854 0.625 0.788 0.653\n", - " cell phone 128 8 1 0.699 0.995 0.722\n", - " microwave 128 3 0.919 1 0.995 0.936\n", - " oven 128 5 0.956 1 0.995 0.865\n", - " sink 128 6 1 0.926 0.995 0.771\n", - " refrigerator 128 5 0.943 1 0.995 0.905\n", - " book 128 29 0.798 0.547 0.852 
0.578\n", - " clock 128 9 0.97 1 0.995 0.913\n", - " vase 128 2 0.88 1 0.995 0.995\n", - " scissors 128 1 0.829 1 0.995 0.895\n", - " teddy bear 128 21 0.987 1 0.995 0.897\n", - " toothbrush 128 5 0.953 1 0.995 0.954\n", - "Speed: 0.2ms preprocess, 11.7ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_6_post_val26\u001b[0m\n" + "Speed: 0.1ms preprocess, 10.8ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_6_post_val27\u001b[0m\n" ] }, { @@ -3202,86 +1506,15 @@ "name": "stderr", "output_type": "stream", "text": [ - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 35995675 parameters, 74176 gradients, 136.7 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.838 0.847 0.905 0.744\n", - " person 128 254 0.942 0.811 0.906 0.721\n", - " bicycle 128 6 0.889 0.667 0.801 0.521\n", - " car 128 46 0.844 0.5 0.629 0.359\n", - " motorcycle 128 5 0.849 1 0.995 0.861\n", - " airplane 128 6 0.886 1 0.995 0.937\n", - " bus 128 7 1 0.879 0.995 0.868\n", - " train 128 3 0.731 1 0.995 0.912\n", - " truck 128 12 0.884 0.5 0.674 0.492\n", - " boat 128 6 0.777 0.833 0.942 0.737\n", - " traffic light 128 14 0.578 0.105 0.452 0.222\n", - " stop sign 128 2 1 0.799 0.995 0.895\n", - " bench 128 9 0.944 0.778 0.926 0.824\n", - " bird 128 16 0.945 1 0.995 0.812\n", - " cat 128 4 0.84 1 0.995 0.815\n", - " dog 128 9 0.935 1 0.995 0.87\n", - " horse 128 2 0.566 1 0.995 0.924\n", - " elephant 128 17 0.964 1 0.995 0.851\n", - " bear 128 1 0.593 1 0.995 0.995\n", - " zebra 128 4 0.838 1 0.995 0.974\n", - " giraffe 128 9 0.924 1 0.995 0.942\n", - " backpack 128 6 0.82 0.768 0.764 0.6\n", - " umbrella 128 18 0.892 1 0.995 0.813\n", - " handbag 128 19 0.803 0.737 0.8 0.617\n", - " tie 128 7 0.888 0.857 0.889 0.771\n", - " suitcase 128 4 0.91 1 0.995 0.778\n", - " frisbee 128 5 1 0.76 0.962 0.664\n", - " skis 128 1 0.59 1 0.995 0.995\n", - " snowboard 128 7 0.807 0.857 0.909 0.731\n", - " sports ball 128 6 0.941 0.667 0.699 0.381\n", - " kite 128 10 0.727 0.9 0.957 0.612\n", - " baseball bat 128 4 0.988 0.75 0.845 0.498\n", - " baseball glove 128 7 0.805 0.429 0.473 0.35\n", - " skateboard 128 5 0.603 1 0.995 0.718\n", - " tennis racket 128 7 0.859 0.714 0.722 0.567\n", - " bottle 128 18 0.774 0.572 0.776 0.538\n", - " wine glass 128 16 0.709 0.938 0.858 0.575\n", - " cup 128 36 0.89 0.897 0.927 0.699\n", - " fork 128 6 0.76 0.667 0.803 0.665\n", - " knife 128 16 0.689 0.75 0.78 0.531\n", - " spoon 128 22 0.863 0.909 0.899 0.647\n", - " bowl 128 28 0.899 0.857 0.913 0.756\n", - " banana 128 1 0.576 1 0.995 0.895\n", - " sandwich 128 2 0.532 1 0.995 0.995\n", - " orange 128 4 0.864 1 0.995 0.855\n", - " broccoli 128 11 0.804 0.545 0.667 0.483\n", - " carrot 128 24 1 0.884 0.989 0.739\n", - " hot dog 128 2 0.737 1 0.995 0.995\n", - " pizza 128 5 0.91 1 0.995 0.937\n", - " donut 128 14 0.956 1 0.995 0.937\n", - " cake 128 
4 0.849 1 0.995 0.904\n", - " chair 128 35 0.894 0.8 0.888 0.657\n", - " couch 128 6 0.881 1 0.995 0.884\n", - " potted plant 128 14 0.907 0.696 0.823 0.696\n", - " bed 128 3 0.795 1 0.995 0.953\n", - " dining table 128 13 0.86 0.923 0.968 0.84\n", - " toilet 128 2 0.774 1 0.995 0.903\n", - " tv 128 2 0.597 0.5 0.537 0.484\n", - " laptop 128 3 1 0.986 0.995 0.866\n", - " mouse 128 2 1 0.633 0.995 0.562\n", - " remote 128 8 0.87 0.625 0.783 0.609\n", - " cell phone 128 8 0.669 0.757 0.832 0.613\n", - " microwave 128 3 0.89 1 0.995 0.865\n", - " oven 128 5 1 0.788 0.962 0.842\n", - " sink 128 6 0.887 0.833 0.931 0.765\n", - " refrigerator 128 5 0.867 1 0.995 0.905\n", - " book 128 29 0.824 0.484 0.77 0.459\n", - " clock 128 9 0.975 0.889 0.984 0.87\n", - " vase 128 2 0.743 1 0.995 0.922\n", - " scissors 128 1 1 1 0.995 0.597\n", - " teddy bear 128 21 0.856 0.905 0.975 0.831\n", - " toothbrush 128 5 0.866 1 0.995 0.915\n", - "Speed: 0.2ms preprocess, 12.5ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_7_pre_val24\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_7_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_7_finetune24\n", + "Speed: 0.2ms preprocess, 12.1ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_7_pre_val25\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", + "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_7_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, 
show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_7_finetune25\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -3300,148 +1533,78 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_7_finetune24/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_7_finetune25/labels.jpg... \n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_7_finetune24\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_7_finetune25\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 10.9G 0.6576 0.4219 0.9433 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 13G 0.6576 0.4219 0.9433 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.884 0.852 0.921 0.764\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 10.9G 0.5285 0.3538 0.8714 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 13.2G 0.5285 0.3538 0.8714 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.921 0.864 0.937 0.782\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 10.9G 0.5672 0.3781 0.8972 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 12.4G 0.5672 0.3781 0.8972 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.921 0.87 0.94 0.791\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 10.9G 0.5324 0.3593 
0.8898 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 12.4G 0.5324 0.3593 0.8898 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.921 0.869 0.937 0.796\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 5/10 10.9G 0.5564 0.395 0.8841 96 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 5/10 12.1G 0.5564 0.395 0.8841 96 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.926 0.888 0.941 0.799\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 6/10 10.9G 0.5555 0.3674 0.9059 120 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 6/10 12.3G 0.5555 0.3674 0.9059 120 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.92 0.891 0.942 0.797\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 7/10 10.9G 0.5972 0.3946 0.9014 69 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 7/10 12.3G 0.5972 0.3946 0.9014 69 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.909 0.897 0.942 0.797\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 8/10 10.9G 0.6033 0.4048 0.9106 141 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 8/10 12.3G 0.6033 0.4048 0.9106 141 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.919 0.892 0.943 0.805\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 9/10 10.9G 0.6098 0.3878 0.9253 104 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 9/10 12.3G 0.6098 0.3878 0.9253 104 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.933 0.884 0.944 0.808\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 10/10 10.9G 0.6518 0.4124 0.9181 170 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 10/10 12.3G 0.6518 0.4124 0.9181 170 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.934 0.887 0.945 0.811\n", "\n", - "10 epochs completed in 0.024 hours.\n", - "Optimizer stripped from runs/detect/step_7_finetune24/weights/last.pt, 144.6MB\n", - "Optimizer stripped from runs/detect/step_7_finetune24/weights/best.pt, 144.6MB\n", + "10 epochs completed in 0.036 hours.\n", + "Optimizer stripped from runs/detect/step_7_finetune25/weights/last.pt, 144.6MB\n", + "Optimizer stripped from runs/detect/step_7_finetune25/weights/best.pt, 144.6MB\n", "\n", - "Validating runs/detect/step_7_finetune24/weights/best.pt...\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Validating runs/detect/step_7_finetune25/weights/best.pt...\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary 
(fused): 285 layers, 35995675 parameters, 0 gradients, 136.7 GFLOPs\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.934 0.887 0.945 0.81\n", - " person 128 254 0.986 0.827 0.943 0.776\n", - " bicycle 128 6 0.877 0.667 0.767 0.469\n", - " car 128 46 0.957 0.489 0.719 0.393\n", - " motorcycle 128 5 0.936 1 0.995 0.931\n", - " airplane 128 6 0.954 1 0.995 0.954\n", - " bus 128 7 1 0.891 0.995 0.86\n", - " train 128 3 0.91 1 0.995 0.995\n", - " truck 128 12 0.945 0.583 0.79 0.579\n", - " boat 128 6 1 0.957 0.995 0.745\n", - " traffic light 128 14 1 0.411 0.507 0.357\n", - " stop sign 128 2 0.873 1 0.995 0.801\n", - " bench 128 9 0.973 1 0.995 0.876\n", - " bird 128 16 0.978 1 0.995 0.85\n", - " cat 128 4 0.925 1 0.995 0.961\n", - " dog 128 9 0.966 1 0.995 0.933\n", - " horse 128 2 0.849 1 0.995 0.949\n", - " elephant 128 17 0.981 1 0.995 0.889\n", - " bear 128 1 0.785 1 0.995 0.995\n", - " zebra 128 4 0.922 1 0.995 0.974\n", - " giraffe 128 9 0.957 1 0.995 0.963\n", - " backpack 128 6 0.935 0.833 0.846 0.678\n", - " umbrella 128 18 0.915 1 0.995 0.846\n", - " handbag 128 19 0.936 0.77 0.885 0.68\n", - " tie 128 7 1 0.809 0.892 0.807\n", - " suitcase 128 4 0.939 1 0.995 0.827\n", - " frisbee 128 5 1 0.764 0.995 0.771\n", - " skis 128 1 0.806 1 0.995 0.895\n", - " snowboard 128 7 0.945 0.857 0.92 0.685\n", - " sports ball 128 6 0.972 0.667 0.8 0.41\n", - " kite 128 10 1 0.716 0.966 0.612\n", - " baseball bat 128 4 1 0.933 0.995 0.782\n", - " baseball glove 128 7 0.898 0.429 0.468 0.408\n", - " skateboard 128 5 0.79 1 0.995 0.84\n", - " tennis racket 128 7 0.929 0.714 0.754 0.577\n", - " bottle 128 18 1 0.543 0.88 0.629\n", - " wine glass 128 16 0.828 0.562 0.876 0.62\n", - " cup 128 36 1 0.928 0.967 0.803\n", - " fork 128 6 0.985 1 0.995 0.821\n", - " knife 128 16 0.994 0.75 0.941 0.674\n", - " spoon 128 22 1 0.86 0.91 0.724\n", - " bowl 128 28 0.948 0.893 0.954 0.793\n", - " banana 128 1 0.781 1 0.995 0.995\n", - " sandwich 128 2 0.855 1 0.995 0.995\n", - " orange 128 4 0.864 1 0.995 0.906\n", - " broccoli 128 11 0.983 0.818 0.891 0.654\n", - " carrot 128 24 0.958 0.961 0.989 0.786\n", - " hot dog 128 2 0.855 1 0.995 0.995\n", - " pizza 128 5 0.933 1 0.995 0.974\n", - " donut 128 14 0.956 1 0.995 0.988\n", - " cake 128 4 0.919 1 0.995 0.995\n", - " chair 128 35 0.966 0.819 0.973 0.774\n", - " couch 128 6 0.923 1 0.995 0.902\n", - " potted plant 128 14 0.974 1 0.995 0.87\n", - " bed 128 3 0.898 1 0.995 0.942\n", - " dining table 128 13 1 0.921 0.99 0.868\n", - " toilet 128 2 0.863 1 0.995 0.948\n", - " tv 128 2 1 0.866 0.995 0.948\n", - " laptop 128 3 0.793 1 0.995 0.964\n", - " mouse 128 2 1 0.812 0.995 0.608\n", - " remote 128 8 1 0.733 0.828 0.684\n", - " cell phone 128 8 0.994 0.875 0.962 0.761\n", - " microwave 128 3 0.901 1 0.995 0.955\n", - " oven 128 5 0.949 1 0.995 0.868\n", - " sink 128 6 1 0.773 0.995 0.785\n", - " refrigerator 128 5 0.935 1 0.995 0.899\n", - " book 128 29 0.892 0.569 0.878 0.594\n", - " clock 128 9 0.963 1 0.995 0.936\n", - " vase 128 2 0.832 1 0.995 0.995\n", - " scissors 128 1 0.788 1 0.995 0.895\n", - " teddy bear 128 21 0.985 1 0.995 0.901\n", - " toothbrush 128 5 0.949 1 0.995 0.995\n", - "Speed: 0.1ms preprocess, 5.5ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_7_finetune24\u001b[0m\n" + "Speed: 0.1ms preprocess, 4.8ms inference, 0.0ms loss, 0.2ms postprocess per image\n", + "Results saved 
to \u001b[1mruns/detect/step_7_finetune25\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -3457,85 +1620,12 @@ "name": "stderr", "output_type": "stream", "text": [ - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", "YOLOv8l summary (fused): 285 layers, 35995675 parameters, 0 gradients, 136.7 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.934 0.877 0.942 0.805\n", - " person 128 254 0.976 0.815 0.942 0.777\n", - " bicycle 128 6 0.916 0.667 0.779 0.471\n", - " car 128 46 0.915 0.47 0.694 0.399\n", - " motorcycle 128 5 0.933 1 0.995 0.931\n", - " airplane 128 6 0.955 1 0.995 0.964\n", - " bus 128 7 1 0.893 0.995 0.866\n", - " train 128 3 0.906 1 0.995 0.813\n", - " truck 128 12 0.952 0.583 0.789 0.594\n", - " boat 128 6 0.893 0.833 0.972 0.746\n", - " traffic light 128 14 1 0.407 0.508 0.354\n", - " stop sign 128 2 0.874 1 0.995 0.8\n", - " bench 128 9 0.887 1 0.995 0.862\n", - " bird 128 16 0.979 1 0.995 0.84\n", - " cat 128 4 0.919 1 0.995 0.95\n", - " dog 128 9 0.967 1 0.995 0.937\n", - " horse 128 2 0.842 1 0.995 0.95\n", - " elephant 128 17 0.983 1 0.995 0.92\n", - " bear 128 1 1 1 0.995 0.995\n", - " zebra 128 4 0.923 1 0.995 0.974\n", - " giraffe 128 9 0.956 1 0.995 0.962\n", - " backpack 128 6 0.935 0.833 0.846 0.674\n", - " umbrella 128 18 0.941 1 0.995 0.837\n", - " handbag 128 19 0.933 0.735 0.885 0.691\n", - " tie 128 7 1 0.845 0.881 0.814\n", - " suitcase 128 4 0.936 1 0.995 0.826\n", - " frisbee 128 5 1 0.776 0.995 0.774\n", - " skis 128 1 0.805 1 0.995 0.895\n", - " snowboard 128 7 0.924 0.857 0.916 0.666\n", - " sports ball 128 6 1 0.621 0.802 0.401\n", - " kite 128 10 1 0.716 0.966 0.605\n", - " baseball bat 128 4 1 0.922 0.995 0.752\n", - " baseball glove 128 7 0.9 0.429 0.468 0.408\n", - " skateboard 128 5 0.914 1 0.995 0.793\n", - " tennis racket 128 7 0.93 0.714 0.749 0.587\n", - " bottle 128 18 1 0.538 0.869 0.608\n", - " wine glass 128 16 0.784 0.562 0.87 0.625\n", - " cup 128 36 0.97 0.897 0.97 0.805\n", - " fork 128 6 1 0.906 0.995 0.774\n", - " knife 128 16 1 0.688 0.944 0.674\n", - " spoon 128 22 1 0.802 0.91 0.732\n", - " bowl 128 28 0.957 0.893 0.95 0.756\n", - " banana 128 1 0.788 1 0.995 0.995\n", - " sandwich 128 2 0.859 1 0.995 0.995\n", - " orange 128 4 0.873 1 0.995 0.864\n", - " broccoli 128 11 0.946 0.818 0.875 0.672\n", - " carrot 128 24 0.952 0.958 0.989 0.79\n", - " hot dog 128 2 0.855 1 0.995 0.995\n", - " pizza 128 5 0.937 1 0.995 0.941\n", - " donut 128 14 0.945 1 0.995 0.988\n", - " cake 128 4 0.916 1 0.995 0.995\n", - " chair 128 35 0.95 0.829 0.974 0.77\n", - " couch 128 6 0.942 1 0.995 0.9\n", - " potted plant 128 14 0.977 1 0.995 0.871\n", - " bed 128 3 0.897 1 0.995 0.915\n", - " dining table 128 13 0.948 0.923 0.99 0.847\n", - " toilet 128 2 0.864 1 0.995 0.948\n", - " tv 128 2 1 0.776 0.995 0.923\n", - " laptop 128 3 0.734 1 0.995 0.931\n", - " mouse 128 2 1 0.704 0.995 0.657\n", - " remote 128 8 1 0.718 0.824 0.678\n", - " cell phone 128 8 1 0.845 0.971 0.775\n", - " microwave 128 3 0.904 1 0.995 
0.954\n", - " oven 128 5 0.941 1 0.995 0.881\n", - " sink 128 6 1 0.765 0.931 0.783\n", - " refrigerator 128 5 0.938 1 0.995 0.897\n", - " book 128 29 0.89 0.555 0.833 0.574\n", - " clock 128 9 0.96 1 0.995 0.909\n", - " vase 128 2 0.849 1 0.995 0.995\n", - " scissors 128 1 0.807 1 0.995 0.995\n", - " teddy bear 128 21 1 0.99 0.995 0.921\n", - " toothbrush 128 5 0.947 1 0.995 0.972\n", - "Speed: 0.2ms preprocess, 13.0ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_7_post_val24\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.1ms preprocess, 12.0ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_7_post_val25\u001b[0m\n" ] }, { @@ -3556,85 +1646,15 @@ "name": "stderr", "output_type": "stream", "text": [ + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 34583399 parameters, 74176 gradients, 131.4 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.861 0.846 0.915 0.747\n", - " person 128 254 0.954 0.799 0.926 0.716\n", - " bicycle 128 6 1 0.63 0.768 0.47\n", - " car 128 46 0.786 0.522 0.637 0.338\n", - " motorcycle 128 5 0.954 1 0.995 0.871\n", - " airplane 128 6 0.911 1 0.995 0.941\n", - " bus 128 7 1 0.965 0.995 0.791\n", - " train 128 3 0.709 1 0.995 0.789\n", - " truck 128 12 1 0.581 0.816 0.5\n", - " boat 128 6 0.839 0.833 0.931 0.643\n", - " traffic light 128 14 0.894 0.286 0.434 0.307\n", - " stop sign 128 2 0.771 1 0.995 0.796\n", - " bench 128 9 0.927 1 0.995 0.831\n", - " bird 128 16 0.955 1 0.995 0.76\n", - " cat 128 4 0.865 1 0.995 0.887\n", - " dog 128 9 0.933 1 0.995 0.927\n", - " horse 128 2 0.716 1 0.995 0.899\n", - " elephant 128 17 0.971 1 0.995 0.857\n", - " bear 128 1 0.654 1 0.995 0.895\n", - " zebra 128 4 0.864 1 0.995 0.95\n", - " giraffe 128 9 0.93 1 0.995 0.892\n", - " backpack 128 6 0.76 0.833 0.819 0.587\n", - " umbrella 128 18 0.816 1 0.977 0.778\n", - " handbag 128 19 0.932 0.725 0.81 0.576\n", - " tie 128 7 0.983 0.857 0.881 0.773\n", - " suitcase 128 4 0.862 1 0.995 0.849\n", - " frisbee 128 5 0.801 0.6 0.866 0.672\n", - " skis 128 1 0.667 1 0.995 0.796\n", - " snowboard 128 7 0.794 0.857 0.874 0.642\n", - " sports ball 128 6 1 0.345 0.732 0.38\n", - " kite 128 10 0.867 0.9 0.889 0.51\n", - " baseball bat 128 4 0.978 1 0.995 0.632\n", - " baseball glove 128 7 0.838 0.429 0.487 0.361\n", - " skateboard 128 5 0.901 1 0.995 0.761\n", - " tennis racket 128 7 0.854 0.571 0.686 0.514\n", - " bottle 128 18 0.901 0.5 0.789 0.545\n", - " wine glass 128 16 0.701 0.938 0.889 0.558\n", - " cup 128 36 0.911 0.855 0.915 0.702\n", - " fork 128 6 0.656 0.639 0.798 0.623\n", - " knife 128 16 0.821 0.575 0.783 0.529\n", - " spoon 128 22 0.933 0.634 0.773 0.611\n", - " bowl 128 28 0.928 0.916 0.922 0.734\n", - " banana 128 1 0.657 1 0.995 0.995\n", - " sandwich 128 2 0.766 1 0.995 0.995\n", - " orange 128 4 0.813 1 0.995 0.79\n", - " broccoli 128 11 0.857 0.818 0.832 0.609\n", - " carrot 128 24 
0.899 0.958 0.984 0.729\n", - " hot dog 128 2 0.76 1 0.995 0.995\n", - " pizza 128 5 0.887 1 0.995 0.924\n", - " donut 128 14 0.953 1 0.995 0.957\n", - " cake 128 4 0.861 1 0.995 0.947\n", - " chair 128 35 0.965 0.857 0.934 0.702\n", - " couch 128 6 1 0.795 0.995 0.925\n", - " potted plant 128 14 0.897 1 0.995 0.844\n", - " bed 128 3 0.673 1 0.995 0.915\n", - " dining table 128 13 0.854 0.846 0.914 0.801\n", - " toilet 128 2 0.769 1 0.995 0.946\n", - " tv 128 2 0.644 0.5 0.828 0.765\n", - " laptop 128 3 0.71 0.667 0.913 0.7\n", - " mouse 128 2 1 0 0.995 0.615\n", - " remote 128 8 0.88 0.625 0.741 0.635\n", - " cell phone 128 8 1 0.605 0.834 0.546\n", - " microwave 128 3 0.907 1 0.995 0.867\n", - " oven 128 5 0.895 1 0.995 0.904\n", - " sink 128 6 1 0.821 0.955 0.749\n", - " refrigerator 128 5 0.894 1 0.995 0.883\n", - " book 128 29 0.775 0.759 0.812 0.497\n", - " clock 128 9 0.925 1 0.995 0.867\n", - " vase 128 2 0.779 1 0.995 0.895\n", - " scissors 128 1 0.67 1 0.995 0.995\n", - " teddy bear 128 21 0.984 1 0.995 0.865\n", - " toothbrush 128 5 0.887 1 0.995 0.978\n", - "Speed: 0.2ms preprocess, 11.8ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_8_pre_val23\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_8_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_8_finetune23\n", + "Speed: 0.2ms preprocess, 12.0ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_8_pre_val24\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", + "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_8_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, 
dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_8_finetune24\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -3653,149 +1673,78 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_8_finetune23/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_8_finetune24/labels.jpg... 
\n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_8_finetune23\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_8_finetune24\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 10.7G 0.6527 0.4186 0.9399 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 12.7G 0.6527 0.4186 0.9399 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.878 0.86 0.925 0.769\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 10.6G 0.5123 0.3376 0.8642 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 12.9G 0.5123 0.3376 0.8642 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.902 0.884 0.932 0.78\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 10.5G 0.5575 0.3672 0.8903 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 12.2G 0.5575 0.3672 0.8903 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.918 0.887 0.935 0.784\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 10.6G 0.5313 0.3422 0.8975 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 12.2G 0.5313 0.3422 0.8975 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.913 0.897 0.936 0.795\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 5/10 10.6G 0.543 0.3699 0.874 96 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 5/10 12.2G 0.543 0.3699 0.874 96 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.922 0.891 0.939 0.795\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 6/10 10.6G 0.5544 0.3693 0.9 120 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 6/10 12.2G 0.5544 0.3693 0.9 120 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.936 0.885 0.938 0.798\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 7/10 10.5G 0.5915 0.3854 0.8924 69 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 7/10 12.2G 0.5915 0.3854 0.8924 69 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.942 0.883 0.939 0.801\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 8/10 10.6G 0.6192 0.4081 0.9123 141 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 8/10 12G 0.6192 0.4081 0.9123 141 
640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.944 0.882 0.94 0.803\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 9/10 10.3G 0.6259 0.4123 0.9284 104 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 9/10 12.2G 0.6259 0.4123 0.9284 104 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.947 0.88 0.941 0.806\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 10/10 10.5G 0.6654 0.4213 0.9262 170 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 10/10 12.1G 0.6654 0.4213 0.9262 170 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.944 0.886 0.94 0.808\n", "\n", - "10 epochs completed in 0.024 hours.\n", - "Optimizer stripped from runs/detect/step_8_finetune23/weights/last.pt, 139.0MB\n", - "Optimizer stripped from runs/detect/step_8_finetune23/weights/best.pt, 139.0MB\n", + "10 epochs completed in 0.037 hours.\n", + "Optimizer stripped from runs/detect/step_8_finetune24/weights/last.pt, 139.0MB\n", + "Optimizer stripped from runs/detect/step_8_finetune24/weights/best.pt, 139.0MB\n", "\n", - "Validating runs/detect/step_8_finetune23/weights/best.pt...\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Validating runs/detect/step_8_finetune24/weights/best.pt...\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 34583399 parameters, 0 gradients, 131.4 GFLOPs\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.948 0.882 0.94 0.808\n", - " person 128 254 0.981 0.834 0.94 0.777\n", - " bicycle 128 6 0.95 0.667 0.735 0.553\n", - " car 128 46 0.841 0.46 0.64 0.373\n", - " motorcycle 128 5 0.945 1 0.995 0.963\n", - " airplane 128 6 0.969 1 0.995 0.898\n", - " bus 128 7 0.989 1 0.995 0.841\n", - " train 128 3 0.919 1 0.995 0.995\n", - " truck 128 12 0.805 0.69 0.924 0.636\n", - " boat 128 6 0.933 0.833 0.972 0.755\n", - " traffic light 128 14 1 0.449 0.504 0.333\n", - " stop sign 128 2 0.904 1 0.995 0.849\n", - " bench 128 9 0.975 1 0.995 0.846\n", - " bird 128 16 0.981 1 0.995 0.85\n", - " cat 128 4 0.923 1 0.995 0.937\n", - " dog 128 9 0.979 1 0.995 0.927\n", - " horse 128 2 0.895 1 0.995 0.949\n", - " elephant 128 17 0.981 1 0.995 0.897\n", - " bear 128 1 1 1 0.995 0.995\n", - " zebra 128 4 0.934 1 0.995 0.973\n", - " giraffe 128 9 0.965 1 0.995 0.976\n", - " backpack 128 6 0.954 0.833 0.845 0.734\n", - " umbrella 128 18 1 0.934 0.992 0.869\n", - " handbag 128 19 0.926 0.789 0.837 0.693\n", - " tie 128 7 1 0.827 0.889 0.775\n", - " suitcase 128 4 0.925 1 0.995 0.816\n", - " frisbee 128 5 1 0.827 0.995 0.767\n", - " skis 128 1 0.832 1 0.995 0.796\n", - " snowboard 128 7 0.965 0.857 0.877 0.691\n", - " sports ball 128 6 1 0.629 0.742 0.352\n", - " kite 128 10 1 0.852 0.966 0.595\n", - " baseball bat 128 4 1 0.733 0.995 0.733\n", - " baseball glove 128 7 0.928 0.429 0.458 0.364\n", - " skateboard 128 5 0.942 1 0.995 0.829\n", - " tennis racket 128 7 0.961 0.714 0.814 0.613\n", - " bottle 128 18 1 0.58 0.917 0.598\n", - " wine glass 128 16 1 
0.538 0.867 0.613\n", - " cup 128 36 0.971 0.919 0.974 0.816\n", - " fork 128 6 1 0.824 0.955 0.797\n", - " knife 128 16 0.933 0.75 0.867 0.668\n", - " spoon 128 22 0.946 0.909 0.908 0.744\n", - " bowl 128 28 1 0.841 0.897 0.807\n", - " banana 128 1 0.82 1 0.995 0.995\n", - " sandwich 128 2 0.868 1 0.995 0.923\n", - " orange 128 4 0.93 1 0.995 0.87\n", - " broccoli 128 11 0.948 0.818 0.879 0.654\n", - " carrot 128 24 0.956 0.902 0.987 0.797\n", - " hot dog 128 2 0.874 1 0.995 0.995\n", - " pizza 128 5 0.94 1 0.995 0.974\n", - " donut 128 14 0.921 1 0.995 0.983\n", - " cake 128 4 0.933 1 0.995 0.995\n", - " chair 128 35 0.93 0.757 0.948 0.755\n", - " couch 128 6 0.969 1 0.995 0.934\n", - " potted plant 128 14 0.98 1 0.995 0.887\n", - " bed 128 3 0.915 1 0.995 0.942\n", - " dining table 128 13 1 0.911 0.966 0.844\n", - " toilet 128 2 0.899 1 0.995 0.995\n", - " tv 128 2 0.887 1 0.995 0.995\n", - " laptop 128 3 0.999 1 0.995 0.953\n", - " mouse 128 2 1 0.72 0.995 0.705\n", - " remote 128 8 0.953 0.625 0.75 0.629\n", - " cell phone 128 8 1 0.841 0.982 0.693\n", - " microwave 128 3 0.92 1 0.995 0.995\n", - " oven 128 5 0.97 1 0.995 0.933\n", - " sink 128 6 1 0.761 0.972 0.821\n", - " refrigerator 128 5 0.942 1 0.995 0.909\n", - " book 128 29 0.964 0.552 0.937 0.579\n", - " clock 128 9 0.977 1 0.995 0.896\n", - " vase 128 2 0.959 1 0.995 0.946\n", - " scissors 128 1 0.805 1 0.995 0.895\n", - " teddy bear 128 21 0.961 1 0.995 0.906\n", - " toothbrush 128 5 0.948 1 0.995 0.952\n", - "Speed: 0.1ms preprocess, 4.6ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_8_finetune23\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.1ms preprocess, 4.1ms inference, 0.0ms loss, 0.2ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_8_finetune24\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -3812,83 +1761,12 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 34583399 parameters, 0 gradients, 131.4 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 
126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.939 0.885 0.94 0.804\n", - " person 128 254 0.973 0.851 0.946 0.77\n", - " bicycle 128 6 0.945 0.667 0.734 0.539\n", - " car 128 46 0.855 0.514 0.648 0.361\n", - " motorcycle 128 5 0.936 1 0.995 0.937\n", - " airplane 128 6 0.962 1 0.995 0.897\n", - " bus 128 7 0.985 1 0.995 0.849\n", - " train 128 3 0.91 1 0.995 0.953\n", - " truck 128 12 0.776 0.577 0.798 0.585\n", - " boat 128 6 1 0.893 0.995 0.772\n", - " traffic light 128 14 0.944 0.5 0.504 0.324\n", - " stop sign 128 2 0.889 1 0.995 0.848\n", - " bench 128 9 0.977 1 0.995 0.838\n", - " bird 128 16 0.978 1 0.995 0.841\n", - " cat 128 4 0.913 1 0.995 0.967\n", - " dog 128 9 0.978 1 0.995 0.937\n", - " horse 128 2 0.882 1 0.995 0.949\n", - " elephant 128 17 0.978 1 0.995 0.902\n", - " bear 128 1 1 1 0.995 0.995\n", - " zebra 128 4 0.925 1 0.995 0.974\n", - " giraffe 128 9 0.959 1 0.995 0.933\n", - " backpack 128 6 0.947 0.833 0.847 0.739\n", - " umbrella 128 18 1 0.944 0.992 0.868\n", - " handbag 128 19 0.938 0.795 0.844 0.689\n", - " tie 128 7 0.972 0.857 0.884 0.755\n", - " suitcase 128 4 0.915 1 0.995 0.873\n", - " frisbee 128 5 0.959 0.8 0.962 0.76\n", - " skis 128 1 0.808 1 0.995 0.796\n", - " snowboard 128 7 1 0.911 0.995 0.747\n", - " sports ball 128 6 1 0.64 0.755 0.354\n", - " kite 128 10 1 0.869 0.962 0.577\n", - " baseball bat 128 4 1 0.735 0.995 0.731\n", - " baseball glove 128 7 0.916 0.429 0.453 0.375\n", - " skateboard 128 5 0.941 1 0.995 0.831\n", - " tennis racket 128 7 0.947 0.714 0.814 0.612\n", - " bottle 128 18 0.907 0.543 0.837 0.551\n", - " wine glass 128 16 1 0.577 0.898 0.637\n", - " cup 128 36 0.961 0.917 0.974 0.816\n", - " fork 128 6 0.943 0.833 0.955 0.744\n", - " knife 128 16 0.907 0.75 0.85 0.662\n", - " spoon 128 22 0.921 0.909 0.907 0.73\n", - " bowl 128 28 1 0.846 0.899 0.784\n", - " banana 128 1 0.808 1 0.995 0.995\n", - " sandwich 128 2 0.86 1 0.995 0.995\n", - " orange 128 4 0.923 1 0.995 0.875\n", - " broccoli 128 11 0.882 0.818 0.876 0.64\n", - " carrot 128 24 0.958 0.954 0.989 0.805\n", - " hot dog 128 2 0.861 1 0.995 0.995\n", - " pizza 128 5 0.933 1 0.995 0.938\n", - " donut 128 14 0.915 1 0.995 0.982\n", - " cake 128 4 0.924 1 0.995 0.995\n", - " chair 128 35 0.93 0.761 0.956 0.739\n", - " couch 128 6 0.978 1 0.995 0.928\n", - " potted plant 128 14 0.973 1 0.995 0.905\n", - " bed 128 3 0.899 1 0.995 0.942\n", - " dining table 128 13 0.987 0.923 0.979 0.824\n", - " toilet 128 2 0.884 1 0.995 0.995\n", - " tv 128 2 0.875 1 0.995 0.995\n", - " laptop 128 3 1 0.921 0.995 0.931\n", - " mouse 128 2 1 0.726 0.995 0.653\n", - " remote 128 8 0.944 0.625 0.749 0.628\n", - " cell phone 128 8 1 0.851 0.995 0.714\n", - " microwave 128 3 0.904 1 0.995 0.965\n", - " oven 128 5 0.961 1 0.995 0.897\n", - " sink 128 6 1 0.767 0.995 0.83\n", - " refrigerator 128 5 0.936 1 0.995 0.877\n", - " book 128 29 0.944 0.576 0.925 0.574\n", - " clock 128 9 0.968 1 0.995 0.881\n", - " vase 128 2 0.976 1 0.995 0.947\n", - " scissors 128 1 0.788 1 0.995 0.995\n", - " teddy bear 128 21 0.991 1 0.995 0.901\n", - " toothbrush 128 5 0.94 1 0.995 0.953\n", - "Speed: 0.2ms preprocess, 11.9ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_8_post_val23\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.1ms preprocess, 12.5ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + 
"Results saved to \u001b[1mruns/detect/step_8_post_val24\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -3910,84 +1788,13 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 33747610 parameters, 74176 gradients, 128.5 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.919 0.872 0.923 0.774\n", - " person 128 254 0.973 0.837 0.931 0.745\n", - " bicycle 128 6 1 0.65 0.782 0.561\n", - " car 128 46 0.903 0.404 0.636 0.376\n", - " motorcycle 128 5 0.921 1 0.995 0.931\n", - " airplane 128 6 0.955 1 0.995 0.906\n", - " bus 128 7 1 0.866 0.995 0.861\n", - " train 128 3 0.883 1 0.995 0.909\n", - " truck 128 12 0.848 0.667 0.829 0.609\n", - " boat 128 6 0.925 0.833 0.842 0.638\n", - " traffic light 128 14 0.696 0.429 0.426 0.28\n", - " stop sign 128 2 0.866 1 0.995 0.895\n", - " bench 128 9 1 0.978 0.995 0.8\n", - " bird 128 16 0.953 1 0.995 0.856\n", - " cat 128 4 0.892 1 0.995 0.948\n", - " dog 128 9 0.992 1 0.995 0.945\n", - " horse 128 2 0.849 1 0.995 0.947\n", - " elephant 128 17 0.973 1 0.995 0.885\n", - " bear 128 1 0.753 1 0.995 0.895\n", - " zebra 128 4 0.907 1 0.995 0.968\n", - " giraffe 128 9 0.982 1 0.995 0.902\n", - " backpack 128 6 1 0.763 0.845 0.581\n", - " umbrella 128 18 0.998 1 0.995 0.838\n", - " handbag 128 19 0.934 0.789 0.84 0.63\n", - " tie 128 7 1 0.785 0.886 0.74\n", - " suitcase 128 4 0.908 1 0.995 0.873\n", - " frisbee 128 5 0.965 0.8 0.92 0.761\n", - " skis 128 1 0.777 1 0.995 0.597\n", - " snowboard 128 7 0.839 0.857 0.849 0.667\n", - " sports ball 128 6 0.994 0.667 0.686 0.325\n", - " kite 128 10 1 0.86 0.937 0.48\n", - " baseball bat 128 4 1 0.828 0.995 0.756\n", - " baseball glove 128 7 0.891 0.429 0.454 0.379\n", - " skateboard 128 5 0.955 1 0.995 0.739\n", - " tennis racket 128 7 0.966 0.714 0.722 0.608\n", - " bottle 128 18 0.92 0.642 0.855 0.609\n", - " wine glass 128 16 1 0.713 0.909 0.621\n", - " cup 128 36 0.915 0.902 0.968 0.772\n", - " fork 128 6 0.805 0.693 0.852 0.633\n", - " knife 128 16 1 0.679 0.846 0.638\n", - " spoon 128 22 0.92 0.864 0.88 0.677\n", - " bowl 128 28 0.979 0.857 0.903 0.762\n", - " banana 128 1 0.765 1 0.995 0.895\n", - " sandwich 128 2 0.834 1 0.995 0.995\n", - " orange 128 4 0.913 1 0.995 0.877\n", - " broccoli 128 11 0.973 0.818 0.852 0.626\n", - " carrot 128 24 0.936 0.833 0.962 0.716\n", - " hot dog 128 2 0.828 1 0.995 0.995\n", - " pizza 128 5 0.919 1 0.995 0.959\n", - " donut 128 14 0.921 1 0.995 0.959\n", - " cake 128 4 0.918 1 0.995 0.961\n", - " chair 128 35 0.927 0.771 0.917 0.731\n", - " couch 128 6 1 0.815 0.995 0.936\n", - " potted plant 128 14 1 0.998 0.995 0.85\n", - " bed 128 3 0.77 1 0.995 0.942\n", - " dining table 128 13 0.922 0.915 0.938 0.762\n", - " toilet 128 2 0.869 1 0.995 0.995\n", - " tv 128 2 0.831 1 0.995 0.995\n", - " laptop 128 3 1 0.946 0.995 0.895\n", - " mouse 128 2 0.731 0.5 0.745 0.507\n", - " remote 128 8 0.933 0.625 0.681 0.595\n", - " cell phone 128 8 1 0.783 0.944 0.63\n", - " microwave 128 3 0.879 1 0.995 0.934\n", - " oven 128 5 0.931 1 0.995 0.892\n", - " sink 128 6 1 0.753 0.995 
0.79\n", - " refrigerator 128 5 1 0.988 0.995 0.904\n", - " book 128 29 0.835 0.698 0.898 0.515\n", - " clock 128 9 0.986 1 0.995 0.847\n", - " vase 128 2 0.975 1 0.995 0.895\n", - " scissors 128 1 0.737 1 0.995 0.995\n", - " teddy bear 128 21 0.954 0.985 0.993 0.89\n", - " toothbrush 128 5 0.928 1 0.995 0.895\n", - "Speed: 0.2ms preprocess, 12.3ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_9_pre_val22\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_9_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_9_finetune22\n", + "Speed: 0.2ms preprocess, 13.1ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_9_pre_val23\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", + "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_9_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, 
shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_9_finetune23\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -4006,149 +1813,77 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_9_finetune22/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_9_finetune23/labels.jpg... \n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_9_finetune22\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_9_finetune23\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 11.1G 0.6022 0.3899 0.9207 122 640: 100%|██████████| 8/8 [00:4\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 12.6G 0.6022 0.3899 0.9207 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.921 0.881 0.93 0.784\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 10.6G 0.4755 0.3118 0.851 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 12.1G 0.4755 0.3118 0.851 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.941 0.883 0.933 0.794\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 10.5G 0.5226 0.3441 0.8847 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 12G 0.5226 0.3441 0.8847 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.936 0.884 0.936 0.795\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 10.6G 0.5197 0.3324 0.8815 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 11.8G 0.5197 0.3324 0.8815 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.935 0.883 0.933 0.792\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 5/10 10.6G 0.5239 0.353 0.8671 96 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 5/10 12G 0.5239 0.353 0.8671 96 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances 
Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.936 0.882 0.934 0.792\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 6/10 10.6G 0.5413 0.3589 0.8919 120 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 6/10 12G 0.5413 0.3589 0.8919 120 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.93 0.877 0.934 0.802\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 7/10 10.6G 0.5753 0.3723 0.8863 69 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 7/10 12.1G 0.5753 0.3723 0.8863 69 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.944 0.873 0.933 0.802\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 8/10 10.6G 0.6104 0.3991 0.9113 141 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 8/10 12.1G 0.6104 0.3991 0.9113 141 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.95 0.868 0.931 0.8\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 9/10 10.6G 0.6059 0.395 0.9182 104 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 9/10 12.1G 0.6059 0.395 0.9182 104 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.943 0.873 0.932 0.805\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 10/10 10.5G 0.6558 0.4098 0.9218 170 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 10/10 12G 0.6558 0.4098 0.9218 170 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.945 0.871 0.933 0.805\n", "\n", "10 epochs completed in 0.033 hours.\n", - "Optimizer stripped from runs/detect/step_9_finetune22/weights/last.pt, 135.6MB\n", - "Optimizer stripped from runs/detect/step_9_finetune22/weights/best.pt, 135.6MB\n", + "Optimizer stripped from runs/detect/step_9_finetune23/weights/last.pt, 135.6MB\n", + "Optimizer stripped from runs/detect/step_9_finetune23/weights/best.pt, 135.6MB\n", "\n", - "Validating runs/detect/step_9_finetune22/weights/best.pt...\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Validating runs/detect/step_9_finetune23/weights/best.pt...\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 33747610 parameters, 0 gradients, 128.5 GFLOPs\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.945 0.871 0.933 0.806\n", - " person 128 254 0.981 0.809 0.942 0.771\n", - " bicycle 128 6 0.906 0.667 0.725 0.573\n", - " car 128 46 1 0.522 0.674 0.355\n", - " motorcycle 128 5 0.961 1 0.995 0.939\n", - " airplane 128 6 0.979 1 0.995 0.91\n", - " bus 128 7 1 0.927 0.995 0.876\n", - " train 128 3 0.945 1 0.995 0.909\n", - " truck 128 12 1 0.744 0.959 0.653\n", - " boat 128 6 0.956 0.833 0.955 0.767\n", - " traffic light 128 14 0.781 0.5 0.496 
0.332\n", - " stop sign 128 2 0.886 1 0.995 0.895\n", - " bench 128 9 0.987 1 0.995 0.882\n", - " bird 128 16 0.931 1 0.995 0.869\n", - " cat 128 4 0.923 1 0.995 0.995\n", - " dog 128 9 0.988 1 0.995 0.947\n", - " horse 128 2 0.894 1 0.995 0.946\n", - " elephant 128 17 0.982 1 0.995 0.903\n", - " bear 128 1 0.823 1 0.995 0.995\n", - " zebra 128 4 0.939 1 0.995 0.995\n", - " giraffe 128 9 0.969 1 0.995 0.934\n", - " backpack 128 6 0.934 0.833 0.843 0.658\n", - " umbrella 128 18 0.942 0.906 0.989 0.873\n", - " handbag 128 19 0.987 0.789 0.848 0.673\n", - " tie 128 7 0.969 0.857 0.886 0.753\n", - " suitcase 128 4 0.945 1 0.995 0.848\n", - " frisbee 128 5 0.986 0.8 0.92 0.777\n", - " skis 128 1 0.864 1 0.995 0.895\n", - " snowboard 128 7 1 0.709 0.876 0.79\n", - " sports ball 128 6 0.965 0.667 0.685 0.347\n", - " kite 128 10 1 0.829 0.966 0.6\n", - " baseball bat 128 4 1 0.917 0.995 0.804\n", - " baseball glove 128 7 0.933 0.429 0.456 0.381\n", - " skateboard 128 5 0.975 1 0.995 0.767\n", - " tennis racket 128 7 0.953 0.714 0.722 0.607\n", - " bottle 128 18 0.935 0.5 0.865 0.614\n", - " wine glass 128 16 0.96 0.5 0.835 0.635\n", - " cup 128 36 1 0.829 0.975 0.796\n", - " fork 128 6 1 0.895 0.995 0.798\n", - " knife 128 16 1 0.686 0.913 0.708\n", - " spoon 128 22 0.92 0.864 0.925 0.752\n", - " bowl 128 28 1 0.819 0.932 0.798\n", - " banana 128 1 0.824 1 0.995 0.995\n", - " sandwich 128 2 0.884 1 0.995 0.923\n", - " orange 128 4 0.937 1 0.995 0.869\n", - " broccoli 128 11 0.911 0.818 0.858 0.685\n", - " carrot 128 24 0.957 0.921 0.992 0.789\n", - " hot dog 128 2 0.881 1 0.995 0.895\n", - " pizza 128 5 0.938 1 0.995 0.978\n", - " donut 128 14 0.959 1 0.995 0.983\n", - " cake 128 4 0.902 1 0.995 0.972\n", - " chair 128 35 1 0.72 0.97 0.767\n", - " couch 128 6 0.959 1 0.995 0.959\n", - " potted plant 128 14 0.976 1 0.995 0.864\n", - " bed 128 3 0.931 1 0.995 0.995\n", - " dining table 128 13 0.92 0.884 0.922 0.806\n", - " toilet 128 2 0.899 1 0.995 0.924\n", - " tv 128 2 0.878 1 0.995 0.995\n", - " laptop 128 3 1 0.867 0.995 0.895\n", - " mouse 128 2 0.844 0.5 0.828 0.64\n", - " remote 128 8 0.95 0.625 0.817 0.634\n", - " cell phone 128 8 1 0.707 0.923 0.697\n", - " microwave 128 3 0.927 1 0.995 0.954\n", - " oven 128 5 0.983 1 0.995 0.912\n", - " sink 128 6 1 0.731 0.858 0.711\n", - " refrigerator 128 5 0.98 1 0.995 0.899\n", - " book 128 29 0.942 0.556 0.875 0.585\n", - " clock 128 9 1 0.947 0.995 0.877\n", - " vase 128 2 0.891 1 0.995 0.995\n", - " scissors 128 1 0.819 1 0.995 0.895\n", - " teddy bear 128 21 0.989 1 0.995 0.871\n", - " toothbrush 128 5 0.947 1 0.995 0.936\n", - "Speed: 0.1ms preprocess, 4.9ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_9_finetune22\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.1ms preprocess, 4.4ms inference, 0.0ms loss, 0.2ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_9_finetune23\u001b[0m\n" ] }, { @@ -4164,84 +1899,14 @@ "name": "stderr", "output_type": "stream", "text": [ + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 33747610 parameters, 0 gradients, 128.5 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 
- "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.943 0.875 0.932 0.804\n",
- " [71 per-class rows (person ... toothbrush) omitted]\n",
- "Speed: 0.2ms preprocess, 13.1ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
- "Results saved to \u001b[1mruns/detect/step_9_post_val22\u001b[0m\n",
- "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n"
+ "Speed: 0.2ms preprocess, 14.0ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
+ "Results saved to \u001b[1mruns/detect/step_9_post_val23\u001b[0m\n",
+ "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n"
]
},
{
@@ -4263,84 +1928,13 @@
"output_type": "stream",
"text": [
"YOLOv8l summary (fused): 285 layers, 33209910 parameters, 74176 gradients, 126.7 GFLOPs\n",
- "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.933 0.855 0.928 0.782\n",
- " [71 per-class rows (person ... toothbrush) omitted]\n",
- "Speed: 0.2ms preprocess, 14.1ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
- "Results saved to \u001b[1mruns/detect/step_10_pre_val16\u001b[0m\n",
- "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n",
- "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_10_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_10_finetune16\n",
+ "Speed: 0.2ms preprocess, 13.6ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
+ "Results saved to \u001b[1mruns/detect/step_10_pre_val17\u001b[0m\n",
+ "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
+ "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_10_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, 
momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_10_finetune17\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -4359,149 +1953,78 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_10_finetune16/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_10_finetune17/labels.jpg... \n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_10_finetune16\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_10_finetune17\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 10.1G 0.5909 0.3739 0.911 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 12.3G 0.5909 0.3739 0.911 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.938 0.863 0.931 0.795\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 10.3G 0.4459 0.2951 0.8389 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 11.9G 0.4459 0.2951 0.8389 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.942 0.87 0.932 0.802\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 10.2G 0.5232 0.3395 0.8787 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 11.8G 0.5232 0.3395 0.8787 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.946 0.875 0.935 0.802\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 10.3G 0.4976 0.3318 0.877 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 11.8G 0.4976 0.3318 0.877 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.945 0.878 0.934 0.795\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances 
Size\n",
- " 5/10 10.3G 0.5079 0.3425 0.8612 96 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 5/10 11.8G 0.5079 0.3425 0.8612 96 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.946 0.875 0.934 0.798\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
- " 6/10 10.2G 0.5287 0.3422 0.8902 120 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 6/10 11.8G 0.5287 0.3422 0.8902 120 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.951 0.875 0.937 0.804\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
- " 7/10 10.3G 0.5654 0.3632 0.8837 69 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 7/10 11.9G 0.5654 0.3632 0.8837 69 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.951 0.879 0.938 0.803\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
- " 8/10 10.3G 0.5918 0.3874 0.9027 141 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 8/10 11.9G 0.5918 0.3874 0.9027 141 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.948 0.878 0.937 0.802\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
- " 9/10 10.3G 0.6008 0.3761 0.914 104 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 9/10 11.9G 0.6008 0.3761 0.914 104 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.953 0.876 0.939 0.807\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
- " 10/10 10.3G 0.6525 0.4039 0.9107 170 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 10/10 11.8G 0.6525 0.4039 0.9107 170 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.951 0.876 0.939 0.806\n",
"\n",
- "10 epochs completed in 0.020 hours.\n",
- "Optimizer stripped from runs/detect/step_10_finetune16/weights/last.pt, 133.4MB\n",
- "Optimizer stripped from runs/detect/step_10_finetune16/weights/best.pt, 133.4MB\n",
+ "10 epochs completed in 0.032 hours.\n",
+ "Optimizer stripped from runs/detect/step_10_finetune17/weights/last.pt, 133.4MB\n",
+ "Optimizer stripped from runs/detect/step_10_finetune17/weights/best.pt, 133.4MB\n",
"\n",
- "Validating runs/detect/step_10_finetune16/weights/best.pt...\n",
- "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n",
+ "Validating runs/detect/step_10_finetune17/weights/best.pt...\n",
+ "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
"YOLOv8l summary (fused): 285 layers, 33209910 parameters, 0 gradients, 126.7 GFLOPs\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.952 0.876 0.937 0.807\n",
- " [71 per-class rows (person ... toothbrush) omitted]\n",
- "Speed: 0.1ms preprocess, 4.7ms inference, 0.0ms loss, 0.2ms postprocess per image\n",
- "Results saved to \u001b[1mruns/detect/step_10_finetune16\u001b[0m\n",
- "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n"
+ "Speed: 0.1ms preprocess, 4.3ms inference, 0.0ms loss, 0.2ms postprocess per image\n",
+ "Results saved to \u001b[1mruns/detect/step_10_finetune17\u001b[0m\n",
+ "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n"
]
},
{
@@ -4518,83 +2041,12 @@
"output_type": "stream",
"text": [
"YOLOv8l summary (fused): 285 layers, 33209910 parameters, 0 gradients, 126.7 GFLOPs\n",
- "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.949 0.873 0.938 0.803\n",
- " [71 per-class rows (person ... toothbrush) omitted]\n",
- "Speed: 0.2ms preprocess, 13.7ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
- "Results saved to \u001b[1mruns/detect/step_10_post_val16\u001b[0m\n",
- "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n"
+ "Speed: 0.2ms preprocess, 14.4ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
+ "Results saved to \u001b[1mruns/detect/step_10_post_val17\u001b[0m\n",
+ "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n"
]
},
{
@@ -4616,84 +2068,13 @@
"output_type": "stream",
"text": [
"YOLOv8l summary (fused): 285 layers, 32703049 parameters, 74176 gradients, 124.6 GFLOPs\n",
- "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.926 0.871 0.929 0.785\n",
- " [71 per-class rows (person ... toothbrush) omitted]\n",
- "Speed: 0.2ms preprocess, 14.3ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
- "Results saved to \u001b[1mruns/detect/step_11_pre_val14\u001b[0m\n",
- "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n",
- "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_11_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_11_finetune14\n",
+ "Speed: 0.1ms preprocess, 15.1ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
+ "Results saved to \u001b[1mruns/detect/step_11_pre_val15\u001b[0m\n",
+ "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
+ "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_11_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, 
format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_11_finetune15\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -4712,149 +2093,78 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_11_finetune14/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_11_finetune15/labels.jpg... \n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_11_finetune14\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_11_finetune15\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 10.7G 0.592 0.3808 0.9108 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 12.4G 0.592 0.3808 0.9108 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.912 0.894 0.937 0.794\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 10.2G 0.4274 0.2822 0.8394 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 12.5G 0.4274 0.2822 0.8394 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.936 0.88 0.94 0.808\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 10.1G 0.4962 0.3279 0.8649 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 11.6G 0.4962 0.3279 0.8649 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.934 0.885 0.942 0.807\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 10.2G 0.4768 0.3227 0.8737 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 11.6G 0.4768 0.3227 0.8737 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances 
Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.946 0.877 0.938 0.807\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
- " 5/10 10.1G 0.4901 0.3294 0.8562 96 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 5/10 11.5G 0.4901 0.3294 0.8562 96 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.932 0.884 0.939 0.804\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
- " 6/10 10.1G 0.5087 0.3373 0.8861 120 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 6/10 11.5G 0.5087 0.3373 0.8861 120 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.94 0.883 0.939 0.81\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
- " 7/10 10.1G 0.5481 0.3556 0.8798 69 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 7/10 11.5G 0.5481 0.3556 0.8798 69 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.919 0.89 0.936 0.804\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
- " 8/10 10.2G 0.5694 0.3718 0.8979 141 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 8/10 11.6G 0.5694 0.3718 0.8979 141 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.937 0.888 0.938 0.804\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
- " 9/10 10.2G 0.5756 0.3754 0.9038 104 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 9/10 11.6G 0.5756 0.3754 0.9038 104 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.944 0.886 0.939 0.806\n",
"\n",
" Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n",
- " 10/10 10.1G 0.6536 0.3981 0.9224 170 640: 100%|██████████| 8/8 [00:0\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " 10/10 11.5G 0.6536 0.3981 0.9224 170 640: 100%|██████████| 8/8 [00:02\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.95 0.883 0.939 0.809\n",
"\n",
"10 epochs completed in 0.030 hours.\n",
- "Optimizer stripped from runs/detect/step_11_finetune14/weights/last.pt, 131.4MB\n",
- "Optimizer stripped from runs/detect/step_11_finetune14/weights/best.pt, 131.4MB\n",
+ "Optimizer stripped from runs/detect/step_11_finetune15/weights/last.pt, 131.4MB\n",
+ "Optimizer stripped from runs/detect/step_11_finetune15/weights/best.pt, 131.4MB\n",
"\n",
- "Validating runs/detect/step_11_finetune14/weights/best.pt...\n",
- "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n",
+ "Validating runs/detect/step_11_finetune15/weights/best.pt...\n",
+ "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
"YOLOv8l summary (fused): 285 layers, 32703049 parameters, 0 gradients, 124.6 GFLOPs\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.94 0.883 0.939 0.809\n",
- " [71 per-class rows (person ... toothbrush) omitted]\n",
- "Speed: 0.1ms preprocess, 4.9ms inference, 0.0ms loss, 0.2ms postprocess per image\n",
- "Results saved to \u001b[1mruns/detect/step_11_finetune14\u001b[0m\n",
- "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n"
+ "Speed: 0.1ms preprocess, 4.4ms inference, 0.0ms loss, 0.2ms postprocess per image\n",
+ "Results saved to \u001b[1mruns/detect/step_11_finetune15\u001b[0m\n",
+ "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n"
]
},
{
@@ -4871,83 +2181,12 @@
"output_type": "stream",
"text": [
"YOLOv8l summary (fused): 285 layers, 32703049 parameters, 0 gradients, 124.6 GFLOPs\n",
- "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.932 0.885 0.938 0.803\n",
- " [71 per-class rows (person ... toothbrush) omitted]\n",
- "Speed: 0.2ms preprocess, 15.0ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
- "Results saved to \u001b[1mruns/detect/step_11_post_val13\u001b[0m\n",
- "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n"
+ "Speed: 0.2ms preprocess, 15.9ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
+ "Results saved to \u001b[1mruns/detect/step_11_post_val14\u001b[0m\n",
+ "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n"
]
},
{
@@ -4969,84 +2208,13 @@
"output_type": "stream",
"text": [
"YOLOv8l summary (fused): 285 layers, 32669140 parameters, 74176 gradients, 124.6 GFLOPs\n",
- "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n",
- " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n",
+ "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n",
+ " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n",
" all 128 929 0.945 0.883 0.942 0.806\n",
- " [71 per-class rows (person ... toothbrush) omitted]\n",
- "Speed: 0.2ms preprocess, 15.4ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
- "Results saved to \u001b[1mruns/detect/step_12_pre_val13\u001b[0m\n",
- "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n",
- "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_12_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_12_finetune12\n",
+ "Speed: 0.1ms preprocess, 15.9ms inference, 0.0ms loss, 0.4ms postprocess per image\n",
+ "Results saved to \u001b[1mruns/detect/step_12_pre_val14\u001b[0m\n",
+ "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n",
+ "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_12_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, 
agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_12_finetune13\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -5065,148 +2233,77 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_12_finetune12/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_12_finetune13/labels.jpg... \n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_12_finetune12\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_12_finetune13\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 9.99G 0.499 0.3319 0.8801 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 12.7G 0.499 0.3319 0.8801 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.933 0.887 0.937 0.809\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 10G 0.3689 0.2572 0.8209 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 11.6G 0.3689 0.2572 0.8209 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.932 0.886 0.938 0.812\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 9.99G 0.4539 0.302 0.8599 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 11.6G 0.4539 0.302 0.8599 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.949 0.876 0.938 0.81\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 10G 0.4334 0.2969 0.8595 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 11.6G 0.4334 0.2969 0.8595 68 
640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.94 0.893 0.941 0.808\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 5/10 10G 0.4529 0.3123 0.8466 96 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 5/10 11.6G 0.4529 0.3123 0.8466 96 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.944 0.894 0.941 0.807\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 6/10 10G 0.4631 0.3128 0.8697 120 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 6/10 11.6G 0.4631 0.3128 0.8697 120 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.95 0.887 0.943 0.809\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 7/10 10.3G 0.5213 0.3408 0.8692 69 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 7/10 11.6G 0.5213 0.3408 0.8692 69 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.941 0.891 0.939 0.811\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 8/10 9.98G 0.539 0.3604 0.8853 141 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 8/10 11.7G 0.539 0.3604 0.8853 141 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.939 0.892 0.94 0.81\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 9/10 10.2G 0.5515 0.358 0.8976 104 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 9/10 11.7G 0.5515 0.358 0.8976 104 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.929 0.897 0.94 0.813\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 10/10 10G 0.6402 0.3891 0.9106 170 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 10/10 11.6G 0.6402 0.3891 0.9106 170 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.941 0.897 0.943 0.816\n", "\n", - "10 epochs completed in 0.020 hours.\n", - "Optimizer stripped from runs/detect/step_12_finetune12/weights/last.pt, 131.3MB\n", - "Optimizer stripped from runs/detect/step_12_finetune12/weights/best.pt, 131.3MB\n", + "10 epochs completed in 0.029 hours.\n", + "Optimizer stripped from runs/detect/step_12_finetune13/weights/last.pt, 131.3MB\n", + "Optimizer stripped from runs/detect/step_12_finetune13/weights/best.pt, 131.3MB\n", "\n", - "Validating runs/detect/step_12_finetune12/weights/best.pt...\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Validating runs/detect/step_12_finetune13/weights/best.pt...\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 32669140 parameters, 0 gradients, 124.6 GFLOPs\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " Class Images 
Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.942 0.897 0.944 0.816\n", - " person 128 254 0.977 0.831 0.943 0.766\n", - " bicycle 128 6 0.974 0.667 0.776 0.567\n", - " car 128 46 0.928 0.563 0.686 0.386\n", - " motorcycle 128 5 0.948 1 0.995 0.93\n", - " airplane 128 6 0.966 1 0.995 0.92\n", - " bus 128 7 0.97 1 0.995 0.871\n", - " train 128 3 0.91 1 0.995 0.943\n", - " truck 128 12 1 0.773 0.971 0.661\n", - " boat 128 6 1 0.897 0.995 0.796\n", - " traffic light 128 14 0.827 0.344 0.467 0.334\n", - " stop sign 128 2 0.888 1 0.995 0.895\n", - " bench 128 9 0.974 1 0.995 0.864\n", - " bird 128 16 0.936 1 0.995 0.873\n", - " cat 128 4 0.922 1 0.995 0.974\n", - " dog 128 9 0.994 1 0.995 0.968\n", - " horse 128 2 0.909 1 0.995 0.995\n", - " elephant 128 17 0.989 1 0.995 0.914\n", - " bear 128 1 1 1 0.995 0.995\n", - " zebra 128 4 0.93 1 0.995 0.967\n", - " giraffe 128 9 0.964 1 0.995 0.953\n", - " backpack 128 6 0.942 0.833 0.846 0.706\n", - " umbrella 128 18 0.946 0.982 0.992 0.841\n", - " handbag 128 19 1 0.715 0.844 0.686\n", - " tie 128 7 0.959 0.857 0.886 0.831\n", - " suitcase 128 4 0.959 1 0.995 0.841\n", - " frisbee 128 5 0.958 0.8 0.92 0.733\n", - " skis 128 1 0.826 1 0.995 0.995\n", - " snowboard 128 7 0.968 0.857 0.871 0.696\n", - " sports ball 128 6 1 0.661 0.753 0.395\n", - " kite 128 10 0.98 0.9 0.966 0.634\n", - " baseball bat 128 4 1 0.871 0.995 0.736\n", - " baseball glove 128 7 0.893 0.429 0.444 0.396\n", - " skateboard 128 5 0.956 1 0.995 0.76\n", - " tennis racket 128 7 0.983 0.714 0.828 0.667\n", - " bottle 128 18 1 0.554 0.832 0.621\n", - " wine glass 128 16 0.928 0.806 0.931 0.693\n", - " cup 128 36 1 0.935 0.976 0.831\n", - " fork 128 6 0.938 1 0.995 0.735\n", - " knife 128 16 0.923 0.745 0.916 0.688\n", - " spoon 128 22 0.944 0.909 0.946 0.758\n", - " bowl 128 28 0.998 0.821 0.931 0.791\n", - " banana 128 1 0.816 1 0.995 0.995\n", - " sandwich 128 2 0.871 1 0.995 0.995\n", - " orange 128 4 0.939 1 0.995 0.923\n", - " broccoli 128 11 0.958 0.818 0.891 0.644\n", - " carrot 128 24 0.922 0.987 0.989 0.787\n", - " hot dog 128 2 0.872 1 0.995 0.995\n", - " pizza 128 5 0.943 1 0.995 0.965\n", - " donut 128 14 0.914 1 0.995 0.98\n", - " cake 128 4 0.924 1 0.995 0.995\n", - " chair 128 35 1 0.822 0.99 0.82\n", - " couch 128 6 0.951 1 0.995 0.921\n", - " potted plant 128 14 0.978 1 0.995 0.865\n", - " bed 128 3 0.909 1 0.995 0.995\n", - " dining table 128 13 0.955 0.923 0.932 0.845\n", - " toilet 128 2 0.889 1 0.995 0.949\n", - " tv 128 2 0.883 1 0.995 0.995\n", - " laptop 128 3 0.729 1 0.913 0.857\n", - " mouse 128 2 1 0.679 0.995 0.606\n", - " remote 128 8 1 0.695 0.871 0.67\n", - " cell phone 128 8 1 0.766 0.995 0.703\n", - " microwave 128 3 0.91 1 0.995 0.932\n", - " oven 128 5 0.968 1 0.995 0.886\n", - " sink 128 6 1 0.752 0.955 0.804\n", - " refrigerator 128 5 0.906 1 0.995 0.916\n", - " book 128 29 1 0.76 0.938 0.623\n", - " clock 128 9 0.972 1 0.995 0.883\n", - " vase 128 2 0.884 1 0.995 0.923\n", - " scissors 128 1 0.8 1 0.995 0.995\n", - " teddy bear 128 21 0.907 1 0.989 0.892\n", - " toothbrush 128 5 0.946 1 0.995 0.955\n", - "Speed: 0.1ms preprocess, 4.7ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_12_finetune12\u001b[0m\n" + "Speed: 0.1ms preprocess, 4.2ms inference, 0.0ms loss, 0.2ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_12_finetune13\u001b[0m\n" ] }, { @@ -5222,85 +2319,14 @@ "name": "stderr", "output_type": "stream", "text": [ - "Ultralytics 
YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 32669140 parameters, 0 gradients, 124.6 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.937 0.892 0.941 0.811\n", - " person 128 254 0.968 0.843 0.943 0.765\n", - " bicycle 128 6 0.982 0.667 0.741 0.525\n", - " car 128 46 0.928 0.563 0.671 0.385\n", - " motorcycle 128 5 0.945 1 0.995 0.915\n", - " airplane 128 6 0.965 1 0.995 0.904\n", - " bus 128 7 0.964 1 0.995 0.865\n", - " train 128 3 0.912 1 0.995 0.943\n", - " truck 128 12 1 0.827 0.957 0.678\n", - " boat 128 6 1 0.897 0.995 0.776\n", - " traffic light 128 14 0.844 0.388 0.613 0.338\n", - " stop sign 128 2 0.888 1 0.995 0.895\n", - " bench 128 9 0.982 1 0.995 0.866\n", - " bird 128 16 0.928 1 0.995 0.863\n", - " cat 128 4 0.923 1 0.995 0.973\n", - " dog 128 9 0.987 1 0.995 0.967\n", - " horse 128 2 0.915 1 0.995 0.95\n", - " elephant 128 17 0.989 1 0.995 0.892\n", - " bear 128 1 1 1 0.995 0.995\n", - " zebra 128 4 0.929 1 0.995 0.995\n", - " giraffe 128 9 0.963 1 0.995 0.927\n", - " backpack 128 6 0.941 0.833 0.844 0.75\n", - " umbrella 128 18 0.897 0.968 0.987 0.844\n", - " handbag 128 19 1 0.717 0.844 0.693\n", - " tie 128 7 0.96 0.857 0.881 0.834\n", - " suitcase 128 4 0.95 1 0.995 0.888\n", - " frisbee 128 5 0.954 0.8 0.938 0.758\n", - " skis 128 1 0.828 1 0.995 0.995\n", - " snowboard 128 7 0.966 0.857 0.873 0.71\n", - " sports ball 128 6 1 0.621 0.756 0.387\n", - " kite 128 10 0.919 0.9 0.954 0.633\n", - " baseball bat 128 4 1 0.88 0.995 0.744\n", - " baseball glove 128 7 0.899 0.429 0.443 0.395\n", - " skateboard 128 5 0.953 1 0.995 0.778\n", - " tennis racket 128 7 0.977 0.714 0.827 0.623\n", - " bottle 128 18 1 0.541 0.814 0.591\n", - " wine glass 128 16 0.86 0.771 0.937 0.692\n", - " cup 128 36 0.999 0.944 0.976 0.809\n", - " fork 128 6 0.91 1 0.995 0.732\n", - " knife 128 16 0.92 0.723 0.916 0.666\n", - " spoon 128 22 0.981 0.909 0.94 0.745\n", - " bowl 128 28 1 0.821 0.898 0.765\n", - " banana 128 1 0.819 1 0.995 0.995\n", - " sandwich 128 2 0.873 1 0.995 0.995\n", - " orange 128 4 0.937 1 0.995 0.865\n", - " broccoli 128 11 0.837 0.818 0.883 0.662\n", - " carrot 128 24 0.922 0.984 0.989 0.781\n", - " hot dog 128 2 0.871 1 0.995 0.995\n", - " pizza 128 5 0.943 1 0.995 0.974\n", - " donut 128 14 0.906 1 0.995 0.974\n", - " cake 128 4 0.925 1 0.995 0.954\n", - " chair 128 35 0.984 0.857 0.983 0.807\n", - " couch 128 6 0.947 1 0.995 0.92\n", - " potted plant 128 14 0.95 1 0.995 0.87\n", - " bed 128 3 0.91 1 0.995 0.995\n", - " dining table 128 13 1 0.918 0.958 0.828\n", - " toilet 128 2 0.893 1 0.995 0.948\n", - " tv 128 2 0.881 1 0.995 0.995\n", - " laptop 128 3 0.838 1 0.995 0.963\n", - " mouse 128 2 0.831 0.5 0.695 0.467\n", - " remote 128 8 1 0.684 0.846 0.656\n", - " cell phone 128 8 1 0.622 0.995 0.712\n", - " microwave 128 3 0.904 1 0.995 0.965\n", - " oven 128 5 0.96 1 0.995 0.883\n", - " sink 128 6 1 0.75 0.955 0.764\n", - " refrigerator 128 5 0.946 1 0.995 0.884\n", - " book 
128 29 1 0.734 0.934 0.618\n", - " clock 128 9 0.973 1 0.995 0.897\n", - " vase 128 2 0.887 1 0.995 0.922\n", - " scissors 128 1 0.804 1 0.995 0.995\n", - " teddy bear 128 21 0.911 0.981 0.989 0.905\n", - " toothbrush 128 5 0.945 1 0.995 0.956\n", - "Speed: 0.2ms preprocess, 15.8ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_12_post_val12\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.2ms preprocess, 16.7ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_12_post_val13\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -5322,84 +2348,13 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 32416863 parameters, 74176 gradients, 123.4 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.939 0.886 0.94 0.805\n", - " person 128 254 0.967 0.818 0.941 0.76\n", - " bicycle 128 6 1 0.625 0.793 0.538\n", - " car 128 46 0.959 0.51 0.723 0.361\n", - " motorcycle 128 5 0.943 1 0.995 0.922\n", - " airplane 128 6 0.961 1 0.995 0.914\n", - " bus 128 7 0.987 1 0.995 0.87\n", - " train 128 3 0.914 1 0.995 0.943\n", - " truck 128 12 0.903 0.667 0.938 0.696\n", - " boat 128 6 1 0.898 0.995 0.75\n", - " traffic light 128 14 0.842 0.384 0.525 0.317\n", - " stop sign 128 2 0.885 1 0.995 0.895\n", - " bench 128 9 0.928 0.889 0.984 0.862\n", - " bird 128 16 0.965 1 0.995 0.863\n", - " cat 128 4 0.919 1 0.995 0.973\n", - " dog 128 9 0.985 1 0.995 0.929\n", - " horse 128 2 0.912 1 0.995 0.924\n", - " elephant 128 17 0.99 1 0.995 0.876\n", - " bear 128 1 1 1 0.995 0.995\n", - " zebra 128 4 0.927 1 0.995 0.995\n", - " giraffe 128 9 0.962 1 0.995 0.915\n", - " backpack 128 6 0.941 0.833 0.942 0.738\n", - " umbrella 128 18 0.893 1 0.987 0.85\n", - " handbag 128 19 1 0.704 0.881 0.687\n", - " tie 128 7 0.959 0.857 0.878 0.853\n", - " suitcase 128 4 0.932 1 0.995 0.88\n", - " frisbee 128 5 0.943 0.8 0.821 0.742\n", - " skis 128 1 0.821 1 0.995 0.895\n", - " snowboard 128 7 0.977 0.857 0.873 0.662\n", - " sports ball 128 6 1 0.585 0.739 0.429\n", - " kite 128 10 0.978 0.9 0.954 0.631\n", - " baseball bat 128 4 1 0.868 0.995 0.69\n", - " baseball glove 128 7 0.924 0.429 0.451 0.392\n", - " skateboard 128 5 0.955 1 0.995 0.798\n", - " tennis racket 128 7 0.965 0.714 0.842 0.622\n", - " bottle 128 18 1 0.553 0.822 0.604\n", - " wine glass 128 16 0.929 0.814 0.914 0.666\n", - " cup 128 36 0.944 0.934 0.97 0.812\n", - " fork 128 6 0.914 1 0.995 0.709\n", - " knife 128 16 0.852 0.719 0.923 0.683\n", - " spoon 128 22 0.954 0.909 0.941 0.708\n", - " bowl 128 28 0.985 0.821 0.874 0.763\n", - " banana 128 1 0.814 1 0.995 0.895\n", - " sandwich 128 2 0.868 1 0.995 0.995\n", - " orange 128 4 0.94 1 0.995 0.869\n", - " broccoli 128 11 0.955 0.818 0.894 0.67\n", - " carrot 128 24 0.921 0.969 0.989 0.779\n", - " hot dog 128 2 0.867 1 0.995 0.995\n", - " pizza 128 5 0.938 1 0.995 0.974\n", - " donut 128 14 0.916 1 0.995 0.973\n", - " cake 128 4 
0.922 1 0.995 0.954\n", - " chair 128 35 1 0.833 0.973 0.791\n", - " couch 128 6 0.946 1 0.995 0.927\n", - " potted plant 128 14 0.991 1 0.995 0.878\n", - " bed 128 3 0.905 1 0.995 0.995\n", - " dining table 128 13 0.98 0.923 0.941 0.825\n", - " toilet 128 2 0.897 1 0.995 0.995\n", - " tv 128 2 0.875 1 0.995 0.995\n", - " laptop 128 3 0.98 1 0.995 0.964\n", - " mouse 128 2 0.797 0.5 0.662 0.455\n", - " remote 128 8 1 0.684 0.884 0.691\n", - " cell phone 128 8 1 0.704 0.995 0.721\n", - " microwave 128 3 0.887 1 0.995 0.954\n", - " oven 128 5 0.959 1 0.995 0.882\n", - " sink 128 6 1 0.752 0.955 0.746\n", - " refrigerator 128 5 0.94 1 0.995 0.877\n", - " book 128 29 1 0.623 0.928 0.606\n", - " clock 128 9 0.979 1 0.995 0.895\n", - " vase 128 2 0.893 1 0.995 0.922\n", - " scissors 128 1 0.797 1 0.995 0.995\n", - " teddy bear 128 21 0.949 1 0.991 0.89\n", - " toothbrush 128 5 0.944 1 0.995 0.932\n", - "Speed: 0.2ms preprocess, 15.9ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_13_pre_val12\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_13_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_13_finetune12\n", + "Speed: 0.2ms preprocess, 16.6ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_13_pre_val13\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", + "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_13_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, 
save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_13_finetune13\n", "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" ] @@ -5418,149 +2373,78 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_13_finetune12/labels.jpg... \n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_13_finetune13/labels.jpg... \n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_13_finetune12\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_13_finetune13\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 9.98G 0.5096 0.332 0.8815 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 12.1G 0.5096 0.332 0.8815 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.935 0.891 0.943 0.812\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 10G 0.3826 0.2558 0.8262 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 11.6G 0.3826 0.2558 0.8262 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.956 0.88 0.949 0.813\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 9.97G 0.4512 0.3126 0.8555 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 11.4G 0.4512 0.3126 0.8555 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.948 0.883 0.945 0.814\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 10.1G 0.4423 0.299 0.8562 68 640: 
100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 11.5G 0.4423 0.299 0.8562 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.95 0.882 0.944 0.811\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 5/10 10G 0.4557 0.3122 0.8492 96 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 5/10 11.5G 0.4557 0.3122 0.8492 96 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.935 0.896 0.945 0.804\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 6/10 10G 0.4734 0.3233 0.8728 120 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 6/10 11.5G 0.4734 0.3233 0.8728 120 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.93 0.892 0.941 0.805\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 7/10 10.1G 0.5367 0.352 0.8796 69 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 7/10 11.5G 0.5367 0.352 0.8796 69 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.934 0.886 0.942 0.804\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 8/10 10G 0.5403 0.3508 0.8848 141 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 8/10 11.5G 0.5403 0.3508 0.8848 141 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.941 0.886 0.943 0.81\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 9/10 9.99G 0.5416 0.3534 0.889 104 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 9/10 11.5G 0.5416 0.3534 0.889 104 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.944 0.893 0.947 0.814\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 10/10 10G 0.6465 0.4027 0.9187 170 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 10/10 11.5G 0.6465 0.4027 0.9187 170 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.945 0.891 0.947 0.818\n", "\n", - "10 epochs completed in 0.019 hours.\n", - "Optimizer stripped from runs/detect/step_13_finetune12/weights/last.pt, 130.3MB\n", - "Optimizer stripped from runs/detect/step_13_finetune12/weights/best.pt, 130.3MB\n", + "10 epochs completed in 0.030 hours.\n", + "Optimizer stripped from runs/detect/step_13_finetune13/weights/last.pt, 130.3MB\n", + "Optimizer stripped from runs/detect/step_13_finetune13/weights/best.pt, 130.3MB\n", "\n", - "Validating runs/detect/step_13_finetune12/weights/best.pt...\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Validating runs/detect/step_13_finetune13/weights/best.pt...\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 
32416863 parameters, 0 gradients, 123.4 GFLOPs\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.944 0.891 0.947 0.819\n", - " person 128 254 0.972 0.827 0.945 0.765\n", - " bicycle 128 6 1 0.63 0.737 0.589\n", - " car 128 46 0.953 0.457 0.647 0.352\n", - " motorcycle 128 5 0.946 1 0.995 0.896\n", - " airplane 128 6 0.957 1 0.995 0.951\n", - " bus 128 7 1 0.918 0.995 0.888\n", - " train 128 3 0.931 1 0.995 0.995\n", - " truck 128 12 0.995 0.667 0.901 0.641\n", - " boat 128 6 1 0.977 0.995 0.805\n", - " traffic light 128 14 0.729 0.386 0.492 0.316\n", - " stop sign 128 2 0.905 1 0.995 0.895\n", - " bench 128 9 0.948 1 0.995 0.866\n", - " bird 128 16 0.982 1 0.995 0.863\n", - " cat 128 4 0.947 1 0.995 0.995\n", - " dog 128 9 0.978 1 0.995 0.921\n", - " horse 128 2 0.895 1 0.995 0.995\n", - " elephant 128 17 0.988 1 0.995 0.888\n", - " bear 128 1 0.835 1 0.995 0.995\n", - " zebra 128 4 0.938 1 0.995 0.995\n", - " giraffe 128 9 0.967 1 0.995 0.927\n", - " backpack 128 6 0.931 0.833 0.955 0.78\n", - " umbrella 128 18 0.935 1 0.989 0.856\n", - " handbag 128 19 1 0.78 0.895 0.7\n", - " tie 128 7 0.997 0.857 0.881 0.79\n", - " suitcase 128 4 0.936 1 0.995 0.834\n", - " frisbee 128 5 1 0.844 0.995 0.77\n", - " skis 128 1 0.855 1 0.995 0.796\n", - " snowboard 128 7 0.971 0.857 0.916 0.752\n", - " sports ball 128 6 1 0.596 0.773 0.407\n", - " kite 128 10 0.958 0.9 0.954 0.639\n", - " baseball bat 128 4 1 0.849 0.995 0.763\n", - " baseball glove 128 7 0.973 0.429 0.451 0.373\n", - " skateboard 128 5 0.97 1 0.995 0.839\n", - " tennis racket 128 7 1 0.753 0.864 0.628\n", - " bottle 128 18 0.951 0.556 0.814 0.613\n", - " wine glass 128 16 0.946 0.625 0.917 0.686\n", - " cup 128 36 0.992 0.917 0.975 0.79\n", - " fork 128 6 0.962 1 0.995 0.763\n", - " knife 128 16 0.933 0.688 0.934 0.675\n", - " spoon 128 22 0.99 0.864 0.952 0.8\n", - " bowl 128 28 1 0.812 0.918 0.81\n", - " banana 128 1 0.834 1 0.995 0.995\n", - " sandwich 128 2 0.886 1 0.995 0.995\n", - " orange 128 4 0.94 1 0.995 0.863\n", - " broccoli 128 11 0.826 0.818 0.905 0.676\n", - " carrot 128 24 0.947 0.958 0.99 0.792\n", - " hot dog 128 2 0.889 1 0.995 0.995\n", - " pizza 128 5 0.946 1 0.995 0.974\n", - " donut 128 14 0.963 1 0.995 0.981\n", - " cake 128 4 0.936 1 0.995 0.995\n", - " chair 128 35 0.967 0.848 0.959 0.777\n", - " couch 128 6 0.956 1 0.995 0.931\n", - " potted plant 128 14 0.993 1 0.995 0.893\n", - " bed 128 3 0.924 1 0.995 0.995\n", - " dining table 128 13 0.994 0.923 0.934 0.788\n", - " toilet 128 2 0.923 1 0.995 0.995\n", - " tv 128 2 0.913 1 0.995 0.995\n", - " laptop 128 3 0.999 1 0.995 0.965\n", - " mouse 128 2 1 0.65 0.995 0.665\n", - " remote 128 8 1 0.684 0.857 0.673\n", - " cell phone 128 8 0.995 0.875 0.982 0.744\n", - " microwave 128 3 0.921 1 0.995 0.965\n", - " oven 128 5 0.985 1 0.995 0.953\n", - " sink 128 6 1 0.751 0.972 0.732\n", - " refrigerator 128 5 0.955 1 0.995 0.911\n", - " book 128 29 0.917 0.762 0.933 0.612\n", - " clock 128 9 0.986 1 0.995 0.881\n", - " vase 128 2 0.895 1 0.995 0.995\n", - " scissors 128 1 0.451 1 0.995 0.995\n", - " teddy bear 128 21 0.982 1 0.995 0.904\n", - " toothbrush 128 5 0.965 1 0.995 0.909\n", - "Speed: 0.1ms preprocess, 4.6ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_13_finetune12\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 
0.1ms preprocess, 4.2ms inference, 0.0ms loss, 0.2ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_13_finetune13\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -5577,83 +2461,12 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 32416863 parameters, 0 gradients, 123.4 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.951 0.887 0.946 0.815\n", - " person 128 254 0.972 0.828 0.943 0.764\n", - " bicycle 128 6 1 0.628 0.749 0.574\n", - " car 128 46 0.954 0.455 0.655 0.362\n", - " motorcycle 128 5 0.949 1 0.995 0.92\n", - " airplane 128 6 0.962 1 0.995 0.95\n", - " bus 128 7 0.999 1 0.995 0.859\n", - " train 128 3 0.939 1 0.995 0.966\n", - " truck 128 12 1 0.654 0.882 0.614\n", - " boat 128 6 1 0.944 0.995 0.765\n", - " traffic light 128 14 0.858 0.433 0.615 0.334\n", - " stop sign 128 2 0.911 1 0.995 0.895\n", - " bench 128 9 0.944 1 0.995 0.884\n", - " bird 128 16 0.975 1 0.995 0.862\n", - " cat 128 4 0.941 1 0.995 0.974\n", - " dog 128 9 0.978 1 0.995 0.945\n", - " horse 128 2 0.898 1 0.995 0.995\n", - " elephant 128 17 0.992 1 0.995 0.895\n", - " bear 128 1 0.841 1 0.995 0.995\n", - " zebra 128 4 0.941 1 0.995 0.995\n", - " giraffe 128 9 0.967 1 0.995 0.926\n", - " backpack 128 6 0.947 0.833 0.942 0.763\n", - " umbrella 128 18 0.882 1 0.99 0.846\n", - " handbag 128 19 1 0.769 0.887 0.721\n", - " tie 128 7 0.977 0.857 0.884 0.794\n", - " suitcase 128 4 0.941 1 0.995 0.868\n", - " frisbee 128 5 1 0.844 0.995 0.773\n", - " skis 128 1 0.864 1 0.995 0.895\n", - " snowboard 128 7 0.974 0.857 0.92 0.688\n", - " sports ball 128 6 1 0.588 0.755 0.421\n", - " kite 128 10 0.961 0.9 0.951 0.646\n", - " baseball bat 128 4 1 0.846 0.995 0.676\n", - " baseball glove 128 7 0.96 0.429 0.453 0.369\n", - " skateboard 128 5 0.961 1 0.995 0.84\n", - " tennis racket 128 7 1 0.751 0.862 0.638\n", - " bottle 128 18 0.95 0.556 0.806 0.581\n", - " wine glass 128 16 1 0.666 0.938 0.693\n", - " cup 128 36 0.943 0.918 0.973 0.793\n", - " fork 128 6 0.965 1 0.995 0.78\n", - " knife 128 16 0.931 0.688 0.94 0.673\n", - " spoon 128 22 0.974 0.864 0.95 0.782\n", - " bowl 128 28 0.972 0.786 0.907 0.794\n", - " banana 128 1 0.847 1 0.995 0.895\n", - " sandwich 128 2 0.887 1 0.995 0.995\n", - " orange 128 4 0.942 1 0.995 0.883\n", - " broccoli 128 11 0.826 0.818 0.91 0.65\n", - " carrot 128 24 0.993 0.958 0.992 0.811\n", - " hot dog 128 2 0.894 1 0.995 0.995\n", - " pizza 128 5 0.959 1 0.995 0.978\n", - " donut 128 14 0.961 1 0.995 0.971\n", - " cake 128 4 0.939 1 0.995 0.954\n", - " chair 128 35 0.965 0.791 0.955 0.775\n", - " couch 128 6 0.961 1 0.995 0.928\n", - " potted plant 128 14 0.982 1 0.995 0.898\n", - " bed 128 3 0.924 1 0.995 0.995\n", - " dining table 128 13 0.999 0.923 0.973 0.835\n", - " toilet 128 2 0.934 1 0.995 0.948\n", - " tv 128 2 0.923 1 0.995 0.995\n", - " laptop 128 3 1 0.917 0.995 0.964\n", - " mouse 128 2 0.871 0.5 0.745 0.537\n", - " remote 128 8 1 0.674 0.886 0.7\n", - " cell phone 128 8 1 0.792 0.995 0.767\n", - " microwave 128 3 0.926 1 0.995 0.964\n", - " 
oven 128 5 0.984 1 0.995 0.94\n", - " sink 128 6 1 0.741 0.955 0.696\n", - " refrigerator 128 5 0.965 1 0.995 0.911\n", - " book 128 29 0.956 0.747 0.93 0.634\n", - " clock 128 9 0.988 1 0.995 0.894\n", - " vase 128 2 0.904 1 0.995 0.995\n", - " scissors 128 1 0.81 1 0.995 0.995\n", - " teddy bear 128 21 0.981 1 0.995 0.91\n", - " toothbrush 128 5 0.966 1 0.995 0.919\n", - "Speed: 0.2ms preprocess, 16.7ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_13_post_val12\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.2ms preprocess, 17.3ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_13_post_val13\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -5675,86 +2488,14 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 32416863 parameters, 74176 gradients, 123.4 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.951 0.886 0.945 0.815\n", - " person 128 254 0.972 0.824 0.942 0.763\n", - " bicycle 128 6 1 0.63 0.749 0.574\n", - " car 128 46 0.954 0.453 0.656 0.361\n", - " motorcycle 128 5 0.949 1 0.995 0.92\n", - " airplane 128 6 0.962 1 0.995 0.951\n", - " bus 128 7 1 0.993 0.995 0.859\n", - " train 128 3 0.94 1 0.995 0.966\n", - " truck 128 12 1 0.655 0.883 0.614\n", - " boat 128 6 1 0.943 0.995 0.765\n", - " traffic light 128 14 0.858 0.431 0.615 0.335\n", - " stop sign 128 2 0.911 1 0.995 0.948\n", - " bench 128 9 0.944 1 0.995 0.883\n", - " bird 128 16 0.975 1 0.995 0.861\n", - " cat 128 4 0.941 1 0.995 0.974\n", - " dog 128 9 0.979 1 0.995 0.945\n", - " horse 128 2 0.899 1 0.995 0.995\n", - " elephant 128 17 0.992 1 0.995 0.895\n", - " bear 128 1 0.842 1 0.995 0.995\n", - " zebra 128 4 0.942 1 0.995 0.995\n", - " giraffe 128 9 0.966 1 0.995 0.926\n", - " backpack 128 6 0.947 0.833 0.942 0.763\n", - " umbrella 128 18 0.882 1 0.99 0.852\n", - " handbag 128 19 1 0.768 0.887 0.714\n", - " tie 128 7 0.977 0.857 0.884 0.794\n", - " suitcase 128 4 0.941 1 0.995 0.868\n", - " frisbee 128 5 1 0.842 0.995 0.773\n", - " skis 128 1 0.865 1 0.995 0.895\n", - " snowboard 128 7 0.974 0.857 0.925 0.689\n", - " sports ball 128 6 1 0.586 0.755 0.42\n", - " kite 128 10 0.965 0.9 0.951 0.646\n", - " baseball bat 128 4 1 0.846 0.995 0.697\n", - " baseball glove 128 7 0.962 0.429 0.451 0.367\n", - " skateboard 128 5 0.96 1 0.995 0.839\n", - " tennis racket 128 7 0.957 0.714 0.842 0.633\n", - " bottle 128 18 0.948 0.556 0.806 0.579\n", - " wine glass 128 16 1 0.66 0.938 0.693\n", - " cup 128 36 0.943 0.917 0.973 0.792\n", - " fork 128 6 0.964 1 0.995 0.779\n", - " knife 128 16 0.933 0.688 0.939 0.673\n", - " spoon 128 22 0.974 0.864 0.95 0.782\n", - " bowl 128 28 0.973 0.786 0.907 0.795\n", - " banana 128 1 0.847 1 0.995 0.895\n", - " sandwich 128 2 0.888 1 0.995 0.995\n", - " orange 128 4 0.943 1 0.995 0.883\n", - " broccoli 128 11 0.828 0.818 0.91 0.644\n", - " carrot 128 24 0.997 0.958 0.992 0.8\n", - " hot dog 128 2 
0.895 1 0.995 0.995\n", - " pizza 128 5 0.959 1 0.995 0.978\n", - " donut 128 14 0.961 1 0.995 0.97\n", - " cake 128 4 0.939 1 0.995 0.954\n", - " chair 128 35 0.965 0.791 0.956 0.772\n", - " couch 128 6 0.96 1 0.995 0.928\n", - " potted plant 128 14 0.983 1 0.995 0.9\n", - " bed 128 3 0.924 1 0.995 0.995\n", - " dining table 128 13 0.999 0.923 0.973 0.835\n", - " toilet 128 2 0.933 1 0.995 0.948\n", - " tv 128 2 0.924 1 0.995 0.995\n", - " laptop 128 3 1 0.922 0.995 0.964\n", - " mouse 128 2 0.871 0.5 0.745 0.536\n", - " remote 128 8 1 0.675 0.886 0.7\n", - " cell phone 128 8 1 0.792 0.995 0.766\n", - " microwave 128 3 0.926 1 0.995 0.964\n", - " oven 128 5 0.985 1 0.995 0.94\n", - " sink 128 6 1 0.74 0.955 0.705\n", - " refrigerator 128 5 0.966 1 0.995 0.911\n", - " book 128 29 0.956 0.748 0.93 0.637\n", - " clock 128 9 0.987 1 0.995 0.894\n", - " vase 128 2 0.904 1 0.995 0.995\n", - " scissors 128 1 0.809 1 0.995 0.995\n", - " teddy bear 128 21 0.981 1 0.995 0.91\n", - " toothbrush 128 5 0.967 1 0.995 0.919\n", - "Speed: 0.2ms preprocess, 16.7ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_14_pre_val12\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_14_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=True, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_14_finetune12\n", - "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n", - "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n" + "Speed: 0.2ms preprocess, 17.4ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_14_pre_val13\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", + "\u001b[34m\u001b[1myolo/engine/trainer: \u001b[0mtask=detect, mode=train, model=None, data=coco128.yaml, epochs=10, patience=50, batch=16, imgsz=640, save=True, save_period=-1, cache=False, device=None, workers=8, project=None, name=step_14_finetune, exist_ok=False, pretrained=True, optimizer=auto, verbose=False, seed=0, deterministic=True, single_cls=False, rect=False, cos_lr=False, 
close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, overlap_mask=True, mask_ratio=4, dropout=0.0, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, show=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, vid_stride=1, line_width=None, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, boxes=True, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=0.01, lrf=0.01, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, mosaic=1.0, mixup=0.0, copy_paste=0.0, cfg=None, v5loader=False, tracker=botsort.yaml, save_dir=runs/detect/step_14_finetune13\n", + "\u001b[34m\u001b[1mAMP: \u001b[0mrunning Automatic Mixed Precision (AMP) checks with YOLOv8n...\n" ] }, { @@ -5771,149 +2512,79 @@ "name": "stderr", "output_type": "stream", "text": [ - "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backg\u001b[0m\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - "Plotting labels to runs/detect/step_14_finetune12/labels.jpg... \n", + "\u001b[34m\u001b[1mAMP: \u001b[0mchecks passed ✅\n", + "\u001b[34m\u001b[1mtrain: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgr\u001b[0m\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgrou\u001b[0m\n", + "Plotting labels to runs/detect/step_14_finetune13/labels.jpg... 
\n", "\u001b[34m\u001b[1moptimizer:\u001b[0m AdamW(lr=0.000119, momentum=0.9) with parameter groups 105 weight(decay=0.0), 112 weight(decay=0.0005), 111 bias(decay=0.0)\n", "Image sizes 640 train, 640 val\n", "Using 8 dataloader workers\n", - "Logging results to \u001b[1mruns/detect/step_14_finetune12\u001b[0m\n", + "Logging results to \u001b[1mruns/detect/step_14_finetune13\u001b[0m\n", "Starting training for 10 epochs...\n", "Closing dataloader mosaic\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 1/10 9.94G 0.4922 0.3236 0.8733 122 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 1/10 11.4G 0.4922 0.3236 0.8733 122 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.955 0.882 0.946 0.822\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 2/10 10.1G 0.3523 0.2478 0.8197 112 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 2/10 11.4G 0.3523 0.2478 0.8197 112 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.953 0.89 0.943 0.817\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 3/10 9.99G 0.4283 0.2858 0.8487 116 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 3/10 11.4G 0.4283 0.2858 0.8487 116 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.95 0.89 0.941 0.82\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 4/10 10G 0.408 0.2829 0.8446 68 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 4/10 11.4G 0.408 0.2829 0.8446 68 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.953 0.886 0.945 0.819\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 5/10 10G 0.4299 0.2959 0.8395 96 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 5/10 11.4G 0.4299 0.2959 0.8395 96 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.943 0.902 0.944 0.823\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 6/10 10G 0.4327 0.3007 0.8581 120 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 6/10 11.4G 0.4327 0.3007 0.8581 120 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.946 0.889 0.943 0.82\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 7/10 10G 0.4762 0.3178 0.8584 69 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 7/10 11.4G 0.4762 0.3178 0.8584 69 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.944 0.892 0.945 0.814\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 8/10 9.98G 0.5052 0.3337 0.8724 141 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 8/10 11.4G 0.5052 0.3337 0.8724 141 
640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.946 0.893 0.944 0.819\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 9/10 10G 0.5167 0.3323 0.8776 104 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 9/10 11.4G 0.5167 0.3323 0.8776 104 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.943 0.898 0.944 0.819\n", "\n", " Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size\n", - " 10/10 10G 0.5994 0.3758 0.8965 170 640: 100%|██████████| 8/8 [00:0\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " 10/10 11.4G 0.5994 0.3758 0.8965 170 640: 100%|██████████| 8/8 [00:02\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.952 0.893 0.946 0.822\n", "\n", - "10 epochs completed in 0.018 hours.\n", - "Optimizer stripped from runs/detect/step_14_finetune12/weights/last.pt, 130.3MB\n", - "Optimizer stripped from runs/detect/step_14_finetune12/weights/best.pt, 130.3MB\n", + "10 epochs completed in 0.017 hours.\n", + "Optimizer stripped from runs/detect/step_14_finetune13/weights/last.pt, 130.3MB\n", + "Optimizer stripped from runs/detect/step_14_finetune13/weights/best.pt, 130.3MB\n", "\n", - "Validating runs/detect/step_14_finetune12/weights/best.pt...\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", + "Validating runs/detect/step_14_finetune13/weights/best.pt...\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n", "YOLOv8l summary (fused): 285 layers, 32416863 parameters, 0 gradients, 123.4 GFLOPs\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.943 0.902 0.944 0.823\n", - " person 128 254 0.969 0.861 0.948 0.781\n", - " bicycle 128 6 0.921 0.667 0.724 0.579\n", - " car 128 46 0.926 0.543 0.722 0.384\n", - " motorcycle 128 5 0.945 1 0.995 0.923\n", - " airplane 128 6 0.967 1 0.995 0.939\n", - " bus 128 7 0.948 1 0.995 0.881\n", - " train 128 3 0.878 1 0.995 0.964\n", - " truck 128 12 1 0.745 0.979 0.67\n", - " boat 128 6 0.997 1 0.995 0.766\n", - " traffic light 128 14 0.79 0.539 0.536 0.367\n", - " stop sign 128 2 0.885 1 0.995 0.826\n", - " bench 128 9 1 0.944 0.995 0.85\n", - " bird 128 16 0.985 1 0.995 0.876\n", - " cat 128 4 0.965 1 0.995 0.995\n", - " dog 128 9 0.99 1 0.995 0.968\n", - " horse 128 2 0.898 1 0.995 0.995\n", - " elephant 128 17 0.934 1 0.995 0.882\n", - " bear 128 1 1 1 0.995 0.995\n", - " zebra 128 4 0.935 1 0.995 0.995\n", - " giraffe 128 9 0.959 1 0.995 0.945\n", - " backpack 128 6 0.971 0.833 0.942 0.765\n", - " umbrella 128 18 0.984 0.944 0.992 0.839\n", - " handbag 128 19 1 0.838 0.896 0.73\n", - " tie 128 7 0.952 0.857 0.877 0.775\n", - " suitcase 128 4 0.942 1 0.995 0.865\n", - " frisbee 128 5 0.948 0.8 0.962 0.753\n", - " skis 128 1 0.833 1 0.995 0.895\n", - " snowboard 128 7 1 0.732 0.877 0.732\n", - " sports ball 128 6 1 0.628 0.684 0.403\n", - " kite 128 10 0.998 0.9 0.962 0.62\n", - " baseball bat 128 4 1 0.875 0.995 0.779\n", - " baseball glove 128 7 0.914 0.429 0.452 0.368\n", - " skateboard 128 5 0.981 1 0.995 0.83\n", - " tennis racket 128 7 1 0.754 0.863 0.655\n", - " bottle 128 18 0.784 0.604 0.836 0.639\n", - " wine glass 128 
16 0.839 1 0.975 0.703\n", - " cup 128 36 1 0.925 0.977 0.821\n", - " fork 128 6 0.97 1 0.995 0.784\n", - " knife 128 16 0.929 0.814 0.926 0.712\n", - " spoon 128 22 1 0.909 0.959 0.711\n", - " bowl 128 28 0.958 0.821 0.919 0.818\n", - " banana 128 1 0.823 1 0.995 0.995\n", - " sandwich 128 2 0.887 1 0.995 0.995\n", - " orange 128 4 0.874 1 0.995 0.904\n", - " broccoli 128 11 0.948 0.818 0.915 0.685\n", - " carrot 128 24 0.959 0.985 0.993 0.782\n", - " hot dog 128 2 0.884 1 0.995 0.995\n", - " pizza 128 5 0.949 1 0.995 0.931\n", - " donut 128 14 0.993 1 0.995 0.955\n", - " cake 128 4 0.943 1 0.995 0.995\n", - " chair 128 35 0.967 0.85 0.972 0.806\n", - " couch 128 6 0.954 1 0.995 0.969\n", - " potted plant 128 14 0.98 1 0.995 0.916\n", - " bed 128 3 0.917 1 0.995 0.995\n", - " dining table 128 13 1 0.917 0.932 0.841\n", - " toilet 128 2 0.934 1 0.995 0.949\n", - " tv 128 2 0.884 1 0.995 0.995\n", - " laptop 128 3 0.929 1 0.995 0.965\n", - " mouse 128 2 0.857 0.5 0.695 0.544\n", - " remote 128 8 1 0.733 0.864 0.707\n", - " cell phone 128 8 1 0.764 0.995 0.727\n", - " microwave 128 3 0.92 1 0.995 0.945\n", - " oven 128 5 1 0.945 0.995 0.96\n", - " sink 128 6 1 0.727 0.922 0.697\n", - " refrigerator 128 5 0.943 1 0.995 0.961\n", - " book 128 29 0.913 0.828 0.939 0.631\n", - " clock 128 9 0.984 1 0.995 0.914\n", - " vase 128 2 0.891 1 0.995 0.995\n", - " scissors 128 1 0.812 1 0.995 0.995\n", - " teddy bear 128 21 0.98 1 0.995 0.894\n", - " toothbrush 128 5 0.944 1 0.995 0.995\n", - "Speed: 0.1ms preprocess, 4.6ms inference, 0.0ms loss, 0.2ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_14_finetune12\u001b[0m\n", - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n" + "Speed: 0.1ms preprocess, 4.2ms inference, 0.0ms loss, 0.2ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_14_finetune13\u001b[0m\n", + "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24253MiB)\n" ] }, { @@ -5930,82 +2601,11 @@ "output_type": "stream", "text": [ "YOLOv8l summary (fused): 285 layers, 32416863 parameters, 0 gradients, 123.4 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 126 images, 2 backgro\u001b[0m\n", - " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████\n", + "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/train2017.cache... 
126 images, 2 backgrou\u001b[0m\n", + " Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████|\n", " all 128 929 0.947 0.899 0.943 0.819\n", - " person 128 254 0.977 0.848 0.949 0.778\n", - " bicycle 128 6 0.895 0.667 0.792 0.568\n", - " car 128 46 0.963 0.562 0.731 0.385\n", - " motorcycle 128 5 0.945 1 0.995 0.916\n", - " airplane 128 6 0.966 1 0.995 0.924\n", - " bus 128 7 0.952 1 0.995 0.811\n", - " train 128 3 0.881 1 0.995 0.964\n", - " truck 128 12 1 0.781 0.968 0.677\n", - " boat 128 6 0.998 1 0.995 0.777\n", - " traffic light 128 14 0.795 0.557 0.544 0.364\n", - " stop sign 128 2 0.89 1 0.995 0.85\n", - " bench 128 9 1 0.938 0.995 0.876\n", - " bird 128 16 0.984 1 0.995 0.881\n", - " cat 128 4 0.965 1 0.995 0.97\n", - " dog 128 9 0.984 1 0.995 0.967\n", - " horse 128 2 0.919 1 0.995 0.995\n", - " elephant 128 17 0.938 1 0.995 0.908\n", - " bear 128 1 1 1 0.995 0.995\n", - " zebra 128 4 1 1 0.995 0.995\n", - " giraffe 128 9 0.954 1 0.995 0.924\n", - " backpack 128 6 0.982 0.833 0.942 0.78\n", - " umbrella 128 18 0.987 0.944 0.992 0.851\n", - " handbag 128 19 1 0.831 0.895 0.734\n", - " tie 128 7 0.956 0.857 0.876 0.794\n", - " suitcase 128 4 0.94 1 0.995 0.848\n", - " frisbee 128 5 0.946 0.8 0.817 0.738\n", - " skis 128 1 0.835 1 0.995 0.895\n", - " snowboard 128 7 1 0.759 0.874 0.73\n", - " sports ball 128 6 1 0.615 0.771 0.432\n", - " kite 128 10 1 0.894 0.957 0.611\n", - " baseball bat 128 4 1 0.866 0.995 0.757\n", - " baseball glove 128 7 0.932 0.429 0.454 0.385\n", - " skateboard 128 5 0.965 1 0.995 0.831\n", - " tennis racket 128 7 1 0.753 0.861 0.637\n", - " bottle 128 18 0.783 0.603 0.831 0.594\n", - " wine glass 128 16 0.852 1 0.995 0.72\n", - " cup 128 36 1 0.937 0.975 0.828\n", - " fork 128 6 0.98 1 0.995 0.779\n", - " knife 128 16 1 0.86 0.932 0.71\n", - " spoon 128 22 0.999 0.909 0.96 0.725\n", - " bowl 128 28 0.958 0.805 0.9 0.789\n", - " banana 128 1 0.828 1 0.995 0.995\n", - " sandwich 128 2 0.88 1 0.995 0.995\n", - " orange 128 4 0.852 1 0.995 0.895\n", - " broccoli 128 11 0.952 0.818 0.913 0.682\n", - " carrot 128 24 0.959 0.979 0.993 0.784\n", - " hot dog 128 2 0.886 1 0.995 0.995\n", - " pizza 128 5 0.953 1 0.995 0.931\n", - " donut 128 14 0.996 1 0.995 0.972\n", - " cake 128 4 0.944 1 0.995 0.954\n", - " chair 128 35 0.934 0.808 0.967 0.798\n", - " couch 128 6 0.95 1 0.995 0.923\n", - " potted plant 128 14 0.979 1 0.995 0.932\n", - " bed 128 3 0.918 1 0.995 0.995\n", - " dining table 128 13 1 0.922 0.932 0.802\n", - " toilet 128 2 1 0.936 0.995 0.949\n", - " tv 128 2 0.889 1 0.995 0.995\n", - " laptop 128 3 0.94 1 0.995 0.965\n", - " mouse 128 2 0.876 0.5 0.638 0.521\n", - " remote 128 8 1 0.689 0.874 0.749\n", - " cell phone 128 8 1 0.661 0.995 0.725\n", - " microwave 128 3 0.93 1 0.995 0.944\n", - " oven 128 5 1 0.937 0.995 0.92\n", - " sink 128 6 1 0.724 0.904 0.699\n", - " refrigerator 128 5 0.933 1 0.995 0.959\n", - " book 128 29 0.906 0.793 0.939 0.636\n", - " clock 128 9 0.982 1 0.995 0.9\n", - " vase 128 2 0.897 1 0.995 0.995\n", - " scissors 128 1 0.821 1 0.995 0.995\n", - " teddy bear 128 21 0.98 1 0.995 0.887\n", - " toothbrush 128 5 0.944 1 0.995 0.98\n", - "Speed: 0.2ms preprocess, 16.7ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/step_14_post_val12\u001b[0m\n", + "Speed: 0.2ms preprocess, 17.2ms inference, 0.0ms loss, 0.4ms postprocess per image\n", + "Results saved to \u001b[1mruns/detect/step_14_post_val13\u001b[0m\n", "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CPU\n" ] }, 
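The validation summaries above make the net effect of pruning steps 12–14 easy to read off: the fused YOLOv8l goes from 43668288 parameters / 165.2 GFLOPs (the baseline summary in the removed cells below) to 32416863 parameters / 123.4 GFLOPs, while COCO128 mAP50-95 holds around 0.82. A minimal sketch of reproducing that comparison with torch-pruning's op counter, assuming the checkpoint paths from the run directories logged above:

    import torch
    import torch_pruning as tp
    from ultralytics import YOLO

    # Dense baseline vs. pruned + fine-tuned checkpoint; the run-directory
    # name is taken from the logs above (assumed -- adjust to your own run).
    base = YOLO('yolov8l.pt')
    pruned = YOLO('runs/detect/step_14_finetune13/weights/best.pt')

    example_inputs = torch.randn(1, 3, 640, 640)
    base_macs, base_params = tp.utils.count_ops_and_params(base.model, example_inputs)
    pruned_macs, pruned_params = tp.utils.count_ops_and_params(pruned.model, example_inputs)

    print(f"MACs:   {base_macs / 1e9:.1f}G -> {pruned_macs / 1e9:.1f}G "
          f"({100 * (1 - pruned_macs / base_macs):.1f}% fewer)")
    print(f"params: {base_params / 1e6:.1f}M -> {pruned_params / 1e6:.1f}M "
          f"({100 * (1 - pruned_params / base_params):.1f}% fewer)")

On the numbers logged in this run that works out to roughly 26% fewer parameters and 25% fewer GFLOPs for essentially unchanged COCO128 accuracy, which is the headline result of the iterative prune/fine-tune schedule.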
@@ -6025,15 +2625,15 @@ "text": [ "YOLOv8l summary (fused): 285 layers, 32416863 parameters, 0 gradients, 123.4 GFLOPs\n", "\n", - "\u001b[34m\u001b[1mPyTorch:\u001b[0m starting from runs/detect/step_14_finetune12/weights/best.pt with input shape (1, 3, 640, 640) BCHW and output shape(s) (1, 84, 8400) (124.2 MB)\n", + "\u001b[34m\u001b[1mPyTorch:\u001b[0m starting from runs/detect/step_14_finetune13/weights/best.pt with input shape (1, 3, 640, 640) BCHW and output shape(s) (1, 84, 8400) (124.2 MB)\n", "\n", "\u001b[34m\u001b[1mONNX:\u001b[0m starting export with onnx 1.16.0 opset 17...\n", - "\u001b[34m\u001b[1mONNX:\u001b[0m export success ✅ 2.0s, saved as runs/detect/step_14_finetune12/weights/best.onnx (123.9 MB)\n", + "\u001b[34m\u001b[1mONNX:\u001b[0m export success ✅ 2.6s, saved as runs/detect/step_14_finetune13/weights/best.onnx (123.9 MB)\n", "\n", - "Export complete (2.8s)\n", - "Results saved to \u001b[1m/home/HubensN/fasterai/nbs/runs/detect/step_14_finetune12/weights\u001b[0m\n", - "Predict: yolo predict task=detect model=runs/detect/step_14_finetune12/weights/best.onnx imgsz=640 \n", - "Validate: yolo val task=detect model=runs/detect/step_14_finetune12/weights/best.onnx imgsz=640 data=/home/HubensN/miniconda3/envs/fasterai/lib/python3.9/site-packages/ultralytics/datasets/coco128.yaml \n", + "Export complete (3.5s)\n", + "Results saved to \u001b[1m/home/HubensN/fasterai/nbs/runs/detect/step_14_finetune13/weights\u001b[0m\n", + "Predict: yolo predict task=detect model=runs/detect/step_14_finetune13/weights/best.onnx imgsz=640 \n", + "Validate: yolo val task=detect model=runs/detect/step_14_finetune13/weights/best.onnx imgsz=640 data=/home/HubensN/miniconda3/envs/fasterai/lib/python3.9/site-packages/ultralytics/datasets/coco128.yaml \n", "Visualize: https://netron.app\n" ] } @@ -6059,103 +2659,6 @@ "## Post-Training Checks" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "ad3b5e49-3a87-4560-bab6-7b9e46962528", - "metadata": {}, - "outputs": [], - "source": [ - "model = YOLO('yolov8l.pt')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "09d607c5-fb08-4ac2-b930-da992b87a78a", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(82726406400.0, 43691520)" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "example_inputs = torch.randn(1, 3, pruning_cfg[\"imgsz\"], pruning_cfg[\"imgsz\"]).to(model.device)\n", - "base_macs, base_nparams = tp.utils.count_ops_and_params(model.model, example_inputs); base_macs, base_nparams" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "aeb73b69-8560-4eb1-9ba8-ff442c44f7e4", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "YOLOv8l summary (fused): 268 layers, 43668288 parameters, 0 gradients, 165.2 GFLOPs\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128\u001b[0m\n", - " Class Images Instances Box(P \n", - " all 128 929 0.833 0.694 0.83 0.659\n", - "Speed: 0.2ms preprocess, 22.6ms inference, 0.0ms loss, 0.4ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/val33\u001b[0m\n" - ] - } - ], - "source": [ - "results = model.val(\n", - " data='coco128.yaml',\n", - " batch=1,\n", - " imgsz=640,\n", - " verbose=False,\n", - " )" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": 
"f2326a5f-6ecb-48b8-a99d-d73d12ef4134", - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "ultralytics.yolo.utils.metrics.DetMetrics object with attributes:\n", - "\n", - "ap_class_index: array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 13, 14, 15, 16, 17, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 67, 68, 69, 71, 72, 73, 74, 75, 76, 77, 79])\n", - "box: ultralytics.yolo.utils.metrics.Metric object\n", - "confusion_matrix: \n", - "fitness: 0.675906972569436\n", - "keys: ['metrics/precision(B)', 'metrics/recall(B)', 'metrics/mAP50(B)', 'metrics/mAP50-95(B)']\n", - "maps: array([ 0.67313, 0.52749, 0.35361, 0.82095, 0.96459, 0.73294, 0.96262, 0.33383, 0.59455, 0.27281, 0.65884, 0.8955, 0.65884, 0.5898, 0.69265, 0.9071, 0.90664, 0.79925, 0.65884, 0.65884, 0.85369, 0.8955, 0.97279, 0.81737,\n", - " 0.49689, 0.69167, 0.37624, 0.66138, 0.65029, 0.72404, 0.8955, 0.75108, 0.39215, 0.1314, 0.46394, 0.35823, 0.50221, 0.65884, 0.29321, 0.46131, 0.55652, 0.63929, 0.31428, 0.5972, 0.52909, 0.69682, 0.995, 0.65884,\n", - " 0.995, 0.77321, 0.34151, 0.60396, 0.995, 0.87003, 0.89804, 0.90457, 0.57862, 0.70168, 0.66256, 0.80568, 0.60875, 0.9465, 0.94606, 0.81127, 0.34267, 0.66362, 0.65884, 0.43598, 0.86589, 0.28153, 0.65884, 0.29212,\n", - " 0.77938, 0.31383, 0.78713, 0.995, 0.2985, 0.70141, 0.65884, 0.83346])\n", - "names: {0: 'person', 1: 'bicycle', 2: 'car', 3: 'motorcycle', 4: 'airplane', 5: 'bus', 6: 'train', 7: 'truck', 8: 'boat', 9: 'traffic light', 10: 'fire hydrant', 11: 'stop sign', 12: 'parking meter', 13: 'bench', 14: 'bird', 15: 'cat', 16: 'dog', 17: 'horse', 18: 'sheep', 19: 'cow', 20: 'elephant', 21: 'bear', 22: 'zebra', 23: 'giraffe', 24: 'backpack', 25: 'umbrella', 26: 'handbag', 27: 'tie', 28: 'suitcase', 29: 'frisbee', 30: 'skis', 31: 'snowboard', 32: 'sports ball', 33: 'kite', 34: 'baseball bat', 35: 'baseball glove', 36: 'skateboard', 37: 'surfboard', 38: 'tennis racket', 39: 'bottle', 40: 'wine glass', 41: 'cup', 42: 'fork', 43: 'knife', 44: 'spoon', 45: 'bowl', 46: 'banana', 47: 'apple', 48: 'sandwich', 49: 'orange', 50: 'broccoli', 51: 'carrot', 52: 'hot dog', 53: 'pizza', 54: 'donut', 55: 'cake', 56: 'chair', 57: 'couch', 58: 'potted plant', 59: 'bed', 60: 'dining table', 61: 'toilet', 62: 'tv', 63: 'laptop', 64: 'mouse', 65: 'remote', 66: 'keyboard', 67: 'cell phone', 68: 'microwave', 69: 'oven', 70: 'toaster', 71: 'sink', 72: 'refrigerator', 73: 'book', 74: 'clock', 75: 'vase', 76: 'scissors', 77: 'teddy bear', 78: 'hair drier', 79: 'toothbrush'}\n", - "plot: True\n", - "results_dict: {'metrics/precision(B)': 0.8326166176236276, 'metrics/recall(B)': 0.6936424117852013, 'metrics/mAP50(B)': 0.8295267512014245, 'metrics/mAP50-95(B)': 0.6588381082769927, 'fitness': 0.675906972569436}\n", - "save_dir: Path('runs/detect/val33')\n", - "speed: {'preprocess': 0.18010102212429047, 'inference': 22.57813699543476, 'loss': 0.0046174973249435425, 'postprocess': 0.39862655103206635}" - ] - }, - "execution_count": null, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "results" - ] - }, { "cell_type": "code", "execution_count": null, @@ -6292,46 +2795,6 @@ "source": [ "model.export(format = 'onnx', half = True)" ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "62928f60-30a6-43d9-954d-d9760e35caac", - "metadata": {}, - "outputs": [], - "source": [ - "model = 
YOLO('/home/HubensN/fasterai/nbs/runs/detect/step_14_finetune4/weights/best.onnx')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e5aec271-88b2-4616-9acc-7d71c8ba44c4", - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Ultralytics YOLOv8.0.132 🚀 Python-3.9.0 torch-2.2.1 CUDA:0 (NVIDIA GeForce RTX 3090, 24268MiB)\n", - "Loading yolov8l.onnx for ONNX Runtime inference...\n", - "Forcing batch=1 square inference (1,3,640,640) for non-PyTorch models\n", - "\u001b[34m\u001b[1mval: \u001b[0mScanning /home/HubensN/fasterai/nbs/datasets/coco128/labels/tra\u001b[0m\n", - " Class Images Instances Box(P R \n", - " all 128 929 0.731 0.767 0.828 0.658\n", - "Speed: 0.2ms preprocess, 13.1ms inference, 0.0ms loss, 0.5ms postprocess per image\n", - "Results saved to \u001b[1mruns/detect/val39\u001b[0m\n" - ] - } - ], - "source": [ - "results = model.val(\n", - " data='coco128.yaml',\n", - " batch=1,\n", - " imgsz=640,\n", - " verbose=False,\n", - " )" - ] } ], "metadata": {
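The tutorial closes by exporting the pruned checkpoint with model.export(format='onnx', half=True) and sanity-checking the exported graph. A minimal sketch of that last check, assuming the best.onnx path from the export log above (the removed baseline cell ran the identical call and logged mAP50-95 ≈ 0.658 through ONNX Runtime):

    from ultralytics import YOLO

    # Load the exported graph through the same API; as the log above notes,
    # Ultralytics forces batch=1 square (1, 3, 640, 640) inference for
    # non-PyTorch backends.
    onnx_model = YOLO('runs/detect/step_14_finetune13/weights/best.onnx')

    results = onnx_model.val(
        data='coco128.yaml',
        batch=1,
        imgsz=640,
        verbose=False,
    )
    print(results.box.map)  # mAP50-95 of the pruned, exported model

Because Ultralytics wraps ONNX Runtime behind the same val() API, the PyTorch checkpoint and the exported model can be scored on coco128.yaml with identical settings, so any accuracy gap is attributable to the export itself rather than to the evaluation setup.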