In [1]:
! nvidia-smi -L
GPU 0: Tesla P100-PCIE-16GB (UUID: GPU-3dddfc79-2c0c-27e4-d184-5901b2865f53)
In [3]:
%%time
! pip install --upgrade ultralytics -qq
CPU times: user 143 ms, sys: 34.9 ms, total: 178 ms
Wall time: 12.6 s
In [4]:
import ultralytics
print(ultralytics.__version__)
8.2.18
In [5]:
import warnings
warnings.filterwarnings("ignore")
import re
import glob
import random
import yaml
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
import seaborn as sns
import IPython.display as display
from PIL import Image
import cv2
from ultralytics import YOLO
In [7]:
class CFG:
    DEBUG = False
    FRACTION = 0.05 if DEBUG else 1.0
    SEED = 88

    # classes
    CLASSES = ['no-pcos', 'pcos']
    NUM_CLASSES_TO_TRAIN = len(CLASSES)

    # training
    EPOCHS = 3 if DEBUG else 10  # 100
    BATCH_SIZE = 4

    BASE_MODEL = 'yolov8x'  # yolov8n, yolov8s, yolov8m, yolov8l, yolov8x, yolov9c, yolov9e, yolo_nas_s, yolo_nas_m, yolo_nas_l
    BASE_MODEL_WEIGHTS = f'{BASE_MODEL}.pt'
    EXP_NAME = f'yolov8x{EPOCHS}_epochs'

    OPTIMIZER = 'Adam'  # SGD, Adam, Adamax, AdamW, NAdam, RAdam, RMSProp, auto
    LR = 1e-5
    LR_FACTOR = 0.001
    WEIGHT_DECAY = 0.0005
    DROPOUT = 0.2
    PATIENCE = 10
    PROFILE = False
    LABEL_SMOOTHING = 0.0

    # paths
    CUSTOM_DATASET_DIR = '/kaggle/input/pcos234'
    OUTPUT_DIR = './'
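model.train() below already receives seed=CFG.SEED, so Ultralytics handles its own seeding; if any extra NumPy/PyTorch work is added outside that call, a small helper like this sketch (an addition, not part of the original cells) keeps it reproducible:

import random
import numpy as np
import torch

def set_seed(seed: int = CFG.SEED):
    # Seed the Python, NumPy and PyTorch RNGs (plus CUDA, if present).
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)

set_seed()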
In [8]:
dict_file = {
    'train': os.path.join(CFG.CUSTOM_DATASET_DIR, 'train'),
    'val': os.path.join(CFG.CUSTOM_DATASET_DIR, 'valid'),
    'test': os.path.join(CFG.CUSTOM_DATASET_DIR, 'test'),
    'nc': CFG.NUM_CLASSES_TO_TRAIN,
    'names': CFG.CLASSES
}

with open(os.path.join(CFG.OUTPUT_DIR, 'data.yaml'), 'w+') as file:
    yaml.dump(dict_file, file)
In [9]:
### read the yaml file we just created
def read_yaml_file(file_path=os.path.join(CFG.OUTPUT_DIR, 'data.yaml')):
    with open(file_path, 'r') as file:
        try:
            data = yaml.safe_load(file)
            return data
        except yaml.YAMLError as e:
            print("Error reading YAML:", e)
            return None

### print it with newlines
def print_yaml_data(data):
    formatted_yaml = yaml.dump(data, default_flow_style=False)
    print(formatted_yaml)

file_path = os.path.join(CFG.OUTPUT_DIR, 'data.yaml')
yaml_data = read_yaml_file(file_path)
if yaml_data:
    print_yaml_data(yaml_data)
names:
- no-pcos
- pcos
nc: 2
test: /kaggle/input/pcos234/test
train: /kaggle/input/pcos234/train
val: /kaggle/input/pcos234/valid
In [10]:
def display_image(image, print_info=True, hide_axis=False):
    if isinstance(image, str):  # file path
        img = Image.open(image)
        plt.imshow(img)
    elif isinstance(image, np.ndarray):  # NumPy array (assumed BGR, e.g. from cv2)
        image = image[..., ::-1]  # BGR to RGB
        img = Image.fromarray(image)
        plt.imshow(img)
    else:
        raise ValueError("Unsupported image format")

    if print_info:
        print('Type: ', type(img), '\n')
        print('Shape: ', np.array(img).shape, '\n')

    if hide_axis:
        plt.axis('off')

    plt.show()
In [11]:
example_image_path = '/kaggle/input/pcos234/train/images/img_0_1033_jpg.rf.e837430c9bd50f29559ffa3eb5b938ef.jpg'
display_image(example_image_path, print_info = True, hide_axis = False)
Type:  <class 'PIL.JpegImagePlugin.JpegImageFile'>

Shape:  (224, 224, 3)
In [12]:
def get_image_properties(image_path):
    # Read the image file passed in (the original hardcoded one example path here)
    img = cv2.imread(image_path)

    # Check if the image file is read successfully
    if img is None:
        raise ValueError("Could not read image file")

    # Get image properties
    properties = {
        "width": img.shape[1],
        "height": img.shape[0],
        "channels": img.shape[2] if len(img.shape) == 3 else 1,
        "dtype": img.dtype,
    }
    return properties
In [13]:
img_properties = get_image_properties(example_image_path)
img_properties
Out[13]:
{'width': 224, 'height': 224, 'channels': 3, 'dtype': dtype('uint8')}
In [14]:
%%time
class_idx = {str(i): CFG.CLASSES[i] for i in range(CFG.NUM_CLASSES_TO_TRAIN)}

class_stat = {}
data_len = {}
class_info = []

for mode in ['train', 'valid', 'test']:
    class_count = {CFG.CLASSES[i]: 0 for i in range(CFG.NUM_CLASSES_TO_TRAIN)}
    path = os.path.join(CFG.CUSTOM_DATASET_DIR, mode, 'labels')

    for file in os.listdir(path):
        with open(os.path.join(path, file)) as f:
            lines = f.readlines()
        # count each class at most once per image; the first token of a label line is the class id
        for cls in set(line.split()[0] for line in lines):
            class_count[class_idx[cls]] += 1

    data_len[mode] = len(os.listdir(path))
    class_stat[mode] = class_count
    class_info.append({'Mode': mode, **class_count, 'Data_Volume': data_len[mode]})

dataset_stats_df = pd.DataFrame(class_info)
dataset_stats_df
CPU times: user 13 ms, sys: 8.78 ms, total: 21.8 ms
Wall time: 165 ms
Out[14]:
| | Mode | no-pcos | pcos | Data_Volume |
|---|---|---|---|---|
| 0 | train | 61 | 103 | 164 |
| 1 | valid | 13 | 10 | 23 |
| 2 | test | 27 | 20 | 47 |
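A quick sanity check (not in the original cells) is to confirm that every split pairs each image with one label file, since the class counts above are derived from the label files alone:

# Hypothetical check: each split should have exactly one label file per image.
for mode in ['train', 'valid', 'test']:
    n_images = len(os.listdir(os.path.join(CFG.CUSTOM_DATASET_DIR, mode, 'images')))
    n_labels = len(os.listdir(os.path.join(CFG.CUSTOM_DATASET_DIR, mode, 'labels')))
    status = 'OK' if n_images == n_labels else 'mismatch!'
    print(f'{mode}: {n_images} images, {n_labels} labels ({status})')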
In [15]:
# Create subplots with 1 row and 3 columns
fig, axes = plt.subplots(1, 3, figsize=(15, 5))

# Plot vertical bar plots for each mode in subplots
for i, mode in enumerate(['train', 'valid', 'test']):
    sns.barplot(
        data=dataset_stats_df[dataset_stats_df['Mode'] == mode].drop(columns='Mode'),
        orient='v',
        ax=axes[i],
        palette='Set2'
    )
    axes[i].set_title(f'{mode.capitalize()} Class Statistics')
    axes[i].set_xlabel('Classes')
    axes[i].set_ylabel('Count')
    axes[i].tick_params(axis='x', rotation=90)

    # Add annotations on top of each bar
    for p in axes[i].patches:
        axes[i].annotate(f"{int(p.get_height())}", (p.get_x() + p.get_width() / 2., p.get_height()),
                         ha='center', va='center', fontsize=8, color='black', xytext=(0, 5),
                         textcoords='offset points')

plt.tight_layout()
plt.show()
In [16]:
%%time
for mode in ['train', 'valid', 'test']:
    print(f'\nImage sizes in {mode} set:')

    img_size = 0
    for file in glob.glob(os.path.join(CFG.CUSTOM_DATASET_DIR, mode, 'images', '*')):
        image = Image.open(file)
        # only print a size when it differs from the previous image's size
        if image.size != img_size:
            print(f'{image.size}')
            img_size = image.size

    print('\n')
Image sizes in train set:
(224, 224)

Image sizes in valid set:
(224, 224)

Image sizes in test set:
(224, 224)

CPU times: user 29.9 ms, sys: 11.5 ms, total: 41.3 ms
Wall time: 134 ms
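An equivalent way to summarise the sizes (a sketch, not part of the original loop) is to count them with collections.Counter, which also shows how many images share each size:

from collections import Counter

for mode in ['train', 'valid', 'test']:
    sizes = Counter(
        Image.open(f).size
        for f in glob.glob(os.path.join(CFG.CUSTOM_DATASET_DIR, mode, 'images', '*'))
    )
    print(mode, dict(sizes))  # e.g. train {(224, 224): 164}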
In [ ]:
In [17]:
CFG.BASE_MODEL_WEIGHTS
Out[17]:
'yolov8x.pt'
In [18]:
import torch
model = YOLO(CFG.BASE_MODEL_WEIGHTS)
device = 'cuda' if torch.cuda.is_available() else 'cpu'
In [19]:
print('Model: ', CFG.BASE_MODEL_WEIGHTS)
print('Epochs: ', CFG.EPOCHS)
print('Batch: ', CFG.BATCH_SIZE)
Model:  yolov8x.pt
Epochs:  10
Batch:  4
In [20]:
model = YOLO(CFG.BASE_MODEL_WEIGHTS)
In [21]:
%%time
### train
model.train(
    data=os.path.join(CFG.OUTPUT_DIR, 'data.yaml'),
    task='detect',
    imgsz=(img_properties['height'], img_properties['width']),  # coerced to a single int for train/val (see warning below)
    epochs=CFG.EPOCHS,
    batch=CFG.BATCH_SIZE,
    optimizer=CFG.OPTIMIZER,
    lr0=CFG.LR,
    lrf=CFG.LR_FACTOR,
    weight_decay=CFG.WEIGHT_DECAY,
    dropout=CFG.DROPOUT,
    fraction=CFG.FRACTION,
    patience=CFG.PATIENCE,
    profile=CFG.PROFILE,
    label_smoothing=CFG.LABEL_SMOOTHING,
    name=f'{CFG.BASE_MODEL}_{CFG.EXP_NAME}',
    seed=CFG.SEED,
    val=True,
    amp=True,
    exist_ok=True,
    resume=False,
    device=0,  # single GPU
    verbose=False,
)
Ultralytics YOLOv8.2.18 🚀 Python-3.10.13 torch-2.1.2 CUDA:0 (Tesla P100-PCIE-16GB, 16276MiB)
engine/trainer: task=detect, mode=train, model=yolov8x.pt, data=./data.yaml, epochs=10, time=None, patience=10, batch=4, imgsz=(224, 224), save=True, save_period=-1, cache=False, device=0, workers=8, project=None, name=yolov8x_yolov8x10_epochs, exist_ok=True, pretrained=True, optimizer=Adam, verbose=False, seed=88, deterministic=True, single_cls=False, rect=False, cos_lr=False, close_mosaic=10, resume=False, amp=True, fraction=1.0, profile=False, freeze=None, multi_scale=False, overlap_mask=True, mask_ratio=4, dropout=0.2, val=True, split=val, save_json=False, save_hybrid=False, conf=None, iou=0.7, max_det=300, half=False, dnn=False, plots=True, source=None, vid_stride=1, stream_buffer=False, visualize=False, augment=False, agnostic_nms=False, classes=None, retina_masks=False, embed=None, show=False, save_frames=False, save_txt=False, save_conf=False, save_crop=False, show_labels=True, show_conf=True, show_boxes=True, line_width=None, format=torchscript, keras=False, optimize=False, int8=False, dynamic=False, simplify=False, opset=None, workspace=4, nms=False, lr0=1e-05, lrf=0.001, momentum=0.937, weight_decay=0.0005, warmup_epochs=3.0, warmup_momentum=0.8, warmup_bias_lr=0.1, box=7.5, cls=0.5, dfl=1.5, pose=12.0, kobj=1.0, label_smoothing=0.0, nbs=64, hsv_h=0.015, hsv_s=0.7, hsv_v=0.4, degrees=0.0, translate=0.1, scale=0.5, shear=0.0, perspective=0.0, flipud=0.0, fliplr=0.5, bgr=0.0, mosaic=1.0, mixup=0.0, copy_paste=0.0, auto_augment=randaugment, erasing=0.4, crop_fraction=1.0, cfg=None, tracker=botsort.yaml, save_dir=runs/detect/yolov8x_yolov8x10_epochs
2024-05-19 05:45:51,603 INFO util.py:124 -- Outdated packages: ipywidgets==7.7.1 found, needs ipywidgets>=8
Run `pip install -U ipywidgets`, then restart the notebook server for rich notebook output.
Overriding model.yaml nc=80 with nc=2
from n params module arguments
0 -1 1 2320 ultralytics.nn.modules.conv.Conv [3, 80, 3, 2]
1 -1 1 115520 ultralytics.nn.modules.conv.Conv [80, 160, 3, 2]
2 -1 3 436800 ultralytics.nn.modules.block.C2f [160, 160, 3, True]
3 -1 1 461440 ultralytics.nn.modules.conv.Conv [160, 320, 3, 2]
4 -1 6 3281920 ultralytics.nn.modules.block.C2f [320, 320, 6, True]
5 -1 1 1844480 ultralytics.nn.modules.conv.Conv [320, 640, 3, 2]
6 -1 6 13117440 ultralytics.nn.modules.block.C2f [640, 640, 6, True]
7 -1 1 3687680 ultralytics.nn.modules.conv.Conv [640, 640, 3, 2]
8 -1 3 6969600 ultralytics.nn.modules.block.C2f [640, 640, 3, True]
9 -1 1 1025920 ultralytics.nn.modules.block.SPPF [640, 640, 5]
10 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest']
11 [-1, 6] 1 0 ultralytics.nn.modules.conv.Concat [1]
12 -1 3 7379200 ultralytics.nn.modules.block.C2f [1280, 640, 3]
13 -1 1 0 torch.nn.modules.upsampling.Upsample [None, 2, 'nearest']
14 [-1, 4] 1 0 ultralytics.nn.modules.conv.Concat [1]
15 -1 3 1948800 ultralytics.nn.modules.block.C2f [960, 320, 3]
16 -1 1 922240 ultralytics.nn.modules.conv.Conv [320, 320, 3, 2]
17 [-1, 12] 1 0 ultralytics.nn.modules.conv.Concat [1]
18 -1 3 7174400 ultralytics.nn.modules.block.C2f [960, 640, 3]
19 -1 1 3687680 ultralytics.nn.modules.conv.Conv [640, 640, 3, 2]
20 [-1, 9] 1 0 ultralytics.nn.modules.conv.Concat [1]
21 -1 3 7379200 ultralytics.nn.modules.block.C2f [1280, 640, 3]
22 [15, 18, 21] 1 8719894 ultralytics.nn.modules.head.Detect [2, [320, 640, 640]]
Model summary: 365 layers, 68154534 parameters, 68154518 gradients, 258.1 GFLOPs
Transferred 589/595 items from pretrained weights
TensorBoard: Start with 'tensorboard --logdir runs/detect/yolov8x_yolov8x10_epochs', view at http://localhost:6006/
wandb: Currently logged in as: akshitagupta15june (iiitpune-akshita). Use `wandb login --relogin` to force relogin
wandb version 0.17.0 is available! To upgrade, please run:
$ pip install wandb --upgrade
Tracking run with wandb version 0.16.6
Run data is saved locally in
/kaggle/working/wandb/run-20240519_054558-md0zt3qd
View project at https://wandb.ai/iiitpune-akshita/YOLOv8
Freezing layer 'model.22.dfl.conv.weight'
AMP: running Automatic Mixed Precision (AMP) checks with YOLOv8n...
AMP: checks passed ✅
WARNING ⚠️ updating to 'imgsz=224'. 'train' and 'val' imgsz must be an integer, while 'predict' and 'export' imgsz may be a [h, w] list or an integer, i.e. 'yolo export imgsz=640,480' or 'yolo export imgsz=640'
train: Scanning /kaggle/input/pcos234/train/labels... 164 images, 0 backgrounds, 0 corrupt: 100%|██████████| 164/164 [00:00<00:00, 993.40it/s]
train: WARNING ⚠️ Cache directory /kaggle/input/pcos234/train is not writeable, cache not saved.
albumentations: Blur(p=0.01, blur_limit=(3, 7)), MedianBlur(p=0.01, blur_limit=(3, 7)), ToGray(p=0.01), CLAHE(p=0.01, clip_limit=(1, 4.0), tile_grid_size=(8, 8))
val: Scanning /kaggle/input/pcos234/valid/labels... 23 images, 0 backgrounds, 0 corrupt: 100%|██████████| 23/23 [00:00<00:00, 835.16it/s]
val: WARNING ⚠️ Cache directory /kaggle/input/pcos234/valid is not writeable, cache not saved.
Plotting labels to runs/detect/yolov8x_yolov8x10_epochs/labels.jpg...
optimizer: Adam(lr=1e-05, momentum=0.937) with parameter groups 97 weight(decay=0.0), 104 weight(decay=0.0005), 103 bias(decay=0.0)
TensorBoard: model graph visualization added ✅
Image sizes 224 train, 224 val
Using 4 dataloader workers
Logging results to runs/detect/yolov8x_yolov8x10_epochs
Starting training for 10 epochs...
Closing dataloader mosaic
albumentations: Blur(p=0.01, blur_limit=(3, 7)), MedianBlur(p=0.01, blur_limit=(3, 7)), ToGray(p=0.01), CLAHE(p=0.01, clip_limit=(1, 4.0), tile_grid_size=(8, 8))
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
1/10 2.92G 1.671 2.418 2.082 4 224: 100%|██████████| 41/41 [00:05<00:00, 7.03it/s] Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:00<00:00, 7.83it/s]
all 23 23 0.553 0.843 0.638 0.381
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
2/10 2.06G 1.172 1.317 1.65 4 224: 100%|██████████| 41/41 [00:04<00:00, 8.35it/s] Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:00<00:00, 12.64it/s]
all 23 23 0.672 0.9 0.894 0.593
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
3/10 2.07G 1.114 1.013 1.518 4 224: 100%|██████████| 41/41 [00:04<00:00, 8.61it/s] Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:00<00:00, 12.72it/s]
all 23 23 0.908 0.952 0.971 0.706
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
4/10 2.06G 0.9785 0.9088 1.462 4 224: 100%|██████████| 41/41 [00:04<00:00, 8.65it/s] Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:00<00:00, 12.81it/s]
all 23 23 0.96 0.923 0.99 0.732
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
5/10 2.06G 0.9462 0.7632 1.42 4 224: 100%|██████████| 41/41 [00:04<00:00, 8.68it/s] Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:00<00:00, 13.03it/s]
all 23 23 0.96 0.985 0.995 0.761
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
6/10 2.07G 0.9629 0.7344 1.441 4 224: 100%|██████████| 41/41 [00:04<00:00, 8.47it/s] Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:00<00:00, 12.72it/s]
all 23 23 0.971 0.999 0.995 0.762
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
7/10 2.07G 0.9294 0.7402 1.428 4 224: 100%|██████████| 41/41 [00:04<00:00, 8.79it/s] Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:00<00:00, 12.37it/s]
all 23 23 0.97 1 0.995 0.76
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
8/10 2.07G 0.9428 0.6974 1.423 4 224: 100%|██████████| 41/41 [00:04<00:00, 8.65it/s] Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:00<00:00, 12.60it/s]
all 23 23 0.965 1 0.995 0.752
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
9/10 2.07G 0.9727 0.6485 1.401 4 224: 100%|██████████| 41/41 [00:04<00:00, 8.79it/s] Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:00<00:00, 12.95it/s]
all 23 23 0.968 0.996 0.995 0.767
Epoch GPU_mem box_loss cls_loss dfl_loss Instances Size
10/10 2.07G 0.8816 0.6232 1.376 4 224: 100%|██████████| 41/41 [00:04<00:00, 8.53it/s] Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:00<00:00, 12.55it/s]
all 23 23 0.968 1 0.995 0.767
10 epochs completed in 0.024 hours.
Optimizer stripped from runs/detect/yolov8x_yolov8x10_epochs/weights/last.pt, 136.7MB
Optimizer stripped from runs/detect/yolov8x_yolov8x10_epochs/weights/best.pt, 136.7MB
Validating runs/detect/yolov8x_yolov8x10_epochs/weights/best.pt...
Ultralytics YOLOv8.2.18 🚀 Python-3.10.13 torch-2.1.2 CUDA:0 (Tesla P100-PCIE-16GB, 16276MiB)
Model summary (fused): 268 layers, 68125494 parameters, 0 gradients, 257.4 GFLOPs
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:00<00:00, 4.64it/s]
all 23 23 0.968 1 0.995 0.764
Speed: 0.1ms preprocess, 23.3ms inference, 0.0ms loss, 1.1ms postprocess per image
Results saved to runs/detect/yolov8x_yolov8x10_epochs
Run history:
lr/pg0 | █▅▁▁▁▁▁▁▁▁ |
lr/pg1 | ▃▆█▇▆▅▄▃▂▁ |
lr/pg2 | ▃▆█▇▆▅▄▃▂▁ |
metrics/mAP50(B) | ▁▆████████ |
metrics/mAP50-95(B) | ▁▅▇▇██████ |
metrics/precision(B) | ▁▃▇███████ |
metrics/recall(B) | ▁▄▆▅▇█████ |
model/GFLOPs | ▁ |
model/parameters | ▁ |
model/speed_PyTorch(ms) | ▁ |
train/box_loss | █▄▃▂▂▂▁▂▂▁ |
train/cls_loss | █▄▃▂▂▁▁▁▁▁ |
train/dfl_loss | █▄▂▂▁▂▂▁▁▁ |
val/box_loss | █▄▃▃▃▂▃▂▁▁ |
val/cls_loss | █▄▂▂▁▁▁▁▁▁ |
val/dfl_loss | █▆▄▄▃▂▃▂▁▁ |
Run summary:
lr/pg0 | 0.0 |
lr/pg1 | 0.0 |
lr/pg2 | 0.0 |
metrics/mAP50(B) | 0.995 |
metrics/mAP50-95(B) | 0.76445 |
metrics/precision(B) | 0.96791 |
metrics/recall(B) | 1.0 |
model/GFLOPs | 258.128 |
model/parameters | 68154534 |
model/speed_PyTorch(ms) | 12.001 |
train/box_loss | 0.88165 |
train/cls_loss | 0.62319 |
train/dfl_loss | 1.37602 |
val/box_loss | 0.91086 |
val/cls_loss | 0.65577 |
val/dfl_loss | 1.44876 |
View run yolov8x_yolov8x10_epochs at: https://wandb.ai/iiitpune-akshita/YOLOv8/runs/md0zt3qd
View project at: https://wandb.ai/iiitpune-akshita/YOLOv8
Synced 5 W&B file(s), 21 media file(s), 5 artifact file(s) and 0 other file(s)
Find logs at: ./wandb/run-20240519_054558-md0zt3qd/logs
CPU times: user 1min 40s, sys: 17.7 s, total: 1min 57s
Wall time: 2min 24s
Out[21]:
ultralytics.utils.metrics.DetMetrics object with attributes:
    ap_class_index: array([0, 1])
    box: ultralytics.utils.metrics.Metric object
    confusion_matrix: <ultralytics.utils.metrics.ConfusionMatrix object at 0x783c0476cc40>
    curves: ['Precision-Recall(B)', 'F1-Confidence(B)', 'Precision-Confidence(B)', 'Recall-Confidence(B)']
    curves_results: [ ... dense precision/recall/F1 vs. confidence curve arrays omitted ... ]
    fitness: 0.7875007178951741
    keys: ['metrics/precision(B)', 'metrics/recall(B)', 'metrics/mAP50(B)', 'metrics/mAP50-95(B)']
    maps: array([ 0.80652, 0.72237])
    names: {0: 'no-pcos', 1: 'pcos'}
    plot: True
    results_dict: {'metrics/precision(B)': 0.9679109507396992, 'metrics/recall(B)': 1.0, 'metrics/mAP50(B)': 0.995, 'metrics/mAP50-95(B)': 0.7644452421057489, 'fitness': 0.7875007178951741}
    save_dir: PosixPath('runs/detect/yolov8x_yolov8x10_epochs')
    speed: {'preprocess': 0.06002965180770211, 'inference': 23.263174554576047, 'loss': 0.002176865287449049, 'postprocess': 1.0500368864639944}
    task: 'detect'
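The DetMetrics object shown above exposes the headline numbers directly. A minimal sketch, assuming the training call had been captured in a variable (e.g. train_metrics = model.train(...), which the cell above does not do):

# Hypothetical: train_metrics is the DetMetrics object returned by model.train(...)
print(train_metrics.results_dict)   # precision, recall, mAP50, mAP50-95, fitness
print(train_metrics.box.map50)      # mAP@0.5 averaged over classes
print(train_metrics.box.map)        # mAP@0.5:0.95 averaged over classes
print(dict(zip(train_metrics.names.values(), train_metrics.maps)))  # per-class mAP@0.5:0.95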
In [22]:
img_properties
# Export the model
model.export(
    format='onnx',  # openvino, onnx, engine, tflite
    imgsz=(img_properties['height'], img_properties['width']),
    half=False,
    int8=False,
    simplify=False,
    nms=False,
)

results_paths = [
    i for i in
    glob.glob(f'{CFG.OUTPUT_DIR}runs/detect/{CFG.BASE_MODEL}_{CFG.EXP_NAME}/*.png') +
    glob.glob(f'{CFG.OUTPUT_DIR}runs/detect/{CFG.BASE_MODEL}_{CFG.EXP_NAME}/*.jpg')
    if 'batch' not in i
]
results_paths
%matplotlib inline
# Loading the best performing model
model = YOLO('/kaggle/working/runs/detect/yolov8x_yolov8x10_epochs/weights/best.pt')
metrics = model.val(data='/kaggle/working/data.yaml', split = 'test')
Ultralytics YOLOv8.2.18 🚀 Python-3.10.13 torch-2.1.2 CUDA:0 (Tesla P100-PCIE-16GB, 16276MiB)
Model summary (fused): 268 layers, 68125494 parameters, 0 gradients, 257.4 GFLOPs

PyTorch: starting from 'runs/detect/yolov8x_yolov8x10_epochs/weights/best.pt' with input shape (1, 3, 224, 224) BCHW and output shape(s) (1, 6, 1029) (130.3 MB)

ONNX: starting export with onnx 1.16.0 opset 17...
ONNX: export success ✅ 4.0s, saved as 'runs/detect/yolov8x_yolov8x10_epochs/weights/best.onnx' (260.0 MB)

Export complete (5.2s)
Results saved to /kaggle/working/runs/detect/yolov8x_yolov8x10_epochs/weights
Predict:  yolo predict task=detect model=runs/detect/yolov8x_yolov8x10_epochs/weights/best.onnx imgsz=224
Validate: yolo val task=detect model=runs/detect/yolov8x_yolov8x10_epochs/weights/best.onnx imgsz=224 data=./data.yaml
Visualize: https://netron.app

Ultralytics YOLOv8.2.18 🚀 Python-3.10.13 torch-2.1.2 CUDA:0 (Tesla P100-PCIE-16GB, 16276MiB)
Model summary (fused): 268 layers, 68125494 parameters, 0 gradients, 257.4 GFLOPs
val: Scanning /kaggle/input/pcos234/test/labels... 47 images, 0 backgrounds, 0 corrupt: 100%|██████████| 47/47 [00:00<00:00, 1084.20it/s]
val: WARNING ⚠️ Cache directory /kaggle/input/pcos234/test is not writeable, cache not saved.
Class Images Instances Box(P R mAP50 mAP50-95): 100%|██████████| 3/3 [00:01<00:00, 2.13it/s]
all 47 47 0.855 0.855 0.929 0.656
no-pcos 47 27 0.884 1 0.987 0.756
pcos 47 20 0.826 0.71 0.87 0.556
Speed: 0.0ms preprocess, 15.9ms inference, 0.0ms loss, 9.5ms postprocess per image
Results saved to runs/detect/val2
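Since best.onnx was exported above, it can be sanity-checked independently of Ultralytics. A minimal sketch, assuming onnxruntime is installed in the environment (it is not imported anywhere else in this notebook):

import numpy as np
import onnxruntime as ort  # assumed available; not installed by this notebook

sess = ort.InferenceSession(
    'runs/detect/yolov8x_yolov8x10_epochs/weights/best.onnx',
    providers=['CPUExecutionProvider'],
)
inp = sess.get_inputs()[0]
dummy = np.random.rand(1, 3, 224, 224).astype(np.float32)  # NCHW, matching the export imgsz
outputs = sess.run(None, {inp.name: dummy})
print(inp.name, inp.shape, '->', [o.shape for o in outputs])  # raw head output, e.g. (1, 6, 1029)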
In [23]:
example_image_path = '/kaggle/working/runs/detect/yolov8x_yolov8x10_epochs/val_batch1_pred.jpg'
display_image(example_image_path)
Type:  <class 'PIL.JpegImagePlugin.JpegImageFile'>

Shape:  (768, 768, 3)
In [24]:
# Import the necessary libraries
from ultralytics import YOLO
import matplotlib.pyplot as plt
import cv2
# Load the best performing model
model = YOLO('/kaggle/working/runs/detect/yolov8x_yolov8x10_epochs/weights/best.pt')
# Path to your test image
image_path = '/kaggle/input/pcos234/test/images/pco_4_jpg.rf.6fd0a62e06a0a87288a6b4c754b40d53.jpg'
# Run inference on the single image
results = model.predict(source=image_path, save=False, show=False)
# Get the first result (assuming one image)
result = results[0]
# Convert the result to an OpenCV image
result_image = result.plot()
# Convert BGR image to RGB for displaying with Matplotlib
result_image_rgb = cv2.cvtColor(result_image, cv2.COLOR_BGR2RGB)
# Display the image with detections
plt.imshow(result_image_rgb)
plt.axis('off') # Hide axes
plt.show()
image 1/1 /kaggle/input/pcos234/test/images/pco_4_jpg.rf.6fd0a62e06a0a87288a6b4c754b40d53.jpg: 224x224 1 pcos, 17.8ms
Speed: 0.5ms preprocess, 17.8ms inference, 3.0ms postprocess per image at shape (1, 3, 224, 224)
In [25]:
# Import the necessary libraries
from ultralytics import YOLO
import matplotlib.pyplot as plt
import cv2
# Load the best performing model
model = YOLO('/kaggle/working/runs/detect/yolov8x_yolov8x10_epochs/weights/best.pt')
# Path to your test image
image_path = '/kaggle/input/pcos234/test/images/pco_8_jpg.rf.915e3e663ddde584151479ff93aae822.jpg'
# Run inference on the single image
results = model.predict(source=image_path, save=False, show=False)
# Get the first result (assuming one image)
result = results[0]
# Convert the result to an OpenCV image
result_image = result.plot()
# Convert BGR image to RGB for displaying with Matplotlib
result_image_rgb = cv2.cvtColor(result_image, cv2.COLOR_BGR2RGB)
# Display the image with detections
plt.imshow(result_image_rgb)
plt.axis('off') # Hide axes
plt.show()
image 1/1 /kaggle/input/pcos234/test/images/pco_8_jpg.rf.915e3e663ddde584151479ff93aae822.jpg: 224x224 1 pcos, 14.5ms
Speed: 0.6ms preprocess, 14.5ms inference, 1.3ms postprocess per image at shape (1, 3, 224, 224)
In [26]:
# Import the necessary libraries
from ultralytics import YOLO
import matplotlib.pyplot as plt
import cv2
# Load the best performing model
model = YOLO('/kaggle/working/runs/detect/yolov8x_yolov8x10_epochs/weights/best.pt')
# Path to your test image
image_path = '/kaggle/input/pcos234/test/images/img_0_114_jpg.rf.ea6075be6764b1f5920d5d9453a13a27.jpg'
# Run inference on the single image
results = model.predict(source=image_path, save=False, show=False)
# Get the first result (assuming one image)
result = results[0]
# Convert the result to an OpenCV image
result_image = result.plot()
# Convert BGR image to RGB for displaying with Matplotlib
result_image_rgb = cv2.cvtColor(result_image, cv2.COLOR_BGR2RGB)
# Display the image with detections
plt.imshow(result_image_rgb)
plt.axis('off') # Hide axes
plt.show()
image 1/1 /kaggle/input/pcos234/test/images/img_0_114_jpg.rf.ea6075be6764b1f5920d5d9453a13a27.jpg: 224x224 1 no-pcos, 14.5ms
Speed: 0.4ms preprocess, 14.5ms inference, 1.3ms postprocess per image at shape (1, 3, 224, 224)
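The three prediction cells above repeat the same load/predict/plot steps; a small helper (a refactoring sketch, not part of the original cells) reduces each example to a single call:

def predict_and_show(model, image_path):
    # Run inference on one image and display the annotated result.
    result = model.predict(source=image_path, save=False, show=False)[0]
    plt.imshow(cv2.cvtColor(result.plot(), cv2.COLOR_BGR2RGB))
    plt.axis('off')
    plt.show()

# Example: predict_and_show(model, '/kaggle/input/pcos234/test/images/pco_4_jpg.rf.6fd0a62e06a0a87288a6b4c754b40d53.jpg')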
In [27]:
from IPython.display import Image
# Path to the image
image_path = '/kaggle/working/runs/detect/yolov8x_yolov8x10_epochs/val_batch1_pred.jpg'
# Display the image
Image(filename=image_path)
Out[27]:
In [ ]:
from nbformat import read, write
with open("your_notebook.ipynb", "r", encoding="utf-8") as f:
nb = read(f, as_version=4)
for cell in nb.cells:
if cell.cell_type == "code":
cell.outputs = []
with open("your_notebook.ipynb", "w", encoding="utf-8") as f:
write(nb, f)
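The same result can be had from the command line with nbconvert (assumed available, as it is in the Kaggle image):

! jupyter nbconvert --clear-output --inplace your_notebook.ipynb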