Chargement et inspection de l'image¶
In [ ]:
#dossier de travail
import os
os.chdir("C:/Users/ricco/Desktop/demo")
In [ ]:
#chargement de l'image
from torchvision.io import read_image
img = read_image("image_2.jpg")
In [ ]:
#type de l'objet
type(img)
Out[ ]:
torch.Tensor
In [ ]:
#dimensions - deja tenseur
img.shape
Out[ ]:
torch.Size([3, 925, 1280])
In [ ]:
#mettre en numpy pour affichage
#repositionner le canal
import numpy as np
img_np = np.transpose(img.numpy(),axes=(1,2,0))
img_np.shape
Out[ ]:
(925, 1280, 3)
In [ ]:
#affichage
import matplotlib.pyplot as plt
plt.imshow(img_np)
Out[ ]:
<matplotlib.image.AxesImage at 0x1ec4284d1d0>
Utilisation des modèles pré-entraînés avec PyTorch¶
In [ ]:
#modèles pré-entraînés dispos
from torchvision import models
dir(models)
Out[ ]:
['AlexNet', 'AlexNet_Weights', 'ConvNeXt', 'ConvNeXt_Base_Weights', 'ConvNeXt_Large_Weights', 'ConvNeXt_Small_Weights', 'ConvNeXt_Tiny_Weights', 'DenseNet', 'DenseNet121_Weights', 'DenseNet161_Weights', 'DenseNet169_Weights', 'DenseNet201_Weights', 'EfficientNet', 'EfficientNet_B0_Weights', 'EfficientNet_B1_Weights', 'EfficientNet_B2_Weights', 'EfficientNet_B3_Weights', 'EfficientNet_B4_Weights', 'EfficientNet_B5_Weights', 'EfficientNet_B6_Weights', 'EfficientNet_B7_Weights', 'EfficientNet_V2_L_Weights', 'EfficientNet_V2_M_Weights', 'EfficientNet_V2_S_Weights', 'GoogLeNet', 'GoogLeNetOutputs', 'GoogLeNet_Weights', 'Inception3', 'InceptionOutputs', 'Inception_V3_Weights', 'MNASNet', 'MNASNet0_5_Weights', 'MNASNet0_75_Weights', 'MNASNet1_0_Weights', 'MNASNet1_3_Weights', 'MaxVit', 'MaxVit_T_Weights', 'MobileNetV2', 'MobileNetV3', 'MobileNet_V2_Weights', 'MobileNet_V3_Large_Weights', 'MobileNet_V3_Small_Weights', 'RegNet', 'RegNet_X_16GF_Weights', 'RegNet_X_1_6GF_Weights', 'RegNet_X_32GF_Weights', 'RegNet_X_3_2GF_Weights', 'RegNet_X_400MF_Weights', 'RegNet_X_800MF_Weights', 'RegNet_X_8GF_Weights', 'RegNet_Y_128GF_Weights', 'RegNet_Y_16GF_Weights', 'RegNet_Y_1_6GF_Weights', 'RegNet_Y_32GF_Weights', 'RegNet_Y_3_2GF_Weights', 'RegNet_Y_400MF_Weights', 'RegNet_Y_800MF_Weights', 'RegNet_Y_8GF_Weights', 'ResNeXt101_32X8D_Weights', 'ResNeXt101_64X4D_Weights', 'ResNeXt50_32X4D_Weights', 'ResNet', 'ResNet101_Weights', 'ResNet152_Weights', 'ResNet18_Weights', 'ResNet34_Weights', 'ResNet50_Weights', 'ShuffleNetV2', 'ShuffleNet_V2_X0_5_Weights', 'ShuffleNet_V2_X1_0_Weights', 'ShuffleNet_V2_X1_5_Weights', 'ShuffleNet_V2_X2_0_Weights', 'SqueezeNet', 'SqueezeNet1_0_Weights', 'SqueezeNet1_1_Weights', 'SwinTransformer', 'Swin_B_Weights', 'Swin_S_Weights', 'Swin_T_Weights', 'Swin_V2_B_Weights', 'Swin_V2_S_Weights', 'Swin_V2_T_Weights', 'VGG', 'VGG11_BN_Weights', 'VGG11_Weights', 'VGG13_BN_Weights', 'VGG13_Weights', 'VGG16_BN_Weights', 'VGG16_Weights', 'VGG19_BN_Weights', 'VGG19_Weights', 'ViT_B_16_Weights', 'ViT_B_32_Weights', 'ViT_H_14_Weights', 'ViT_L_16_Weights', 'ViT_L_32_Weights', 'VisionTransformer', 'Weights', 'WeightsEnum', 'Wide_ResNet101_2_Weights', 'Wide_ResNet50_2_Weights', '_GoogLeNetOutputs', '_InceptionOutputs', '__builtins__', '__cached__', '__doc__', '__file__', '__loader__', '__name__', '__package__', '__path__', '__spec__', '_api', '_meta', '_utils', 'alexnet', 'convnext', 'convnext_base', 'convnext_large', 'convnext_small', 'convnext_tiny', 'densenet', 'densenet121', 'densenet161', 'densenet169', 'densenet201', 'detection', 'efficientnet', 'efficientnet_b0', 'efficientnet_b1', 'efficientnet_b2', 'efficientnet_b3', 'efficientnet_b4', 'efficientnet_b5', 'efficientnet_b6', 'efficientnet_b7', 'efficientnet_v2_l', 'efficientnet_v2_m', 'efficientnet_v2_s', 'get_model', 'get_model_builder', 'get_model_weights', 'get_weight', 'googlenet', 'inception', 'inception_v3', 'list_models', 'maxvit', 'maxvit_t', 'mnasnet', 'mnasnet0_5', 'mnasnet0_75', 'mnasnet1_0', 'mnasnet1_3', 'mobilenet', 'mobilenet_v2', 'mobilenet_v3_large', 'mobilenet_v3_small', 'mobilenetv2', 'mobilenetv3', 'optical_flow', 'quantization', 'regnet', 'regnet_x_16gf', 'regnet_x_1_6gf', 'regnet_x_32gf', 'regnet_x_3_2gf', 'regnet_x_400mf', 'regnet_x_800mf', 'regnet_x_8gf', 'regnet_y_128gf', 'regnet_y_16gf', 'regnet_y_1_6gf', 'regnet_y_32gf', 'regnet_y_3_2gf', 'regnet_y_400mf', 'regnet_y_800mf', 'regnet_y_8gf', 'resnet', 'resnet101', 'resnet152', 'resnet18', 'resnet34', 'resnet50', 'resnext101_32x8d', 'resnext101_64x4d', 'resnext50_32x4d', 'segmentation', 'shufflenet_v2_x0_5', 'shufflenet_v2_x1_0', 'shufflenet_v2_x1_5', 'shufflenet_v2_x2_0', 'shufflenetv2', 'squeezenet', 'squeezenet1_0', 'squeezenet1_1', 'swin_b', 'swin_s', 'swin_t', 'swin_transformer', 'swin_v2_b', 'swin_v2_s', 'swin_v2_t', 'vgg', 'vgg11', 'vgg11_bn', 'vgg13', 'vgg13_bn', 'vgg16', 'vgg16_bn', 'vgg19', 'vgg19_bn', 'video', 'vision_transformer', 'vit_b_16', 'vit_b_32', 'vit_h_14', 'vit_l_16', 'vit_l_32', 'wide_resnet101_2', 'wide_resnet50_2']
ResNet50¶
In [ ]:
#voir -- https://pytorch.org/vision/stable/models.html
#attention, le chargement des poids (coefficients) en ligne
#peut prendre du temps lors de la 1ere exécution
#voir C:\Users\ricco\.cache\torch\hub\checkpoints
from torchvision.models import resnet50, ResNet50_Weights
# Step 1: Initialize model with the best available weights
model_res = resnet50(weights=ResNet50_Weights.DEFAULT)
# passer en mode évaluation pour le classement
model_res.eval()
Out[ ]:
ResNet( (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False) (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False) (layer1): Sequential( (0): Bottleneck( (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) (downsample): Sequential( (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) ) ) (1): Bottleneck( (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) (2): Bottleneck( (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) ) (layer2): Sequential( (0): Bottleneck( (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) (downsample): Sequential( (0): Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False) (1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) ) ) (1): Bottleneck( (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) (2): Bottleneck( (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) (3): Bottleneck( (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) ) (layer3): Sequential( (0): Bottleneck( (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) (downsample): Sequential( (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False) (1): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) ) ) (1): Bottleneck( (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) (2): Bottleneck( (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) (3): Bottleneck( (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) (4): Bottleneck( (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) (5): Bottleneck( (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(1024, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) ) (layer4): Sequential( (0): Bottleneck( (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False) (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) (downsample): Sequential( (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False) (1): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) ) ) (1): Bottleneck( (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) (2): Bottleneck( (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn1): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False) (bn2): BatchNorm2d(512, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False) (bn3): BatchNorm2d(2048, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) (relu): ReLU(inplace=True) ) ) (avgpool): AdaptiveAvgPool2d(output_size=(1, 1)) (fc): Linear(in_features=2048, out_features=1000, bias=True) )
In [ ]:
# Step 2: Initialize the inference transforms
# nécessaire pour la préparation de l'image
preprocess_res = ResNet50_Weights.DEFAULT.transforms()
# Step 3: Apply inference preprocessing transforms
# -> rajouter une dim. pour être compatible avec les batches
# -> on notera aussi que la définition a été modifiée en (224, 224)
batch_res = preprocess_res(img).unsqueeze(0)
print(batch_res.shape)
torch.Size([1, 3, 224, 224])
In [ ]:
# Step 4: Use the model
# degré d'appartenance aux classes
# squeeze pour supprimer la dim = 1 induite par batch
# softmax pour normaliser l'app. aux classes [0, 1], sum = 1
prediction = model_res(batch_res).squeeze(0).softmax(0)
print(prediction)
tensor([4.0963e-04, 5.1027e-04, 3.3358e-04, 3.5793e-04, 4.6254e-04, 3.9009e-04, 4.7494e-04, 5.6449e-04, 4.7050e-04, 5.0184e-04, 3.5023e-04, 4.4043e-04, 3.8351e-04, 4.0241e-04, 3.3830e-04, 4.1542e-04, 4.0076e-04, 4.0208e-04, 4.0344e-04, 4.3278e-04, 4.9188e-04, 4.5564e-04, 4.4430e-04, 4.4541e-04, 4.3550e-04, 4.6701e-04, 4.8993e-04, 4.0738e-04, 3.8969e-04, 5.1490e-04, 7.1302e-04, 4.8864e-04, 3.8855e-04, 3.2999e-04, 4.2491e-04, 4.7648e-04, 3.4645e-04, 5.2254e-04, 5.6010e-04, 3.8662e-04, 3.5142e-04, 3.4029e-04, 3.7228e-04, 4.0120e-04, 4.1318e-04, 4.3254e-04, 4.7367e-04, 4.0143e-04, 5.0057e-04, 4.5787e-04, 3.9261e-04, 4.5398e-04, 4.0455e-04, 4.7418e-04, 4.0763e-04, 4.4977e-04, 4.7905e-04, 4.1294e-04, 5.1958e-04, 4.9432e-04, 4.7486e-04, 6.2907e-04, 2.9062e-04, 4.3833e-04, 4.5652e-04, 4.3899e-04, 3.1513e-04, 6.1555e-04, 6.0965e-04, 4.7983e-04, 4.1411e-04, 3.7788e-04, 4.9957e-04, 4.5003e-04, 5.4913e-04, 4.6727e-04, 4.7057e-04, 4.0955e-04, 4.7163e-04, 4.8709e-04, 6.7562e-04, 5.4077e-04, 5.8383e-04, 4.7452e-04, 5.0139e-04, 3.3276e-04, 4.2346e-04, 4.3069e-04, 4.3933e-04, 3.7115e-04, 3.9198e-04, 4.0431e-04, 4.2940e-04, 4.6658e-04, 4.1573e-04, 4.3197e-04, 4.8181e-04, 4.0183e-04, 4.8126e-04, 5.0595e-04, 5.0810e-04, 3.3942e-04, 3.9410e-04, 4.8150e-04, 3.5587e-04, 3.4540e-04, 3.7456e-04, 4.8506e-04, 5.6433e-04, 4.7065e-04, 4.0595e-04, 4.6211e-04, 4.6983e-04, 4.2079e-04, 4.2523e-04, 4.9571e-04, 5.0767e-04, 4.4106e-04, 3.7150e-04, 5.0006e-04, 4.1816e-04, 4.1003e-04, 4.3961e-04, 3.4860e-04, 4.4986e-04, 4.3701e-04, 4.6517e-04, 4.2309e-04, 3.7114e-04, 4.7001e-04, 4.1305e-04, 4.2538e-04, 4.6933e-04, 4.6452e-04, 3.8705e-04, 4.4048e-04, 3.7428e-04, 4.3345e-04, 4.3168e-04, 3.3910e-04, 5.9631e-04, 4.5566e-04, 5.0021e-04, 3.6974e-04, 4.3276e-04, 5.4241e-04, 3.9038e-04, 3.4966e-04, 3.9512e-04, 4.1527e-04, 4.1450e-04, 5.5116e-04, 4.9556e-04, 4.2485e-04, 4.6805e-04, 4.6327e-04, 4.7100e-04, 5.3358e-04, 3.1582e-04, 3.2318e-04, 4.8711e-04, 4.0485e-04, 4.3495e-04, 4.0404e-04, 4.3764e-04, 3.9882e-04, 4.5068e-04, 5.0911e-04, 4.7521e-04, 3.5134e-04, 4.6266e-04, 4.6522e-04, 4.0109e-04, 3.5879e-04, 3.4208e-04, 3.6213e-04, 4.0624e-04, 4.2407e-04, 4.5377e-04, 5.7515e-04, 3.7872e-04, 5.2107e-04, 4.6324e-04, 4.5890e-04, 4.2164e-04, 4.4211e-04, 4.3175e-04, 3.6265e-04, 4.1270e-04, 4.1565e-04, 3.8574e-04, 3.7766e-04, 6.8402e-04, 4.4241e-04, 3.5709e-04, 3.7640e-04, 4.2964e-04, 4.2846e-04, 4.1571e-04, 3.9199e-04, 4.0744e-04, 4.2852e-04, 6.3055e-04, 4.1116e-04, 4.2582e-04, 3.7228e-04, 4.8066e-04, 4.0174e-04, 4.4870e-04, 4.4810e-04, 5.2859e-04, 3.8067e-04, 3.3185e-04, 4.4329e-04, 3.6661e-04, 3.0564e-04, 5.3150e-04, 3.8750e-04, 4.6426e-04, 3.1686e-04, 4.7382e-04, 4.4764e-04, 3.6706e-04, 5.6341e-04, 3.8732e-04, 4.1355e-04, 3.4492e-04, 5.7959e-04, 4.7940e-04, 4.2992e-04, 3.6465e-04, 4.0801e-04, 3.2912e-04, 3.7549e-04, 3.6397e-04, 4.2505e-04, 4.6315e-04, 3.8946e-04, 4.3574e-04, 5.5501e-04, 3.1996e-04, 4.1489e-04, 3.5098e-04, 3.6289e-04, 4.7280e-04, 5.2927e-04, 5.0506e-04, 4.5191e-04, 3.3436e-04, 3.5491e-04, 4.0109e-04, 4.1299e-04, 5.0848e-04, 3.8137e-04, 8.5136e-04, 4.4847e-04, 3.9214e-04, 4.3120e-04, 4.0648e-04, 4.3517e-04, 4.1127e-04, 4.2960e-04, 5.1776e-04, 4.8744e-04, 3.7563e-04, 4.7153e-04, 4.5838e-04, 4.1494e-04, 5.0593e-04, 4.6758e-04, 4.0503e-04, 4.3996e-04, 3.4790e-04, 3.5986e-04, 4.3204e-04, 5.2027e-04, 3.5123e-04, 4.8216e-04, 3.1404e-04, 3.8456e-04, 5.2763e-04, 5.6899e-04, 9.1855e-04, 4.2342e-04, 4.5851e-04, 4.3455e-04, 1.1024e-03, 3.8002e-04, 4.2376e-02, 5.7458e-04, 5.2028e-01, 4.4809e-04, 7.5443e-04, 9.1224e-04, 6.0195e-04, 6.0923e-04, 4.9872e-04, 6.9360e-04, 4.0020e-04, 4.5972e-04, 4.2017e-04, 4.5234e-04, 4.0709e-04, 4.7024e-04, 3.7015e-04, 4.6225e-04, 4.1702e-04, 5.4339e-04, 4.2613e-04, 4.5980e-04, 4.1639e-04, 3.7658e-04, 3.8485e-04, 4.1711e-04, 4.9877e-04, 4.1064e-04, 3.5803e-04, 3.8286e-04, 4.3758e-04, 4.5340e-04, 4.2598e-04, 4.0660e-04, 4.4761e-04, 3.9797e-04, 4.0179e-04, 4.3618e-04, 4.4585e-04, 8.1984e-04, 5.2861e-04, 4.2348e-04, 3.6658e-04, 3.9352e-04, 4.2454e-04, 3.8726e-04, 4.8558e-04, 3.0421e-04, 3.4838e-04, 4.1470e-04, 4.1500e-04, 3.9798e-04, 5.1333e-04, 3.1503e-04, 4.2159e-04, 5.4339e-04, 5.9444e-04, 4.5009e-04, 5.1518e-04, 4.5542e-04, 4.1264e-04, 4.5042e-04, 5.5090e-04, 3.7560e-04, 4.9031e-04, 5.4903e-04, 3.5087e-04, 3.0442e-04, 5.2565e-04, 3.6506e-04, 5.8603e-04, 4.8071e-04, 4.8793e-04, 3.9142e-04, 4.5903e-04, 4.2699e-04, 3.8758e-04, 5.1572e-04, 5.8899e-04, 6.4568e-04, 4.3165e-04, 4.3069e-04, 6.3726e-04, 4.5135e-04, 4.6813e-04, 4.6704e-04, 3.7881e-04, 4.3639e-04, 3.8727e-04, 4.7959e-04, 3.6942e-04, 4.7625e-04, 4.5278e-04, 5.9431e-04, 3.5702e-04, 4.7374e-04, 4.0496e-04, 5.1962e-04, 3.8316e-04, 5.2017e-04, 4.5571e-04, 3.5178e-04, 5.4369e-04, 4.2621e-04, 3.0484e-04, 3.8930e-04, 3.2141e-04, 4.2294e-04, 3.6261e-04, 4.6532e-04, 4.3794e-04, 4.0041e-04, 5.0256e-04, 4.8261e-04, 4.9151e-04, 5.2624e-04, 4.8550e-04, 4.3875e-04, 4.0306e-04, 4.2155e-04, 3.6226e-04, 4.3251e-04, 2.6551e-04, 3.8425e-04, 3.5975e-04, 4.6472e-04, 3.8806e-04, 3.4026e-04, 3.0533e-04, 4.1150e-04, 3.9911e-04, 4.3781e-04, 3.9229e-04, 3.2362e-04, 4.4307e-04, 4.0473e-04, 3.7460e-04, 4.7456e-04, 4.1840e-04, 4.2033e-04, 3.6642e-04, 4.5231e-04, 4.5526e-04, 4.7166e-04, 4.9362e-04, 3.9040e-04, 4.5598e-04, 4.5906e-04, 4.0637e-04, 4.2739e-04, 3.7839e-04, 5.1461e-04, 4.5005e-04, 4.0339e-04, 3.8564e-04, 3.7647e-04, 4.0522e-04, 4.3696e-04, 4.1442e-04, 3.7795e-04, 3.7380e-04, 3.5895e-04, 4.3966e-04, 3.9199e-04, 4.2048e-04, 4.9759e-04, 3.0645e-04, 5.1984e-04, 4.3722e-04, 4.5030e-04, 4.3017e-04, 4.8021e-04, 4.5581e-04, 4.3593e-04, 4.3506e-04, 4.7859e-04, 4.3548e-04, 4.2784e-04, 3.9385e-04, 4.0404e-04, 3.3503e-04, 4.1574e-04, 5.0594e-04, 4.8358e-04, 4.1679e-04, 4.3851e-04, 4.5107e-04, 3.6256e-04, 4.8860e-04, 3.8735e-04, 3.9544e-04, 4.3539e-04, 4.5019e-04, 4.9374e-04, 4.6930e-04, 4.5568e-04, 4.0662e-04, 4.0073e-04, 4.9672e-04, 3.6163e-04, 3.6753e-04, 3.2797e-04, 4.3429e-04, 3.6039e-04, 4.8083e-04, 4.9441e-04, 4.1804e-04, 5.3388e-04, 3.7527e-04, 3.9679e-04, 4.0874e-04, 4.3919e-04, 4.3335e-04, 3.8889e-04, 4.3289e-04, 4.3919e-04, 4.1057e-04, 4.6090e-04, 4.8239e-04, 4.8953e-04, 3.6945e-04, 3.5759e-04, 3.5164e-04, 4.4669e-04, 4.7043e-04, 4.5598e-04, 4.4133e-04, 6.1195e-04, 4.8564e-04, 4.3775e-04, 3.3827e-04, 3.5347e-04, 4.7730e-04, 4.4002e-04, 3.5808e-04, 4.4233e-04, 5.4717e-04, 3.8446e-04, 3.6545e-04, 3.6858e-04, 3.4726e-04, 5.1865e-04, 3.2720e-04, 3.7786e-04, 5.0747e-04, 5.5798e-04, 4.1670e-04, 5.5081e-04, 4.4159e-04, 4.2888e-04, 4.4652e-04, 4.8006e-04, 4.7594e-04, 4.4597e-04, 3.5857e-04, 4.3980e-04, 3.7845e-04, 4.3204e-04, 4.2359e-04, 3.9451e-04, 4.1124e-04, 4.3820e-04, 3.8066e-04, 4.6556e-04, 4.3002e-04, 4.5732e-04, 3.9804e-04, 3.3962e-04, 4.3907e-04, 4.0561e-04, 5.6098e-04, 4.1860e-04, 4.3270e-04, 5.0218e-04, 4.1709e-04, 4.4974e-04, 4.1423e-04, 5.3777e-04, 4.3546e-04, 3.9939e-04, 4.3868e-04, 3.4393e-04, 4.7560e-04, 3.5222e-04, 4.3126e-04, 4.4422e-04, 4.0332e-04, 3.4937e-04, 4.5367e-04, 5.2020e-04, 4.2737e-04, 4.7388e-04, 3.4594e-04, 3.8749e-04, 5.1761e-04, 4.0829e-04, 5.0520e-04, 3.8785e-04, 4.1275e-04, 3.9835e-04, 4.4644e-04, 3.8395e-04, 4.3039e-04, 4.2753e-04, 5.1172e-04, 4.4055e-04, 4.6657e-04, 4.1178e-04, 3.6590e-04, 4.2001e-04, 4.0576e-04, 6.6356e-04, 4.9282e-04, 4.0545e-04, 4.3945e-04, 4.8660e-04, 4.1850e-04, 5.6634e-04, 3.8946e-04, 5.5494e-04, 3.1386e-04, 4.2454e-04, 3.6589e-04, 3.9958e-04, 3.5329e-04, 3.8657e-04, 3.9938e-04, 5.0960e-04, 4.5511e-04, 4.2554e-04, 4.7609e-04, 4.1054e-04, 3.9795e-04, 3.7920e-04, 5.4681e-04, 4.9454e-04, 3.7916e-04, 4.3535e-04, 4.1166e-04, 3.0721e-04, 3.9556e-04, 4.4274e-04, 4.2826e-04, 3.8696e-04, 3.7174e-04, 4.5119e-04, 3.6707e-04, 4.3463e-04, 5.2616e-04, 4.7287e-04, 3.8990e-04, 4.1984e-04, 4.1348e-04, 4.7831e-04, 4.2450e-04, 3.4674e-04, 4.8648e-04, 4.2508e-04, 3.1956e-04, 4.4716e-04, 4.5032e-04, 4.1937e-04, 5.0989e-04, 5.0675e-04, 3.8688e-04, 4.8997e-04, 4.7251e-04, 5.0437e-04, 4.3455e-04, 3.8839e-04, 3.8562e-04, 4.9518e-04, 3.6539e-04, 3.6988e-04, 6.0785e-04, 3.1036e-04, 2.9186e-04, 4.0425e-04, 4.6530e-04, 4.0526e-04, 4.9587e-04, 4.3357e-04, 4.4838e-04, 3.3299e-04, 4.2879e-04, 4.4174e-04, 4.6609e-04, 3.7453e-04, 3.8578e-04, 4.7256e-04, 4.2137e-04, 5.1107e-04, 5.1986e-04, 4.3965e-04, 4.0617e-04, 4.7393e-04, 3.6509e-04, 6.4558e-04, 4.2277e-04, 3.4467e-04, 4.3336e-04, 5.7281e-04, 4.9892e-04, 4.1499e-04, 4.7781e-04, 3.6743e-04, 5.1477e-04, 4.2371e-04, 4.6797e-04, 4.1198e-04, 3.5083e-04, 4.5785e-04, 4.2356e-04, 3.8855e-04, 4.1364e-04, 4.0861e-04, 4.1897e-04, 4.8084e-04, 2.9302e-04, 3.5520e-04, 4.7523e-04, 4.4956e-04, 4.5255e-04, 4.9030e-04, 4.0121e-04, 3.9272e-04, 4.8723e-04, 4.7026e-04, 3.8965e-04, 3.2066e-04, 4.6098e-04, 4.3116e-04, 4.5329e-04, 4.4330e-04, 4.2449e-04, 3.6291e-04, 5.6372e-04, 3.6903e-04, 4.4163e-04, 4.5495e-04, 3.2066e-04, 4.1693e-04, 3.8425e-04, 4.9356e-04, 4.3436e-04, 3.4921e-04, 3.2737e-04, 3.4824e-04, 4.0881e-04, 4.0720e-04, 3.4586e-04, 4.0405e-04, 3.8066e-04, 3.4753e-04, 4.5188e-04, 4.2634e-04, 4.3400e-04, 3.5872e-04, 4.4807e-04, 4.3516e-04, 5.7358e-04, 4.7547e-04, 3.3765e-04, 3.7254e-04, 4.1131e-04, 4.7256e-04, 4.1149e-04, 4.0448e-04, 4.1360e-04, 3.4142e-04, 3.9010e-04, 4.3723e-04, 4.5427e-04, 5.3585e-04, 3.9670e-04, 4.6478e-04, 4.4069e-04, 4.9151e-04, 4.3400e-04, 5.2216e-04, 4.2777e-04, 4.5242e-04, 4.7414e-04, 4.2337e-04, 3.7343e-04, 5.8407e-04, 5.0071e-04, 3.3515e-04, 3.6219e-04, 4.0171e-04, 3.9438e-04, 5.1927e-04, 4.9254e-04, 1.6921e-03, 4.4967e-04, 5.3951e-04, 3.8446e-04, 4.8121e-04, 4.2540e-04, 3.9303e-04, 3.7447e-04, 5.5492e-04, 4.2373e-04, 3.5554e-04, 4.0471e-04, 5.1920e-04, 4.4117e-04, 4.3046e-04, 5.0641e-04, 3.9181e-04, 4.0757e-04, 9.3453e-04, 3.6344e-04, 3.7717e-04, 4.1414e-04, 4.1939e-04, 4.4401e-04, 4.0858e-04, 4.5695e-04, 3.2619e-04, 3.6976e-04, 5.3552e-04, 5.2086e-04, 4.4641e-04, 4.7151e-04, 4.0189e-04, 4.1029e-04, 4.1519e-04, 4.9640e-04, 3.3726e-04, 4.2438e-04, 3.5544e-04, 4.3992e-04, 4.9786e-04, 3.9029e-04, 5.2503e-04, 4.0911e-04, 3.8123e-04, 3.5804e-04, 4.2791e-04, 3.8122e-04, 3.6454e-04, 3.2005e-04, 4.2890e-04, 4.1120e-04, 4.0116e-04, 4.3961e-04, 3.8332e-04, 4.3514e-04, 3.8170e-04, 4.7561e-04, 3.8705e-04, 6.2027e-04, 3.7608e-04, 4.2097e-04, 3.5832e-04, 4.5330e-04, 4.2596e-04, 4.3592e-04, 4.5120e-04, 4.0573e-04, 3.9304e-04, 5.1577e-04, 4.3027e-04, 3.8312e-04, 3.9627e-04, 4.8190e-04, 4.1616e-04, 3.6293e-04, 4.4495e-04, 4.3575e-04, 4.1202e-04, 3.5248e-04, 4.4048e-04, 4.8595e-04, 4.3045e-04, 4.0626e-04, 4.0293e-04, 3.8846e-04, 3.2675e-04, 5.2317e-04, 4.2085e-04, 3.7759e-04, 4.4982e-04, 4.4438e-04, 4.0215e-04, 4.0685e-04, 4.7118e-04, 4.2825e-04, 4.6477e-04, 4.5045e-04, 3.9950e-04, 3.6113e-04, 4.9858e-04, 3.8521e-04, 5.1135e-04, 4.6867e-04, 5.9780e-04, 3.4777e-04, 3.8995e-04, 4.8033e-04, 4.6241e-04, 3.8464e-04, 4.2829e-04, 4.7900e-04, 3.6537e-04, 4.5799e-04, 5.0797e-04, 6.0914e-04, 4.2021e-04, 3.3337e-04, 4.6272e-04, 4.5406e-04, 4.7702e-04, 3.6689e-04, 5.0039e-04, 4.5959e-04, 3.7746e-04, 3.9458e-04, 4.6467e-04, 5.4885e-04, 4.6390e-04, 3.7255e-04, 3.1806e-04, 3.5741e-04, 4.5857e-04, 4.3824e-04, 5.4035e-04, 3.8011e-04, 4.0409e-04, 4.0347e-04, 3.9691e-04, 4.0031e-04, 3.4416e-04, 3.6163e-04, 4.5138e-04, 4.4829e-04, 5.0673e-04, 4.3402e-04, 4.7155e-04, 4.8777e-04, 4.4796e-04, 4.2171e-04, 3.5920e-04, 4.3177e-04, 5.0610e-04, 4.8600e-04, 4.7712e-04, 4.4492e-04, 4.6236e-04, 4.4876e-04, 4.1196e-04, 3.8150e-04, 5.1852e-04, 4.7734e-04, 4.3507e-04, 4.5883e-04, 3.7744e-04, 4.0192e-04, 4.9155e-04, 4.5647e-04, 4.1185e-04, 4.0346e-04, 4.1997e-04, 3.8529e-04, 4.6243e-04, 4.7937e-04, 4.0421e-04, 4.6883e-04, 5.0779e-04, 4.6041e-04, 4.1840e-04, 3.2461e-04, 4.8941e-04, 4.6967e-04, 4.2797e-04, 4.3335e-04, 4.0685e-04, 4.9534e-04, 5.0350e-04, 4.1330e-04, 4.5540e-04, 4.0968e-04, 3.3845e-04, 3.9327e-04, 4.6004e-04, 5.9181e-04, 4.1604e-04, 3.8111e-04, 3.8236e-04, 5.5335e-04, 4.0763e-04, 4.5947e-04, 4.2003e-04, 4.2862e-04, 4.0023e-04, 4.2980e-04, 3.9804e-04, 3.8009e-04, 5.3448e-04, 3.9022e-04, 4.0628e-04, 3.9528e-04, 4.5355e-04, 4.3673e-04, 4.6564e-04, 4.1766e-04, 3.7656e-04, 4.0519e-04, 4.3252e-04, 3.9498e-04, 4.2493e-04], grad_fn=<SoftmaxBackward0>)
In [ ]:
#dim. - nombre de classes recensées
print(prediction.shape)
torch.Size([1000])
In [ ]:
# ResNet50_Weights.DEFAULT.meta["categories"] liste les catégories possibles
# par ex. les 10 premières
ResNet50_Weights.DEFAULT.meta["categories"][:10]
Out[ ]:
['tench', 'goldfish', 'great white shark', 'tiger shark', 'hammerhead', 'electric ray', 'stingray', 'cock', 'hen', 'ostrich']
In [ ]:
# id de la classe prédite et score d'appartenance
class_id = prediction.argmax().item()
score = prediction[class_id].item()
# affichage
category_name = ResNet50_Weights.DEFAULT.meta["categories"][class_id]
print(f"{category_name}: {100 * score:.1f}%")
jaguar: 52.0%
VGG19¶
In [ ]:
#https://pytorch.org/vision/main/models/generated/torchvision.models.vgg19.html
from torchvision.models import vgg, VGG19_Weights
#instanciation
model_vgg = vgg.vgg19(weights=VGG19_Weights.DEFAULT)
model_vgg.eval()
# Step 2: Initialize the inference transforms
preprocess_vgg = VGG19_Weights.DEFAULT.transforms()
# transformation en batch
batch_vgg = preprocess_vgg(img).unsqueeze(0)
print(batch_vgg.shape)
torch.Size([1, 3, 224, 224])
In [ ]:
#degré d'appartenance aux classes
prediction_vgg = model_vgg(batch_vgg).squeeze(0).softmax(0)
print(prediction_vgg)
tensor([3.9646e-09, 5.5951e-09, 8.5981e-10, 9.3999e-09, 4.8761e-09, 2.5719e-08, 4.1786e-08, 1.0787e-07, 4.3705e-07, 2.8875e-08, 4.4426e-08, 4.2210e-09, 1.0460e-09, 2.7670e-10, 1.9707e-09, 1.9138e-09, 2.3332e-10, 6.2988e-10, 3.3776e-09, 4.6927e-10, 7.9210e-10, 3.6798e-08, 1.1224e-08, 4.8456e-09, 6.9022e-08, 1.4266e-08, 4.2918e-08, 8.5668e-10, 1.2441e-09, 1.8921e-08, 8.4561e-07, 1.7188e-08, 6.6117e-07, 7.2807e-08, 7.0068e-08, 1.2452e-07, 1.4639e-06, 3.4660e-08, 7.8685e-07, 8.0051e-08, 1.2211e-09, 3.5250e-07, 3.9893e-09, 3.3767e-08, 2.4720e-08, 5.7643e-07, 4.3994e-08, 4.5551e-08, 2.0864e-07, 7.4243e-07, 1.2712e-07, 1.3666e-07, 9.6555e-10, 4.3833e-10, 6.3555e-08, 7.8455e-09, 1.2470e-08, 7.4596e-09, 2.4407e-09, 1.7980e-09, 3.3121e-08, 9.3757e-08, 1.7612e-07, 1.9391e-08, 4.9036e-09, 1.5169e-08, 5.8702e-08, 3.1125e-08, 4.4991e-08, 7.6998e-10, 8.0477e-10, 1.0434e-10, 5.0831e-09, 1.7063e-09, 3.2792e-09, 6.8570e-11, 3.4025e-09, 6.6082e-10, 1.6179e-10, 4.0513e-10, 3.8614e-08, 1.6583e-07, 1.5168e-07, 3.6998e-08, 2.4652e-07, 4.2126e-08, 3.7010e-07, 3.5104e-10, 3.7090e-08, 1.5759e-09, 3.6624e-09, 2.6252e-09, 3.3903e-10, 3.8839e-08, 5.9188e-10, 2.2483e-10, 5.5845e-09, 3.7196e-08, 3.8480e-10, 1.5580e-09, 2.7086e-09, 9.7533e-09, 1.3354e-08, 3.1015e-09, 7.3015e-09, 5.5642e-09, 6.4242e-09, 1.5583e-10, 1.5259e-09, 7.1076e-08, 9.2029e-08, 7.6580e-10, 8.0008e-08, 1.1278e-08, 1.8194e-07, 3.9735e-08, 2.2797e-09, 5.2237e-09, 2.7669e-09, 5.9986e-09, 7.7313e-10, 1.2926e-09, 5.2487e-10, 6.2784e-08, 2.6096e-09, 1.6428e-09, 1.5605e-09, 2.9679e-10, 4.6881e-10, 1.0350e-09, 1.6680e-09, 5.1019e-10, 7.5172e-10, 6.4767e-08, 9.1006e-10, 1.0812e-09, 5.4537e-10, 3.1018e-10, 4.4639e-08, 8.0115e-09, 3.9743e-08, 7.8299e-10, 4.0483e-08, 1.6305e-10, 1.0445e-09, 4.4960e-09, 9.5305e-10, 2.3632e-09, 7.6347e-10, 2.0638e-09, 5.5246e-08, 1.6696e-08, 5.7364e-09, 1.1064e-09, 1.2323e-09, 3.7389e-09, 1.2654e-08, 3.0250e-09, 3.5405e-09, 3.5402e-08, 9.0311e-09, 2.9543e-08, 1.6912e-08, 1.9695e-08, 1.5512e-07, 1.2249e-08, 2.7745e-08, 3.1118e-08, 9.5390e-09, 2.0447e-09, 2.2885e-09, 2.0915e-08, 9.6393e-09, 2.3540e-09, 5.5010e-10, 2.2604e-09, 1.6561e-08, 1.6867e-09, 1.9516e-09, 8.3573e-09, 6.7262e-09, 6.9776e-10, 2.0604e-09, 1.2112e-08, 1.8294e-09, 2.3848e-09, 1.1902e-08, 3.5688e-09, 6.1472e-09, 4.3019e-09, 1.0019e-09, 1.0256e-08, 2.4853e-09, 1.1752e-08, 2.4844e-09, 3.2492e-09, 9.8426e-10, 1.9940e-09, 1.6142e-09, 1.7017e-09, 1.6827e-08, 3.8289e-09, 1.1276e-08, 2.6704e-09, 7.2156e-09, 9.7964e-10, 3.9468e-08, 9.4772e-09, 8.7490e-09, 1.9611e-09, 3.2410e-07, 1.7984e-08, 3.3360e-08, 3.3443e-09, 7.5935e-09, 9.7603e-09, 8.1661e-09, 1.8908e-08, 1.2985e-08, 4.0453e-08, 9.4957e-09, 1.2648e-08, 1.6620e-08, 1.7925e-09, 9.7896e-10, 2.4677e-09, 1.4069e-09, 7.8071e-09, 2.5358e-09, 1.9963e-09, 1.1432e-09, 2.9505e-09, 5.6188e-09, 9.2145e-09, 8.9048e-09, 2.9426e-09, 6.4180e-09, 2.9954e-09, 3.9353e-09, 2.5032e-08, 1.4330e-08, 3.2516e-08, 2.0218e-08, 4.5027e-08, 1.1103e-08, 2.5945e-08, 1.7997e-07, 1.1043e-08, 8.8219e-09, 4.4868e-09, 1.5423e-08, 4.3747e-06, 1.2015e-09, 1.6157e-08, 1.5574e-08, 2.0490e-09, 2.4542e-09, 9.6635e-10, 5.4965e-10, 1.0781e-09, 2.0365e-09, 2.5006e-10, 3.4865e-10, 2.9256e-09, 2.6715e-08, 1.0496e-08, 1.5948e-09, 1.4654e-08, 8.8721e-09, 7.0557e-09, 4.7064e-09, 5.4653e-08, 1.6516e-08, 3.2927e-08, 2.0833e-08, 3.3457e-07, 1.1437e-06, 8.9500e-09, 3.6415e-09, 5.4861e-09, 2.0443e-08, 5.3741e-07, 4.2468e-05, 2.8367e-09, 5.3664e-09, 8.5045e-06, 1.0773e-06, 3.7295e-05, 9.4740e-02, 1.1693e-03, 9.0267e-01, 1.7141e-07, 1.1675e-05, 1.2882e-03, 2.7203e-08, 1.4972e-08, 5.9237e-09, 1.1077e-08, 1.0679e-08, 1.7741e-08, 6.3043e-10, 3.5378e-10, 3.7183e-10, 1.7250e-08, 3.3093e-09, 5.8417e-10, 3.9486e-09, 7.9728e-09, 2.5731e-10, 9.8387e-09, 1.6259e-10, 1.5439e-08, 2.6688e-09, 7.4017e-10, 3.3915e-09, 1.5135e-10, 2.2256e-10, 1.4984e-09, 2.3750e-08, 9.2846e-11, 2.0816e-11, 7.4810e-10, 7.3445e-09, 1.0529e-09, 2.8450e-10, 1.4705e-09, 1.3614e-08, 1.6774e-08, 1.0790e-09, 1.4958e-07, 5.7965e-09, 5.4398e-09, 6.5577e-10, 7.7572e-11, 1.3785e-08, 6.9365e-09, 6.9878e-10, 6.0735e-09, 4.6508e-09, 3.9740e-09, 8.5477e-07, 1.6531e-07, 9.1447e-08, 1.1080e-08, 2.1872e-08, 7.0269e-08, 3.2792e-09, 1.3231e-09, 1.6809e-08, 3.1539e-09, 2.3469e-09, 1.5879e-09, 2.4401e-08, 3.4946e-07, 4.7905e-09, 7.1623e-08, 4.7347e-09, 2.7215e-09, 4.8289e-09, 3.1015e-09, 4.6567e-08, 8.3999e-09, 1.4118e-08, 7.5482e-09, 2.8984e-09, 3.1191e-09, 7.0056e-09, 3.2136e-09, 5.3407e-09, 4.5953e-09, 1.3263e-08, 1.3930e-08, 5.5065e-09, 4.1123e-09, 5.7007e-09, 9.0374e-09, 7.0701e-09, 1.8780e-07, 4.7334e-09, 1.2891e-08, 2.6466e-08, 2.8456e-09, 5.0049e-09, 7.8974e-08, 5.4302e-09, 5.4867e-08, 1.2539e-08, 2.2378e-07, 4.1065e-08, 1.0422e-08, 8.8240e-07, 8.1787e-09, 1.4839e-08, 1.5441e-09, 1.5995e-09, 7.3947e-08, 2.1291e-07, 1.2399e-07, 1.3962e-09, 6.8227e-09, 5.8369e-10, 3.4517e-09, 4.4978e-09, 3.2996e-10, 4.6495e-10, 2.2795e-10, 3.2567e-09, 1.6357e-09, 4.6298e-09, 4.6085e-09, 1.1187e-08, 7.4655e-09, 8.6082e-09, 3.3222e-09, 7.8566e-09, 1.0616e-08, 1.3295e-09, 1.8451e-09, 2.0900e-10, 1.0508e-09, 1.1799e-09, 2.3515e-08, 3.4320e-10, 8.7560e-09, 2.0540e-08, 8.6815e-10, 2.1092e-09, 2.1594e-09, 4.6699e-08, 4.4881e-10, 2.3771e-09, 1.9603e-10, 5.2970e-10, 1.8648e-09, 1.0859e-08, 9.8957e-09, 2.2184e-09, 7.7848e-11, 5.2176e-10, 7.4004e-10, 2.3983e-09, 5.7154e-09, 7.3746e-10, 1.5888e-09, 5.6784e-09, 2.8221e-09, 1.5634e-09, 6.8719e-09, 1.0548e-08, 1.3677e-09, 3.1948e-10, 4.6795e-10, 2.9213e-09, 5.1878e-10, 6.0728e-09, 7.5761e-10, 1.3277e-09, 4.0335e-08, 1.2532e-09, 5.1248e-09, 1.2385e-09, 5.3095e-09, 1.1714e-09, 3.7156e-09, 2.1775e-08, 4.0442e-10, 1.9130e-10, 1.1073e-08, 4.6416e-09, 1.6980e-08, 1.0441e-09, 6.9356e-09, 2.0946e-09, 8.9152e-11, 3.5548e-08, 2.5250e-09, 1.6303e-07, 3.7087e-11, 8.4080e-09, 1.1081e-08, 2.5457e-09, 2.1833e-09, 4.2080e-10, 1.0813e-07, 1.6116e-09, 4.1435e-11, 8.8162e-10, 8.7516e-09, 2.6808e-08, 1.9279e-07, 1.1527e-08, 2.5111e-09, 1.3136e-08, 4.9583e-10, 4.7771e-10, 9.8859e-10, 9.5247e-10, 8.0901e-09, 5.1814e-09, 3.0039e-09, 2.8596e-08, 2.1044e-08, 5.4258e-09, 2.7261e-09, 1.2961e-08, 9.8879e-10, 3.6398e-09, 2.0772e-09, 4.5110e-09, 5.7453e-09, 9.8957e-11, 4.2197e-09, 9.3451e-10, 1.2920e-09, 9.9231e-09, 1.1643e-09, 9.7481e-09, 8.6957e-10, 2.2647e-09, 1.9775e-08, 2.7828e-09, 1.0088e-09, 9.6458e-10, 6.0961e-09, 4.5410e-09, 2.4096e-09, 6.0606e-10, 6.2729e-10, 1.4430e-09, 2.3620e-09, 3.2627e-09, 7.4101e-10, 1.1697e-09, 7.7655e-09, 8.4194e-09, 5.2372e-10, 1.2429e-09, 1.6813e-09, 2.5627e-09, 9.5017e-08, 7.8077e-10, 5.3785e-09, 3.8226e-09, 7.6696e-10, 1.1210e-09, 1.3082e-10, 4.2351e-09, 3.3854e-10, 4.0475e-10, 1.6608e-09, 5.6825e-10, 1.2978e-10, 1.0431e-08, 1.1165e-09, 1.1064e-10, 1.2282e-09, 8.0653e-09, 4.2693e-09, 2.8768e-09, 2.0796e-09, 1.4719e-09, 6.0419e-10, 9.8903e-08, 1.2471e-09, 9.1639e-10, 4.0899e-09, 2.5895e-08, 9.4492e-10, 7.3873e-06, 1.4477e-09, 8.9182e-09, 1.0239e-09, 7.6945e-09, 6.1510e-10, 6.5305e-10, 1.1411e-08, 1.2853e-08, 6.3773e-10, 9.7383e-09, 2.0557e-09, 3.4327e-08, 1.6426e-08, 1.0444e-08, 1.9369e-09, 6.5177e-10, 6.0013e-09, 6.1130e-08, 7.0990e-10, 1.4662e-09, 4.8772e-09, 3.4909e-10, 8.5237e-09, 4.5057e-10, 3.7281e-09, 1.4737e-09, 2.8824e-10, 4.1170e-09, 1.6848e-10, 7.3211e-10, 1.0990e-07, 3.1839e-09, 1.5049e-09, 2.0074e-09, 9.7810e-09, 4.4869e-10, 9.1454e-11, 2.9209e-09, 3.6913e-09, 2.3930e-09, 1.1996e-08, 4.9495e-09, 3.0674e-08, 2.3596e-09, 3.3705e-10, 6.5758e-09, 3.2005e-10, 8.9180e-10, 1.7273e-09, 1.2897e-09, 3.9050e-09, 1.8146e-09, 7.9158e-09, 9.2514e-10, 1.8774e-09, 5.3937e-09, 2.0455e-10, 4.9505e-09, 3.4545e-09, 1.3633e-09, 1.9461e-09, 4.4504e-10, 6.7317e-10, 8.9744e-10, 3.1864e-10, 2.0268e-09, 2.0601e-10, 3.9210e-08, 5.7313e-09, 1.8377e-08, 1.6871e-08, 1.0619e-08, 2.6817e-09, 1.2865e-08, 9.9986e-08, 1.8421e-09, 5.0488e-10, 1.0337e-08, 2.2786e-10, 5.3378e-10, 1.3206e-08, 2.8596e-09, 2.8772e-09, 1.0864e-08, 1.9714e-09, 1.4120e-09, 6.2912e-09, 2.2791e-09, 2.1857e-10, 6.0018e-08, 1.6241e-09, 3.7824e-09, 7.9828e-10, 1.4246e-10, 6.3955e-08, 4.4652e-09, 5.7215e-09, 3.1865e-09, 4.9790e-10, 1.8277e-09, 1.1433e-09, 2.2961e-08, 1.1881e-08, 9.7454e-10, 1.2605e-09, 2.9625e-10, 1.2234e-09, 6.4986e-08, 5.7762e-10, 5.9631e-09, 4.4222e-09, 6.2854e-10, 2.3080e-10, 2.9624e-09, 1.6606e-08, 1.8046e-09, 4.7400e-10, 8.2060e-10, 2.1839e-09, 1.6330e-09, 4.4232e-09, 6.1510e-08, 2.6253e-10, 1.6140e-09, 4.4683e-10, 3.5715e-09, 2.9161e-08, 1.9064e-10, 1.2293e-08, 1.4991e-07, 1.7973e-09, 2.2337e-09, 1.0253e-10, 2.3625e-09, 4.7347e-08, 3.7772e-09, 4.9363e-10, 6.8067e-09, 2.3824e-09, 1.5260e-09, 3.0562e-10, 8.5075e-11, 9.4461e-09, 9.3064e-10, 5.7806e-10, 8.2392e-10, 8.5223e-10, 1.7981e-08, 8.1200e-09, 2.1484e-09, 8.5821e-09, 1.2586e-10, 1.2800e-08, 4.0721e-10, 1.5539e-09, 4.7613e-08, 1.0411e-08, 7.5842e-11, 1.8949e-09, 1.3845e-08, 5.2878e-10, 2.8896e-09, 3.8821e-09, 2.3111e-10, 1.1362e-09, 4.3759e-10, 2.7111e-08, 6.4598e-10, 5.7925e-09, 1.1927e-08, 5.7397e-09, 8.6848e-10, 1.9380e-08, 7.0350e-10, 8.0305e-08, 3.0627e-10, 3.0796e-09, 1.4125e-09, 5.1858e-09, 3.9610e-09, 2.0210e-09, 2.4485e-08, 2.1218e-09, 3.6387e-09, 3.8113e-08, 4.2335e-10, 5.9159e-10, 1.1599e-09, 2.4527e-09, 2.4163e-09, 2.3006e-10, 3.4439e-09, 1.3515e-08, 6.4121e-09, 8.6930e-09, 2.6004e-09, 2.2939e-09, 1.0852e-09, 1.7783e-10, 5.9228e-10, 1.5037e-09, 1.3001e-08, 2.7824e-09, 1.8644e-09, 9.0083e-09, 2.4272e-09, 6.8558e-08, 1.5956e-09, 1.2054e-09, 8.5763e-10, 4.5834e-09, 1.0804e-09, 2.5568e-09, 1.4059e-09, 3.5378e-10, 3.1795e-10, 1.8465e-09, 1.6412e-08, 8.7815e-09, 1.9783e-08, 8.6213e-09, 1.5302e-09, 5.0417e-08, 3.4004e-09, 8.2080e-09, 6.6983e-08, 3.6380e-10, 1.0871e-08, 3.0977e-09, 1.4599e-10, 7.4767e-09, 1.7777e-09, 2.2777e-09, 2.8702e-10, 1.7032e-09, 5.1817e-09, 3.0450e-08, 3.3948e-09, 8.2690e-11, 7.4768e-09, 1.4006e-09, 2.9193e-10, 4.5164e-10, 3.1507e-10, 6.0785e-09, 2.9865e-10, 9.9154e-09, 3.1031e-09, 1.4117e-09, 2.2633e-09, 1.2786e-08, 6.7731e-10, 2.2247e-09, 2.8743e-09, 3.3789e-09, 8.7726e-08, 2.2828e-08, 1.7510e-10, 5.0638e-10, 1.1568e-09, 5.3875e-09, 4.3730e-09, 2.4451e-08, 5.3517e-09, 4.7658e-10, 7.2555e-09, 2.2140e-09, 1.2480e-08, 7.1003e-09, 6.0325e-10, 2.0242e-08, 8.6572e-09, 1.9476e-08, 6.5331e-09, 9.9526e-09, 6.1140e-10, 2.5289e-09, 2.3003e-09, 9.6420e-09, 7.0423e-10, 4.2884e-09, 3.0956e-08, 1.8140e-09, 3.5736e-09, 1.5850e-09, 3.3110e-09, 1.1179e-08, 6.8161e-10, 1.4798e-08, 9.1292e-09, 2.0299e-09, 8.6915e-09, 3.8315e-09, 7.3265e-10, 1.3397e-08, 1.5006e-09, 2.5050e-08, 2.4256e-09, 6.1113e-10, 2.3709e-09, 3.4315e-09, 3.5384e-09, 2.7847e-10, 2.6883e-09, 1.6113e-09, 1.3265e-10, 1.1716e-09, 6.7276e-09, 5.5436e-09, 1.1078e-09, 1.4305e-08, 1.5285e-09, 9.3982e-10, 2.3399e-10, 1.6881e-08, 3.5362e-09, 1.3652e-07, 1.0807e-09, 1.9730e-08, 1.2686e-09, 2.2979e-09, 5.7350e-10, 6.6837e-09, 3.2855e-09, 3.2255e-09, 7.0887e-10, 1.8407e-10, 1.6587e-08, 1.0186e-08, 1.0303e-08, 1.1339e-09, 3.4574e-10, 3.0944e-09, 1.1775e-09, 8.1471e-09, 4.2816e-08, 4.1158e-09, 1.3030e-09, 2.2692e-08, 7.0487e-11, 6.5266e-10, 1.7421e-09, 2.2569e-08, 9.1105e-08, 9.2533e-09, 6.8601e-11, 3.0750e-09, 1.0246e-09, 4.0209e-08, 2.6690e-08, 5.0748e-09, 6.2936e-10, 1.6789e-07, 1.2123e-09, 5.9424e-10, 9.5339e-10, 1.2737e-10, 5.9964e-11, 4.8419e-10, 1.2662e-09, 2.8816e-09, 1.3012e-09, 8.0454e-09, 1.5378e-08, 9.9653e-10, 4.0452e-10, 1.9031e-10, 4.0494e-08, 2.2632e-08, 4.8175e-09, 3.3156e-09, 7.8339e-10, 1.0080e-08, 9.0281e-10, 5.4131e-09, 7.7651e-09, 3.9423e-10, 3.2199e-09, 7.1034e-08, 1.5060e-09, 6.0408e-09, 7.9293e-10, 3.3194e-09, 1.4335e-09, 1.3672e-08, 1.3094e-07, 4.5512e-08, 2.0397e-07, 2.6547e-09, 5.4853e-09, 9.7638e-10, 8.3207e-10, 1.9801e-09, 5.2259e-10, 2.6878e-09, 2.8023e-10, 2.7346e-09, 3.9139e-09, 3.9179e-09, 6.3595e-09, 4.0796e-10, 7.4453e-09, 8.1978e-09, 6.7795e-09, 5.5626e-08, 1.4700e-09, 7.4585e-09, 5.7974e-09, 8.4006e-09, 1.7469e-08, 2.6390e-08, 1.4054e-09, 3.6182e-09, 2.2440e-09, 3.9458e-08, 5.2878e-09, 5.1475e-10, 1.5273e-09, 2.3809e-09, 1.3337e-09, 3.7144e-10, 9.2649e-09, 7.5537e-08, 3.0374e-09, 6.4919e-08, 1.7986e-08, 4.5557e-09, 2.7826e-08, 5.0287e-08, 1.5103e-09, 9.4127e-09], grad_fn=<SoftmaxBackward0>)
In [ ]:
#nombre de classes recensées
prediction_vgg.shape
Out[ ]:
torch.Size([1000])
In [ ]:
#id de la classe prédite et score d'appartenance
class_id = prediction_vgg.argmax().item()
score = prediction_vgg[class_id].item()
# affichage
category_name = VGG19_Weights.DEFAULT.meta["categories"][class_id]
print(f"{category_name}: {100 * score:.1f}%")
jaguar: 90.3%