# (stray notebook line-number residue 1-70 removed during cleanup)
# Transfer learning: load an ImageNet-pretrained MobileNetV2 backbone and
# replace its classifier head with a fresh 2-class linear layer, keeping the
# backbone frozen so only the new head trains.
#
# NOTE(review): the original paste contained the device/classifier/summary
# block TWICE verbatim; the duplicate has been removed.
model = torch.hub.load('pytorch/vision:v0.6.0', 'mobilenet_v2', pretrained=True)
model.eval()

# Freeze every backbone parameter — gradients flow only through the new head.
for param in model.parameters():
    param.requires_grad = False

device = 'cuda' if torch.cuda.is_available() else 'cpu'

# MobileNetV2's feature extractor yields a 1280-dim vector after pooling;
# map it to 2 output classes.
model.classifier = nn.Linear(1280, 2)
model = model.to(device)

# NOTE(review): 244 looks like a typo for MobileNetV2's canonical 224 input,
# but the captured summary below was clearly generated at 244 (first conv
# output is 122x122 = 244/2), so it is kept as-is — confirm intent.
summary(model, input_size=(1, 3, 244, 244))

# --- torchinfo output captured from the original paste ----------------------
# ==========================================================================
# Layer (type:depth-idx)                  Output Shape              Param #
# ==========================================================================
# ├─Sequential: 1-1                       [1, 1280, 8, 8]           --
# |    └─ConvBNReLU: 2-1                  [1, 32, 122, 122]         --
# |    |    └─Conv2d: 3-1                 [1, 32, 122, 122]         (864)
# |    |    └─BatchNorm2d: 3-2            [1, 32, 122, 122]         (64)
# |    |    └─ReLU6: 3-3                  [1, 32, 122, 122]         --
# |    └─InvertedResidual: 2-2            [1, 16, 122, 122]         --
# |    |    └─Sequential: 3-4             [1, 16, 122, 122]         (896)
# |    └─InvertedResidual: 2-3            [1, 24, 61, 61]           --
# |    |    └─Sequential: 3-5             [1, 24, 61, 61]           (5,136)
# |    └─InvertedResidual: 2-4            [1, 24, 61, 61]           --
# |    |    └─Sequential: 3-6             [1, 24, 61, 61]           (8,832)
# |    └─InvertedResidual: 2-5            [1, 32, 31, 31]           --
# |    |    └─Sequential: 3-7             [1, 32, 31, 31]           (10,000)
# |    └─InvertedResidual: 2-6            [1, 32, 31, 31]           --
# |    |    └─Sequential: 3-8             [1, 32, 31, 31]           (14,848)
# |    └─InvertedResidual: 2-7            [1, 32, 31, 31]           --
# |    |    └─Sequential: 3-9             [1, 32, 31, 31]           (14,848)
# |    └─InvertedResidual: 2-8            [1, 64, 16, 16]           --
# |    |    └─Sequential: 3-10            [1, 64, 16, 16]           (21,056)
# |    └─InvertedResidual: 2-9            [1, 64, 16, 16]           --
# |    |    └─Sequential: 3-11            [1, 64, 16, 16]           (54,272)
# |    └─InvertedResidual: 2-10           [1, 64, 16, 16]           --
# |    |    └─Sequential: 3-12            [1, 64, 16, 16]           (54,272)
# |    └─InvertedResidual: 2-11           [1, 64, 16, 16]           --
# |    |    └─Sequential: 3-13            [1, 64, 16, 16]           (54,272)
# |    └─InvertedResidual: 2-12           [1, 96, 16, 16]           --
# |    |    └─Sequential: 3-14            [1, 96, 16, 16]           (66,624)
# |    └─InvertedResidual: 2-13           [1, 96, 16, 16]           --
# |    |    └─Sequential: 3-15            [1, 96, 16, 16]           (118,272)
# |    └─InvertedResidual: 2-14           [1, 96, 16, 16]           --
# |    |    └─Sequential: 3-16            [1, 96, 16, 16]           (118,272)
# |    └─InvertedResidual: 2-15           [1, 160, 8, 8]            --
# |    |    └─Sequential: 3-17            [1, 160, 8, 8]            (155,264)
# |    └─InvertedResidual: 2-16           [1, 160, 8, 8]            --
# |    |    └─Sequential: 3-18            [1, 160, 8, 8]            (320,000)
# |    └─InvertedResidual: 2-17           [1, 160, 8, 8]            --
# |    |    └─Sequential: 3-19            [1, 160, 8, 8]            (320,000)
# |    └─InvertedResidual: 2-18           [1, 320, 8, 8]            --
# |    |    └─Sequential: 3-20            [1, 320, 8, 8]            (473,920)
# |    └─ConvBNReLU: 2-19                 [1, 1280, 8, 8]           --
# |    |    └─Conv2d: 3-21                [1, 1280, 8, 8]           (409,600)
# |    |    └─BatchNorm2d: 3-22           [1, 1280, 8, 8]           (2,560)
# |    |    └─ReLU6: 3-23                 [1, 1280, 8, 8]           --
# ├─Linear: 1-2                           [1, 2]                    2,562
# ==========================================================================
# Total params: 2,226,434
# Trainable params: 2,562
# Non-trainable params: 2,223,872
# Total mult-adds (M): 196.40
# ==========================================================================
# Input size (MB): 0.71
# Forward/backward pass size (MB): 20.12
# Params size (MB): 8.91
# Estimated Total Size (MB): 29.74
# ==========================================================================