Inception v3(PyTorch 实现)

论文在此: Rethinking the Inception Architecture for Computer Vision(本文代码实现的是 Inception v3;Inception v1 的论文为 Going deeper with convolutions, https://arxiv.org/pdf/1409.4842.pdf)

论文下载: https://arxiv.org/pdf/1512.00567.pdf

网络结构图:

 

 

import torch
import torch.nn as nn
import torch.nn.functional as F


class Inception3(nn.Module):
    """Inception v3 classification network.

    Expects a (N, 3, 299, 299) input batch.  When ``aux_logits`` is True an
    auxiliary classifier is attached after ``Mixed_6e``; in training mode
    ``forward`` then returns ``(logits, aux_logits)``, otherwise just the
    main logits.

    Args:
        num_classes: size of the final classification layer.
        aux_logits: whether to build the auxiliary classifier head.
        transform_input: if True, re-normalize inputs given in the
            [-1, 1] range to the standard ImageNet mean/std normalization.
    """

    def __init__(self, num_classes=1000, aux_logits=True, transform_input=False):
        super(Inception3, self).__init__()
        self.aux_logits = aux_logits
        self.transform_input = transform_input
        # Stem: 299x299x3 -> 35x35x192 (see shape comments in forward()).
        self.Conv2d_1a_3x3 = BasicConv2d(3, 32, kernel_size=3, stride=2)
        self.Conv2d_2a_3x3 = BasicConv2d(32, 32, kernel_size=3)
        self.Conv2d_2b_3x3 = BasicConv2d(32, 64, kernel_size=3, padding=1)
        self.Conv2d_3b_1x1 = BasicConv2d(64, 80, kernel_size=1)
        self.Conv2d_4a_3x3 = BasicConv2d(80, 192, kernel_size=3)
        # Inception blocks.
        self.Mixed_5b = InceptionA(192, pool_features=32)
        self.Mixed_5c = InceptionA(256, pool_features=64)
        self.Mixed_5d = InceptionA(288, pool_features=64)
        self.Mixed_6a = InceptionB(288)
        self.Mixed_6b = InceptionC(768, channels_7x7=128)
        self.Mixed_6c = InceptionC(768, channels_7x7=160)
        self.Mixed_6d = InceptionC(768, channels_7x7=160)
        self.Mixed_6e = InceptionC(768, channels_7x7=192)
        if aux_logits:
            self.AuxLogits = InceptionAux(768, num_classes)
        self.Mixed_7a = InceptionD(768)
        self.Mixed_7b = InceptionE(1280)
        self.Mixed_7c = InceptionE(2048)
        self.fc = nn.Linear(2048, num_classes)

        # Truncated-normal weight init (clipped at +-2 std), matching the
        # reference TensorFlow implementation.  The scipy import is hoisted
        # out of the loop (the original re-imported it per module) and kept
        # function-local so importing this file does not require scipy.
        import scipy.stats as stats
        for m in self.modules():
            if isinstance(m, (nn.Conv2d, nn.Linear)):
                # Layers may carry a custom `stddev` (see InceptionAux).
                stddev = m.stddev if hasattr(m, 'stddev') else 0.1
                X = stats.truncnorm(-2, 2, scale=stddev)
                values = torch.Tensor(X.rvs(m.weight.data.numel()))
                values = values.view(m.weight.data.size())
                m.weight.data.copy_(values)
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def forward(self, x):
        if self.transform_input:
            # Map [-1, 1]-normalized input to ImageNet mean/std
            # normalization; clone so the caller's tensor is not mutated.
            x = x.clone()
            x[:, 0] = x[:, 0] * (0.229 / 0.5) + (0.485 - 0.5) / 0.5
            x[:, 1] = x[:, 1] * (0.224 / 0.5) + (0.456 - 0.5) / 0.5
            x[:, 2] = x[:, 2] * (0.225 / 0.5) + (0.406 - 0.5) / 0.5
        # 299 x 299 x 3
        x = self.Conv2d_1a_3x3(x)
        # 149 x 149 x 32
        x = self.Conv2d_2a_3x3(x)
        # 147 x 147 x 32
        x = self.Conv2d_2b_3x3(x)
        # 147 x 147 x 64
        x = F.max_pool2d(x, kernel_size=3, stride=2)
        # 73 x 73 x 64
        x = self.Conv2d_3b_1x1(x)
        # 73 x 73 x 80
        x = self.Conv2d_4a_3x3(x)
        # 71 x 71 x 192
        x = F.max_pool2d(x, kernel_size=3, stride=2)
        # 35 x 35 x 192
        x = self.Mixed_5b(x)
        # 35 x 35 x 256
        x = self.Mixed_5c(x)
        # 35 x 35 x 288
        x = self.Mixed_5d(x)
        # 35 x 35 x 288
        x = self.Mixed_6a(x)
        # 17 x 17 x 768
        x = self.Mixed_6b(x)
        # 17 x 17 x 768
        x = self.Mixed_6c(x)
        # 17 x 17 x 768
        x = self.Mixed_6d(x)
        # 17 x 17 x 768
        x = self.Mixed_6e(x)
        # 17 x 17 x 768
        if self.training and self.aux_logits:
            aux = self.AuxLogits(x)
        # 17 x 17 x 768
        x = self.Mixed_7a(x)
        # 8 x 8 x 1280
        x = self.Mixed_7b(x)
        # 8 x 8 x 2048
        x = self.Mixed_7c(x)
        # 8 x 8 x 2048
        x = F.avg_pool2d(x, kernel_size=8)
        # 1 x 1 x 2048
        x = F.dropout(x, training=self.training)
        # 1 x 1 x 2048
        x = x.view(x.size(0), -1)
        # 2048
        x = self.fc(x)
        # num_classes
        if self.training and self.aux_logits:
            return x, aux
        return x


class InceptionA(nn.Module):
    """Inception block A (35x35 grid): 1x1 / 5x5 / double-3x3 / pool
    branches, concatenated to 224 + pool_features output channels."""

    def __init__(self, in_channels, pool_features):
        super(InceptionA, self).__init__()
        self.branch1x1 = BasicConv2d(in_channels, 64, kernel_size=1)

        self.branch5x5_1 = BasicConv2d(in_channels, 48, kernel_size=1)
        self.branch5x5_2 = BasicConv2d(48, 64, kernel_size=5, padding=2)

        self.branch3x3dbl_1 = BasicConv2d(in_channels, 64, kernel_size=1)
        self.branch3x3dbl_2 = BasicConv2d(64, 96, kernel_size=3, padding=1)
        self.branch3x3dbl_3 = BasicConv2d(96, 96, kernel_size=3, padding=1)

        self.branch_pool = BasicConv2d(in_channels, pool_features, kernel_size=1)

    def forward(self, x):
        branch1x1 = self.branch1x1(x)

        branch5x5 = self.branch5x5_1(x)
        branch5x5 = self.branch5x5_2(branch5x5)

        branch3x3dbl = self.branch3x3dbl_1(x)
        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
        branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)

        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)
        branch_pool = self.branch_pool(branch_pool)

        outputs = [branch1x1, branch5x5, branch3x3dbl, branch_pool]
        return torch.cat(outputs, 1)


class InceptionB(nn.Module):
    """Grid-reduction block (35x35 -> 17x17): stride-2 3x3, stride-2
    double-3x3, and stride-2 max-pool branches."""

    def __init__(self, in_channels):
        super(InceptionB, self).__init__()
        self.branch3x3 = BasicConv2d(in_channels, 384, kernel_size=3, stride=2)

        self.branch3x3dbl_1 = BasicConv2d(in_channels, 64, kernel_size=1)
        self.branch3x3dbl_2 = BasicConv2d(64, 96, kernel_size=3, padding=1)
        self.branch3x3dbl_3 = BasicConv2d(96, 96, kernel_size=3, stride=2)

    def forward(self, x):
        branch3x3 = self.branch3x3(x)

        branch3x3dbl = self.branch3x3dbl_1(x)
        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
        branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)

        branch_pool = F.max_pool2d(x, kernel_size=3, stride=2)

        outputs = [branch3x3, branch3x3dbl, branch_pool]
        return torch.cat(outputs, 1)


class InceptionC(nn.Module):
    """Inception block C (17x17 grid) with factorized 7x7 convolutions
    (1x7 followed by 7x1); always emits 768 channels."""

    def __init__(self, in_channels, channels_7x7):
        super(InceptionC, self).__init__()
        self.branch1x1 = BasicConv2d(in_channels, 192, kernel_size=1)

        c7 = channels_7x7
        self.branch7x7_1 = BasicConv2d(in_channels, c7, kernel_size=1)
        self.branch7x7_2 = BasicConv2d(c7, c7, kernel_size=(1, 7), padding=(0, 3))
        self.branch7x7_3 = BasicConv2d(c7, 192, kernel_size=(7, 1), padding=(3, 0))

        self.branch7x7dbl_1 = BasicConv2d(in_channels, c7, kernel_size=1)
        self.branch7x7dbl_2 = BasicConv2d(c7, c7, kernel_size=(7, 1), padding=(3, 0))
        self.branch7x7dbl_3 = BasicConv2d(c7, c7, kernel_size=(1, 7), padding=(0, 3))
        self.branch7x7dbl_4 = BasicConv2d(c7, c7, kernel_size=(7, 1), padding=(3, 0))
        self.branch7x7dbl_5 = BasicConv2d(c7, 192, kernel_size=(1, 7), padding=(0, 3))

        self.branch_pool = BasicConv2d(in_channels, 192, kernel_size=1)

    def forward(self, x):
        branch1x1 = self.branch1x1(x)

        branch7x7 = self.branch7x7_1(x)
        branch7x7 = self.branch7x7_2(branch7x7)
        branch7x7 = self.branch7x7_3(branch7x7)

        branch7x7dbl = self.branch7x7dbl_1(x)
        branch7x7dbl = self.branch7x7dbl_2(branch7x7dbl)
        branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl)
        branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl)
        branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl)

        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)
        branch_pool = self.branch_pool(branch_pool)

        outputs = [branch1x1, branch7x7, branch7x7dbl, branch_pool]
        return torch.cat(outputs, 1)


class InceptionD(nn.Module):
    """Grid-reduction block (17x17 -> 8x8): stride-2 3x3, 7x7-then-3x3,
    and stride-2 max-pool branches."""

    def __init__(self, in_channels):
        super(InceptionD, self).__init__()
        self.branch3x3_1 = BasicConv2d(in_channels, 192, kernel_size=1)
        self.branch3x3_2 = BasicConv2d(192, 320, kernel_size=3, stride=2)

        self.branch7x7x3_1 = BasicConv2d(in_channels, 192, kernel_size=1)
        self.branch7x7x3_2 = BasicConv2d(192, 192, kernel_size=(1, 7), padding=(0, 3))
        self.branch7x7x3_3 = BasicConv2d(192, 192, kernel_size=(7, 1), padding=(3, 0))
        self.branch7x7x3_4 = BasicConv2d(192, 192, kernel_size=3, stride=2)

    def forward(self, x):
        branch3x3 = self.branch3x3_1(x)
        branch3x3 = self.branch3x3_2(branch3x3)

        branch7x7x3 = self.branch7x7x3_1(x)
        branch7x7x3 = self.branch7x7x3_2(branch7x7x3)
        branch7x7x3 = self.branch7x7x3_3(branch7x7x3)
        branch7x7x3 = self.branch7x7x3_4(branch7x7x3)

        branch_pool = F.max_pool2d(x, kernel_size=3, stride=2)

        outputs = [branch3x3, branch7x7x3, branch_pool]
        return torch.cat(outputs, 1)


class InceptionE(nn.Module):
    """Inception block E (8x8 grid) with expanded filter banks: the 3x3
    branches split into parallel 1x3 and 3x1 convolutions that are
    concatenated; always emits 2048 channels."""

    def __init__(self, in_channels):
        super(InceptionE, self).__init__()
        self.branch1x1 = BasicConv2d(in_channels, 320, kernel_size=1)

        self.branch3x3_1 = BasicConv2d(in_channels, 384, kernel_size=1)
        self.branch3x3_2a = BasicConv2d(384, 384, kernel_size=(1, 3), padding=(0, 1))
        self.branch3x3_2b = BasicConv2d(384, 384, kernel_size=(3, 1), padding=(1, 0))

        self.branch3x3dbl_1 = BasicConv2d(in_channels, 448, kernel_size=1)
        self.branch3x3dbl_2 = BasicConv2d(448, 384, kernel_size=3, padding=1)
        self.branch3x3dbl_3a = BasicConv2d(384, 384, kernel_size=(1, 3), padding=(0, 1))
        self.branch3x3dbl_3b = BasicConv2d(384, 384, kernel_size=(3, 1), padding=(1, 0))

        self.branch_pool = BasicConv2d(in_channels, 192, kernel_size=1)

    def forward(self, x):
        branch1x1 = self.branch1x1(x)

        branch3x3 = self.branch3x3_1(x)
        branch3x3 = [
            self.branch3x3_2a(branch3x3),
            self.branch3x3_2b(branch3x3),
        ]
        branch3x3 = torch.cat(branch3x3, 1)

        branch3x3dbl = self.branch3x3dbl_1(x)
        branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
        branch3x3dbl = [
            self.branch3x3dbl_3a(branch3x3dbl),
            self.branch3x3dbl_3b(branch3x3dbl),
        ]
        branch3x3dbl = torch.cat(branch3x3dbl, 1)

        branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)
        branch_pool = self.branch_pool(branch_pool)

        outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool]
        return torch.cat(outputs, 1)


class InceptionAux(nn.Module):
    """Auxiliary classifier head attached to the 17x17x768 feature map.

    The `stddev` attributes are read by Inception3's weight initializer to
    give these layers a smaller truncated-normal std.
    """

    def __init__(self, in_channels, num_classes):
        super(InceptionAux, self).__init__()
        self.conv0 = BasicConv2d(in_channels, 128, kernel_size=1)
        self.conv1 = BasicConv2d(128, 768, kernel_size=5)
        self.conv1.stddev = 0.01
        self.fc = nn.Linear(768, num_classes)
        self.fc.stddev = 0.001

    def forward(self, x):
        # 17 x 17 x 768
        x = F.avg_pool2d(x, kernel_size=5, stride=3)
        # 5 x 5 x 768
        x = self.conv0(x)
        # 5 x 5 x 128
        x = self.conv1(x)
        # 1 x 1 x 768
        x = x.view(x.size(0), -1)
        # 768
        x = self.fc(x)
        # num_classes
        return x


class BasicConv2d(nn.Module):
    """Conv2d (no bias) -> BatchNorm2d (eps=0.001) -> in-place ReLU."""

    def __init__(self, in_channels, out_channels, **kwargs):
        super(BasicConv2d, self).__init__()
        self.conv = nn.Conv2d(in_channels, out_channels, bias=False, **kwargs)
        self.bn = nn.BatchNorm2d(out_channels, eps=0.001)

    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        return F.relu(x, inplace=True)


if __name__ == '__main__':
    # Example: build Inception3 and print its layer structure.
    net = Inception3()
    print(net)

本文来自互联网用户投稿,该文观点仅代表作者本人,不代表本站立场。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如若转载,请注明出处:http://www.mzph.cn/news/469786.shtml

如若内容造成侵权/违法违规/事实不符,请联系多彩编程网进行投诉反馈email:809451989@qq.com,一经查实,立即删除!

相关文章

html导出pdf实例,jsPDF导出pdf示例

jsPDF貌似不支持中文复制代码 代码如下:Downloadifybody {background: #fff; width: 500px; margin: 20px auto;}input, textarea, p { font-family: 宋体, 黑体; font-size: 12pt;}input, textarea { border: solid 1px #aaa; padding: 4px; width: 98%;}window.loadfunction(…

SecureCRT 用来当串口工具的设置

今天从淘宝网上买的USB转串口线终于到了,从网上下载了驱动,关于USB转串口驱动在我上传的资源里面有,关于SecureCRT这个串口调试工具我也上传了,是个绿色免安装版本。 刚开始的时候一步一步的设置串口,连接串口也可以连…

Brainstorm-the walkthrough example: Image Classification

(1) 运行create data,其中包括下载cifar10,并转换为hdf5格式(详见百度百科:http://baike.baidu.com/view/771949.htm#4_2): cifar10的数据简介见:http://www.cs.toronto.edu/~kriz/cifar.html cd data pyth…

卷积神经网络(高级篇) Inception Module

Inception Module 1、卷积核超参数选择困难,自动找到卷积的最佳组合。 2、1x1卷积核,不同通道的信息融合。使用1x1卷积核虽然参数量增加了,但是能够显著的降低计算量(operations) 3、Inception Module由4个分支组成,要分清哪些…

计算机谈音乐薛之谦,明星浮世绘之薛之谦:分析了50多首音乐作品,为其总结了五个特点...

原标题:明星浮世绘之薛之谦:分析了50多首音乐作品,为其总结了五个特点薛之谦,才华横溢思维敏捷,性格搞怪却又忧郁。我曾经用四个字来形容他,沙雕其外,金玉其中。记得老薛曾经发布了一个动态&…

linux内核下载 编译

linux内核下载网址 今天去看了一场电影“疯狂的原始人”----回来的车上看到一个老奶奶传教士,我想对自己多,加油,加油学习,深思深思 我们现在用的安霸系统,每搞一次我都会进行一次备份,一个系统加上GUI一起都有差不多一G多,而今天下载了最新的linux内核版本,才不80M左…

黑客与画家读后感

财富的定义,金钱只是媒介,财富是人们需要的东西。你如果不是富二代,只是说明你没钱,但你还是通过创造来获得财富。贫富差距不一定就是坏事。如何获得财富,你的工作要满足两个特征:可测量性,可放大性。比如一…

Deep learning

论文:doi:10.1038/nature14539 论文意义和主要内容 三巨头从机器学习谈起,指出传统机器学习的不足,总览深度学习理论、模型,给出了深度学习的发展历史,以及DL中最重要的算法和理论。 概念: 原理&#xff…

清华大学计算机学院主页,计算机图形学基础课程主页 | 清华大学计算机系

1. 2002级本科生黄其兴同学在完成图形学课作业的过程中就B样条的升阶和顶点插入算法进行了深入的研究,并在胡事民教授和Martin教授的共同指导下在国际著名刊物Computer Aided Geometric Design (CAGD)上发表文章.Fast degree elevation and knot insertion for B-s…

mkimage command not found

UIMAGE arch/arm/boot/uImage "mkimage" command not found - U-Boot images will not be built Image arch/arm/boot/uImage is ready cp: 无法获取"arch/arm/boot/uImage" 的文件状态(stat): 没有那个文件或目录 使用make uImage编译生成的内核能由ub…

克隆虚拟机 virtualbox 修改 uuid

cmd E:\Program Files\Oracle\VirtualBox>VBoxManage.exe internalcommands sethduuid "E:\Program Files\Oracle\VirtualBox VMs\115-3.vhd"转载于:https://www.cnblogs.com/lonelydreamer/p/6140931.html

第一周:深度学习引言(Introduction to Deep Learning)

1.1 欢迎(Welcome) 深度学习改变了传统互联网业务,例如如网络搜索和广告。但是深度学习同时也使得许多新产品和企业以很多方式帮助人们,从获得更好的健康关注。 深度学习做的非常好的一个方面就是读取X光图像,到生活中的个性化教育&#xf…

无忧计算机二级试题题库,全国计算机二级MS Office试题

考无忧小编为各位考生搜集整理了的二级MS Office试题,希望可以为各位的备考锦上添花,雪中送炭!记得刷计算机等级考试题库哟!1、被选中要筛选的数据单元格的下拉箭头中有哪几种筛选方式( ABD)A、全部B、前十个C、后十个D、自定义2、…

第二周:神经网络的编程基础之Python与向量化

本节课我们将来探讨Python和向量化的相关知识。 1. Vectorization 深度学习算法中,数据量很大,在程序中应该尽量减少使用循环语句,而可以使用向量运算来提高程序运行速度。 向量化(Vectorization)就是利用矩阵运算的…

U-boot移槙

1、我是照着这里去移植的 http://blog.chinaunix.net/uid-26306203-id-3716785.html 2、然后make 出现问题,到这里去有解决办法:http://blog.csdn.net/zjt289198457/article/details/6854177 : http://blog.csdn.net/zjt289198457/article/details/68…

不动产中心考试计算机测试题,2005年全国计算机二级考试VFP笔试模拟题

一、选择题(每小题 2 分,共 70分)下列各题A)、B)、C)、D)四个选项中,只有一个选项是正确的,请将正确选项涂写在答题卡相应位置上,答在试卷上不得分。(1)下列选项不符合良好程序设计风格的是________。A)源程序要文档化B)数据说明的…

第三周:浅层神经网络

1. 神经网络综述 首先,我们从整体结构上来大致看一下神经网络模型。 前面的课程中,我们已经使用计算图的方式介绍了逻辑回归梯度下降算法的正向传播和反向传播两个过程。如下图所示。神经网络的结构与逻辑回归类似,只是神经网络的层数比逻辑…

嵌入式工程师应该知道的C语言

收集的一些嵌入式软件工程师面试题目 1、将一个字符串逆序 2、将一个链表逆序 3、计算一个字节里(byte)里面有多少bit被置1 4、搜索给定的字节(byte) 5、在一个字符串中找到可能的最长的子字符串 6、字符串转换为整数 7、整数转换为字符串 …

使用二分查找向一个有序数组中插入一个数据

折半查找法找到一个元素在数组中的下标* param arr 数组* param key 要查找的元素* return 找到则返回元素在数组中的下标,如果没找到,则返回这个元素在有序数组中的位置* 如:[1,4,6,7,10,11,15],查找8在数组中的位置,…

西北大学计算机考试,西北大学计算机技术

一、培养目标本专业是为培养适合国家建设需要的、具有严谨科学态度和敬业精神的计算机应用技术人才,通过硕士阶段的培养,应具备扎实的计算机系统的专业知识,了解相关领域的研究动态,熟悉现代化计算机应用技术和软硬件环境及工具。…