# CaffeNet network definition (Caffe prototxt).
# (Online-paste header "fork download" removed — not part of the prototxt.)
name: "CaffeNet"
layer {
  name: "data"
  type: "Input"
  top: "data"
  input_param { shape: { dim: 10 dim: 3 dim: 227 dim: 227 } }
}
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 96
    kernel_size: 11
    stride: 4
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "conv1"
  top: "conv1"
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "norm1"
  type: "LRN"
  bottom: "pool1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "norm1"
  top: "conv2"
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
    group: 2
  }
}
layer {
  name: "relu2"
  type: "ReLU"
  bottom: "conv2"
  top: "conv2"
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "norm2"
  type: "LRN"
  bottom: "pool2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layer {
  name: "conv3"
  type: "Convolution"
  bottom: "norm2"
  top: "conv3"
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
  }
}
layer {
  name: "relu3"
  type: "ReLU"
  bottom: "conv3"
  top: "conv3"
}
layer {
  name: "conv5-new"
  type: "Convolution"
  bottom: "conv3"
  top: "conv5-new"
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
    group: 2
  }
}
layer {
  name: "relu5-new"
  type: "ReLU"
  bottom: "conv5-new"
  top: "conv5-new"
}
layer {
  name: "pool5-new"
  type: "Pooling"
  bottom: "conv5-new"
  top: "pool5-new"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layer {
  name: "fc6"
  type: "InnerProduct"
  bottom: "pool5-new"
  top: "fc6"
  inner_product_param {
    num_output: 4096
  }
}
layer {
  name: "relu6"
  type: "ReLU"
  bottom: "fc6"
  top: "fc6"
}
layer {
  name: "drop6"
  type: "Dropout"
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc7"
  type: "InnerProduct"
  bottom: "fc6"
  top: "fc7"
  inner_product_param {
    num_output: 4096
  }
}
layer {
  name: "relu7"
  type: "ReLU"
  bottom: "fc7"
  top: "fc7"
}
layer {
  name: "drop7"
  type: "Dropout"
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layer {
  name: "fc8"
  type: "InnerProduct"
  bottom: "fc7"
  top: "fc8"
  inner_product_param {
    num_output: 1000
  }
}
layer {
  name: "prob"
  type: "Softmax"
  bottom: "fc8"
  top: "prob"
}
# (Online-paste footer removed: "Not running #stdin #stdout 0s 0KB";
#  stdin and stdout were both empty — not part of the prototxt.)