# Deploy-time network definition in legacy Caffe "V1" prototxt syntax
# ("layers" blocks with enum-style types such as CONVOLUTION / RELU).
# CaffeNet-style CNN: three conv stages, two 512-wide fully-connected
# layers with dropout, and an 8-way softmax output ("prob").
# NOTE(review): the filename suggests this is the 8-bucket age classifier
# (Adience age/gender model) — num_output: 8 is visible below, but the
# bucket semantics must be confirmed against the accompanying caffemodel.
name: "CaffeNet"
input: "data"
input_dim: 1    # batch size
input_dim: 3    # channels (BGR for Caffe models, typically)
input_dim: 227  # height
input_dim: 227  # width

# --- Stage 1: conv -> relu -> max-pool -> LRN ---
layers {
  name: "conv1"
  type: CONVOLUTION
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 96
    kernel_size: 7
    stride: 4
  }
}
layers {
  name: "relu1"
  type: RELU
  bottom: "conv1"
  top: "conv1"   # in-place activation
}
layers {
  name: "pool1"
  type: POOLING
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm1"
  type: LRN
  bottom: "pool1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}

# --- Stage 2: conv -> relu -> max-pool -> LRN ---
layers {
  name: "conv2"
  type: CONVOLUTION
  bottom: "norm1"
  top: "conv2"
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
  }
}
layers {
  name: "relu2"
  type: RELU
  bottom: "conv2"
  top: "conv2"
}
layers {
  name: "pool2"
  type: POOLING
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm2"
  type: LRN
  bottom: "pool2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}

# --- Stage 3: conv -> relu -> max-pool ---
layers {
  name: "conv3"
  type: CONVOLUTION
  bottom: "norm2"
  top: "conv3"
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
  }
}
layers {
  name: "relu3"
  type: RELU
  bottom: "conv3"
  top: "conv3"
}
layers {
  name: "pool5"
  type: POOLING
  bottom: "conv3"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}

# --- Classifier head: fc6 -> fc7 -> fc8 -> softmax ---
layers {
  name: "fc6"
  type: INNER_PRODUCT
  bottom: "pool5"
  top: "fc6"
  inner_product_param {
    num_output: 512
  }
}
layers {
  name: "relu6"
  type: RELU
  bottom: "fc6"
  top: "fc6"
}
layers {
  name: "drop6"
  type: DROPOUT
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5   # no-op at deploy/test time; kept for weight-file parity
  }
}
layers {
  name: "fc7"
  type: INNER_PRODUCT
  bottom: "fc6"
  top: "fc7"
  inner_product_param {
    num_output: 512
  }
}
layers {
  name: "relu7"
  type: RELU
  bottom: "fc7"
  top: "fc7"
}
layers {
  name: "drop7"
  type: DROPOUT
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  name: "fc8"
  type: INNER_PRODUCT
  bottom: "fc7"
  top: "fc8"
  inner_product_param {
    num_output: 8   # 8 output classes
  }
}
layers {
  name: "prob"
  type: SOFTMAX
  bottom: "fc8"
  top: "prob"   # class probabilities
}