/*
 * Copyright 2016 The BigDL Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
| 16 | + |
package com.intel.analytics.bigdl.models.alexnet

import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.mkl.Memory
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.numeric.NumericFloat
| 23 | + |
object AlexNet {

  // Flattened size of the conv tower output: 256 feature maps of 6 x 6 spatial.
  private val flattenedSize = 256 * 6 * 6

  /**
   * Builds AlexNet as a Sequential container.
   *
   * @param classNum   number of output classes produced by the final fc8 layer
   * @param hasDropout when true, Dropout(0.5) layers follow fc6 and fc7
   * @return the assembled network as a Module[Float]
   */
  def apply(classNum: Int, hasDropout: Boolean = true): Module[Float] = {
    val net = Sequential()

    // Convolutional feature extractor: conv1 .. pool5.
    net.add(SpatialConvolution(3, 96, 11, 11, 4, 4, 0, 0, 1, false).setName("conv1"))
    net.add(ReLU(true).setName("relu1"))
    net.add(SpatialCrossMapLRN(5, 0.0001, 0.75).setName("norm1"))
    net.add(SpatialMaxPooling(3, 3, 2, 2).setName("pool1"))
    net.add(SpatialConvolution(96, 256, 5, 5, 1, 1, 2, 2, 2).setName("conv2"))
    net.add(ReLU(true).setName("relu2"))
    net.add(SpatialCrossMapLRN(5, 0.0001, 0.75).setName("norm2"))
    net.add(SpatialMaxPooling(3, 3, 2, 2).setName("pool2"))
    net.add(SpatialConvolution(256, 384, 3, 3, 1, 1, 1, 1).setName("conv3"))
    net.add(ReLU(true).setName("relu3"))
    net.add(SpatialConvolution(384, 384, 3, 3, 1, 1, 1, 1, 2).setName("conv4"))
    net.add(ReLU(true).setName("relu4"))
    net.add(SpatialConvolution(384, 256, 3, 3, 1, 1, 1, 1, 2).setName("conv5"))
    net.add(ReLU(true).setName("relu5"))
    net.add(SpatialMaxPooling(3, 3, 2, 2).setName("pool5"))

    // Classifier head: flatten, then three fully-connected layers.
    net.add(View(flattenedSize))
    net.add(Linear(flattenedSize, 4096).setName("fc6"))
    net.add(ReLU(true).setName("relu6"))
    if (hasDropout) net.add(Dropout(0.5).setName("drop6"))
    net.add(Linear(4096, 4096).setName("fc7"))
    net.add(ReLU(true).setName("relu7"))
    if (hasDropout) net.add(Dropout(0.5).setName("drop7"))
    net.add(Linear(4096, classNum).setName("fc8"))
    net.add(LogSoftMax().setName("loss"))
    net
  }

  /**
   * Builds the same AlexNet topology as [[apply]], but as a static Graph
   * whose nodes are wired explicitly via `inputs`.
   *
   * @param classNum   number of output classes produced by the final fc8 layer
   * @param hasDropout when true, Dropout(0.5) nodes follow fc6 and fc7
   * @return the assembled network as a Module[Float]
   */
  def graph(classNum: Int, hasDropout: Boolean = true): Module[Float] = {
    // Feature-extractor nodes.
    val c1 = SpatialConvolution(3, 96, 11, 11, 4, 4, 0, 0, 1, false)
      .setName("conv1").inputs()
    val r1 = ReLU(true).setName("relu1").inputs(c1)
    val n1 = SpatialCrossMapLRN(5, 0.0001, 0.75).setName("norm1").inputs(r1)
    val p1 = SpatialMaxPooling(3, 3, 2, 2).setName("pool1").inputs(n1)
    val c2 = SpatialConvolution(96, 256, 5, 5, 1, 1, 2, 2, 2).setName("conv2").inputs(p1)
    val r2 = ReLU(true).setName("relu2").inputs(c2)
    val n2 = SpatialCrossMapLRN(5, 0.0001, 0.75).setName("norm2").inputs(r2)
    val p2 = SpatialMaxPooling(3, 3, 2, 2).setName("pool2").inputs(n2)
    val c3 = SpatialConvolution(256, 384, 3, 3, 1, 1, 1, 1).setName("conv3").inputs(p2)
    val r3 = ReLU(true).setName("relu3").inputs(c3)
    val c4 = SpatialConvolution(384, 384, 3, 3, 1, 1, 1, 1, 2).setName("conv4").inputs(r3)
    val r4 = ReLU(true).setName("relu4").inputs(c4)
    val c5 = SpatialConvolution(384, 256, 3, 3, 1, 1, 1, 1, 2).setName("conv5").inputs(r4)
    val r5 = ReLU(true).setName("relu5").inputs(c5)
    val p5 = SpatialMaxPooling(3, 3, 2, 2).setName("pool5").inputs(r5)

    // Classifier-head nodes; dropout nodes are bypassed when disabled.
    val flat = View(flattenedSize).inputs(p5)
    val f6 = Linear(flattenedSize, 4096).setName("fc6").inputs(flat)
    val r6 = ReLU(true).setName("relu6").inputs(f6)
    val d6 = if (hasDropout) Dropout(0.5).setName("drop6").inputs(r6) else r6
    val f7 = Linear(4096, 4096).setName("fc7").inputs(d6)
    val r7 = ReLU(true).setName("relu7").inputs(f7)
    val d7 = if (hasDropout) Dropout(0.5).setName("drop7").inputs(r7) else r7
    val f8 = Linear(4096, classNum).setName("fc8").inputs(d7)
    val out = LogSoftMax().setName("loss").inputs(f8)
    Graph(c1, out)
  }

}
0 commit comments