
Multiple Softmax in Dense Layer

I have a network and I want to apply softmax on a dense layer. The dense layer has shape (?, 312), and I want to apply softmax within groups of its units: units 1-9, units 10-18, and so on. I don't know how to do that. The image I attached (omitted here) illustrates what I want.

I tried the code below, but it does not seem right to me, because I take all 312 units and feed them into a separate Dense layer for each group. I want to apply the softmaxes directly on the 312 units.

from keras.layers import Input, Dense, Conv2D, BatchNormalization, Activation, MaxPooling2D, Dropout, Flatten
from keras.models import Model
from keras.layers import concatenate
from keras.optimizers import Adam
class CubNet:
    @staticmethod
    def main_network(inputs,chanDim):

        x = Conv2D(32, (3, 3), padding="same")(inputs)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chanDim)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)
        x = Dropout(0.25)(x)
        #(CONV => RELU) * 2 => POOL
        x = Conv2D(64, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chanDim)(x)
        x = Conv2D(64, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chanDim)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)
        x = Dropout(0.25)(x)

        # (CONV => RELU) * 2 => POOL
        x = Conv2D(128, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chanDim)(x)
        x = Conv2D(128, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chanDim)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)
        x = Dropout(0.25)(x)
        return x
    @staticmethod
    def category_branch(inputs,numCategories,chanDim):
        x = Conv2D(128, (3, 3), padding="same")(inputs)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chanDim)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)
        x = Dropout(0.25)(x)
        # (CONV => RELU) * 2 => POOL
        x = Conv2D(128, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chanDim)(x)
        x = Conv2D(128, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chanDim)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)
        x = Dropout(0.25)(x)
        x = Flatten()(x)
        x = Dense(1024)(x)
        x = Activation("relu")(x)
        x = BatchNormalization()(x)
        x = Dropout(0.5)(x)

        # softmax classifier
        x = Dense(numCategories)(x)
        x = Activation("softmax", name = "category_output")(x)

        # return the constructed network architecture
        return x
    @staticmethod
    def Attribute_branch(inputs, numAtt, chanDim):
        x = Conv2D(16, (3, 3), padding="same")(inputs)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chanDim)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)
        x = Dropout(0.25)(x)

        # CONV => RELU => POOL
        x = Conv2D(32, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chanDim)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)
        x = Dropout(0.25)(x)

        # CONV => RELU => POOL
        x = Conv2D(32, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=chanDim)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)
        x = Dropout(0.25)(x)
        # define a branch of output layers for the number of different
        # colors (i.e., red, black, blue, etc.)
        x = Flatten()(x)
        x = Dense(312)(x)
        x = Activation("relu")(x)
        x = BatchNormalization()(x)
        print(x.shape)
        x1 = Dense(9)(x)
        x2 = Dense(15)(x)
        x3 = Dense(15)(x)
        x4 = Dense(15)(x)
        x5 = Dense(4)(x)
        x6 = Dense(15)(x)
        x7 = Dense(6)(x)
        x8 = Dense(15)(x)
        x9 = Dense(11)(x)
        x10 = Dense(15)(x)
        x11 = Dense(15)(x)
        x12 = Dense(14)(x)
        x13 = Dense(3)(x)
        x14 = Dense(15)(x)
        x15 = Dense(15)(x)
        x16 = Dense(15)(x)
        x17 = Dense(15)(x)
        x18 = Dense(5)(x)
        x19 = Dense(5)(x)
        x20 = Dense(14)(x)
        x21 = Dense(4)(x)
        x22 = Dense(4)(x)
        x23 = Dense(4)(x)
        x24 = Dense(15)(x)
        x25 = Dense(15)(x)
        x26 = Dense(15)(x)
        x27 = Dense(15)(x)
        x28 = Dense(4)(x)
        x1 = Activation("softmax", name = "has_bill_shape")(x1)
        x2 = Activation("softmax", name = "has_wing_color")(x2)
        x3 = Activation("softmax", name = "has_upperpart_color")(x3)
        x4 = Activation("softmax", name = "has_underpart_color")(x4)
        x5 = Activation("softmax", name = "has_breast_pattern")(x5)
        x6 = Activation("softmax", name = "has_back_color")(x6)
        x7 = Activation("softmax", name = "has_tail_shape")(x7)
        x8 = Activation("softmax", name = "has_uppertail_color")(x8)
        x9 = Activation("softmax", name = "has_head_pattern")(x9)
        x10 = Activation("softmax", name = "has_breast_color")(x10)
        x11 = Activation("softmax", name = "has_throat_color")(x11)
        x12 = Activation("softmax", name = "has_eye_color")(x12)
        x13 = Activation("softmax", name = "has_bill_length")(x13)
        x14 = Activation("softmax", name = "has_forehead_color")(x14)
        x15 = Activation("softmax", name = "has_undertail_color")(x15)
        x16 = Activation("softmax", name = "has_nape_color")(x16)
        x17 = Activation("softmax", name = "has_belly_color")(x17)
        x18 = Activation("softmax", name = "has_wing_shape")(x18)
        x19 = Activation("softmax", name = "has_size")(x19)
        x20 = Activation("softmax", name = "has_shape")(x20)
        x21 = Activation("softmax", name = "has_back_pattern")(x21)
        x22 = Activation("softmax", name = "has_tail_pattern")(x22)
        x23 = Activation("softmax", name = "has_belly_pattern")(x23)
        x24 = Activation("softmax", name = "has_primary_color")(x24)
        x25 = Activation("softmax", name = "has_leg_color")(x25)
        x26 = Activation("softmax", name = "has_bill_color")(x26)
        x27 = Activation("softmax", name = "has_crown_color")(x27)
        x28 = Activation("softmax", name = "has_wing_pattern")(x28)
        # 28 attribute groups, 312 units in total
        x = concatenate([x1,x2,x3,x4,x5,x6,x7,x8,x9,x10,x11,x12,x13,x14,x15,x16,x17,x18,x19,x20,x21,x22,x23,
                        x24,x25,x26,x27,x28], name="Attribute_output")
        #print(x.shape)
        return x

    @staticmethod
    def Build(numCategories, numAttributes, finalAct="softmax"):
        inputshape = (299,299,3)
        chanDim = -1
        inputs = Input(shape=inputshape)
        main_output = CubNet.main_network(inputs, chanDim = chanDim)
        categoryBranch = CubNet.category_branch(main_output,numCategories, chanDim=chanDim)
        AttBranch = CubNet.Attribute_branch(main_output, numAttributes, chanDim=chanDim)

        model = Model(inputs=inputs, outputs=[categoryBranch, AttBranch], name ='Cub')
        return model
model = CubNet.Build(numCategories=200, numAttributes=312, finalAct="softmax")
losses = {"category_output" : "categorical_crossentropy",
         "Attribute_output" : "binary_crossentropy"}

loss_weight = {"category_output" : 1.0,
         "Attribute_output" : 1.0}

print("[INFO] Compiling Model....")
opt = Adam(lr = INIT_LR, decay = INIT_LR/EPOCHS)
model.compile(optimizer=opt, loss=losses, loss_weights=loss_weight, metrics=["accuracy"])

H = model.fit(trainX, {"category_output": trainCategoryY, "Attribute_output": trainAttY},
              validation_data = (valX,{"category_output": valCategoryY, "Attribute_output": valAttY}),
                        epochs= EPOCHS, verbose=1)
print("[INFO] serializing network....")
model.save("ATT_categorical.h5")

I hope someone can answer. Here is the link to the same question, but that approach does not work, since a Dense layer does not take two arguments.

I think the simplest way is to use a Reshape layer and then apply the softmax along the correct axis:

from keras.layers import Input, Lambda, Reshape
from keras.models import Model
from keras.activations import softmax
import numpy as np


inp = Input(shape=(312,1))
# group the 312 units into 78 groups of 4 units each
x = Reshape((78,4,1))(inp)
# softmax over each group of 4 (axis 2)
out = Lambda(lambda x: softmax(x, axis=2))(x)

model = Model(inp, out)

output = model.predict(np.zeros((1,312,1)))

Note that the target shape passed to Reshape does not include the batch dimension.
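This only works when all the groups have the same size, though. For the unequal group sizes in the question (9, 15, 15, ...), one option is to slice the flat vector per group, apply softmax to each slice, and concatenate the results. Here is a minimal sketch, assuming the group sizes of the question's Dense heads x1..x28; everything else is illustrative:

from keras.layers import Input, Lambda, concatenate
from keras.models import Model
from keras.activations import softmax
import numpy as np

# group sizes taken from the question's Dense heads x1..x28
group_sizes = [9, 15, 15, 15, 4, 15, 6, 15, 11, 15, 15, 14, 3, 15,
               15, 15, 15, 5, 5, 14, 4, 4, 4, 15, 15, 15, 15, 4]

inp = Input(shape=(sum(group_sizes),))  # 312 units in total
outs, start = [], 0
for size in group_sizes:
    # slice one group out of the flat vector and softmax it independently
    outs.append(Lambda(lambda t, s=start, e=start + size:
                       softmax(t[:, s:e], axis=-1))(inp))
    start += size

out = concatenate(outs)  # back to one flat 312-unit vector
model = Model(inp, out)

print(model.predict(np.zeros((1, 312))).shape)  # (1, 312)

Each slice sums to 1 on its own, so the output contains one probability distribution per attribute group.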
