Commit f5cf4314 authored by Billy Amélie

Merge branch 'AmelieBranch' into 'master'

competition score + transfer learning modification + model retraining

See merge request !21
parents f631da4a 4e913ea3
@@ -143,24 +143,26 @@ def weightify(model_orig, custom_model, layer_modify,input_channel):
     target_layer.trainable = False
     return custom_model
-def create_transfer_learning(new_model, custom_model, modify_name,input_channel = 4):
+def create_transfer_learning(new_model, custom_model, modify_name,input_channel = 4, weights = False, hybrid = False):
     # create cnn with transfer learning
     new = weightify(new_model,custom_model,modify_name,input_channel)
     x = new.output
     x = GlobalAveragePooling2D()(x)
     x = Dropout(0.5)(x)
     #x = Dense(1)(x)
-    x = Dense(1, activation='linear')(x)
+    if hybrid == False :
+        x = Dense(1, activation='linear')(x)
     model = Model(new.input, x)
+    if weights == True:
+        for layer in new.layers:
+            layer.trainable = False
     return model
-def create_hybrid_transfer(nb_attributes,new_model, custom_model, modify_name,input_channel):
+def create_hybrid_transfer(nb_attributes,new_model, custom_model, modify_name,input_channel,weight = False):
     # create cnn and mlp models
     mlp = create_mlp(nb_attributes)
-    cnn = create_transfer_learning(new_model, custom_model, modify_name,input_channel)
+    cnn = create_transfer_learning(new_model, custom_model, modify_name,input_channel,weights = weight, hybrid = True)
     combinedInput = concatenate([mlp.output, cnn.output])
     x = Dense(4, activation="tanh")(combinedInput)
     x = Dropout(0.5)(x)
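In short, the new weights flag freezes every layer of the transferred backbone, and hybrid=True skips the final Dense(1, activation='linear') head so that create_hybrid_transfer can concatenate the raw pooled CNN features with the MLP branch. Below is a minimal usage sketch of the two updated functions; the import path, the ResNet50 backbone, the layer name 'conv1_conv', the input shape, the attribute count and the loss are illustrative assumptions, not values taken from this merge request.

# Usage sketch only -- module path, backbone, layer name, shapes,
# attribute count and loss below are assumptions, not values from this commit.
from tensorflow.keras.applications import ResNet50
# from models import create_transfer_learning, create_hybrid_transfer  # assumed module location

# Pretrained backbone and a 4-channel clone; weightify() presumably copies the
# pretrained weights into the clone, adapting the named layer to 4 input channels.
base = ResNet50(weights='imagenet', include_top=False)
clone = ResNet50(weights=None, include_top=False, input_shape=(224, 224, 4))

# Stand-alone CNN regressor: weights=True freezes the transferred layers,
# hybrid=False keeps the Dense(1, activation='linear') output head.
cnn_model = create_transfer_learning(base, clone, 'conv1_conv',
                                     input_channel=4, weights=True, hybrid=False)

# Hybrid model: hybrid=True is set internally, so the CNN branch exposes its
# pooled features and is concatenated with an MLP over 10 tabular attributes.
hybrid_model = create_hybrid_transfer(10, base, clone, 'conv1_conv',
                                      input_channel=4, weight=True)
hybrid_model.compile(optimizer='adam', loss='mse')  # regression loss assumed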