Commit 3f0a09d4 authored by Bannier Delphine's avatar Bannier Delphine
Browse files

Merge branch 'DelphineBranch' into 'master'

Delphine branch

See merge request !18
parents 04389b6c 67ac6107
This diff is collapsed.
from keras.models import Model, Sequential
from keras import backend as K
def create_dropout_predict_function(model, dropout):
    """Build a Keras backend function that predicts with dropout kept active.

    Used for Monte-Carlo dropout: cloning the model with every dropout rate
    forced to `dropout` and evaluating with learning_phase=1 yields stochastic
    predictions whose spread estimates model uncertainty.

    Parameters
    ----------
    model : keras model (Sequential or functional)
    dropout : float
        Fraction of units to drop, applied to all Dropout layers and to
        recurrent layers that expose a ``dropout`` config key.

    Returns
    -------
    predict_with_dropout : K.function
        Call as ``predict_with_dropout(inputs + [1])`` — the trailing 1 is the
        learning phase, which keeps dropout enabled at inference time.
    """
    # Clone the architecture via its config so the original model is untouched
    conf = model.get_config()

    # Force the requested dropout rate into every relevant layer
    for layer in conf['layers']:
        if layer["class_name"] == "Dropout":
            layer["config"]["rate"] = dropout
        elif "dropout" in layer["config"]:
            # Recurrent layers (LSTM/GRU/...) carry dropout in their config
            layer["config"]["dropout"] = dropout

    # Rebuild a model of the same kind from the modified config
    if isinstance(model, Sequential):
        model_dropout = Sequential.from_config(conf)
    else:
        model_dropout = Model.from_config(conf)
    model_dropout.set_weights(model.get_weights())

    # BUG FIX: the inputs list must be concatenated with the learning-phase
    # tensor, not nested inside another list ([inputs, phase] passes a list
    # where a tensor is expected and fails at call time).
    predict_with_dropout = K.function(
        model_dropout.inputs + [K.learning_phase()],
        model_dropout.outputs,
    )
    return predict_with_dropout
\ No newline at end of file
......@@ -8,8 +8,8 @@ import math
def evaluate_hybrid(model,df, trainAttrX, trainImagesX, trainY,sc):
logging.info("predicting ...")
preds = model.predict([trainAttrX, trainImagesX])
diff = sc.inverse_transform(preds.flatten()) - sc.inverse_transform(trainY)
percentDiff = (diff / sc.inverse_transform(trainY)) * 100
diff = preds.flatten() - trainY
percentDiff = (diff / trainY) * 100
absPercentDiff = np.abs(percentDiff)
mean = np.mean(absPercentDiff)
std = np.std(absPercentDiff)
......@@ -41,11 +41,10 @@ def evaluate_mlp(model,df, trainAttrX, trainY,sc):
print("mean difference : {:.2f}%, std: {:.2f}%".format(mean, std))
return preds
def compute_score(y_true, y_pred, sigma):
    """Laplace log-likelihood score (OSIC pulmonary-fibrosis metric).

    Parameters
    ----------
    y_true : array-like or scalar
        Ground-truth FVC values.
    y_pred : array-like or scalar
        Predicted FVC values.
    sigma : array-like or scalar
        Predicted confidence (standard deviation) for each prediction.

    Returns
    -------
    float
        Mean of the per-sample metric
        ``-sqrt(2)*delta/sigma_clip - log(sqrt(2)*sigma_clip)``,
        where sigma is clipped below at 70 and the absolute error
        is capped at 1000. Higher (less negative) is better.
    """
    fvc_pred = y_pred
    # Clip confidence from below so tiny sigmas cannot blow up the penalty
    sigma_clip = np.maximum(sigma, 70)
    # Cap the absolute error so extreme outliers do not dominate the score
    delta = np.minimum(np.abs(y_true - fvc_pred), 1000)
    sq2 = math.sqrt(2)
    metric = -(delta / sigma_clip) * sq2 - np.log(sigma_clip * sq2)
    return np.mean(metric)
......@@ -53,6 +53,7 @@ def create_cnn(width, height, depth, filters=(32, 64, 128), regress=False, type_
# la couche suivante ne sert à rien pour l'hybride, à garder pour le modèle cnn seule
if type_cnn == 'simple':
if regress:
x = Dropout(0.5)(x)
x = Dense(1, activation="linear")(x)
# construct the CNN
model = Model(inputs, x)
......@@ -88,6 +89,7 @@ def create_hybrid(nb_attributes,shape=(240,240,1)):
cnn = create_cnn(*shape,type_cnn='hybrid')
combinedInput = concatenate([mlp.output, cnn.output])
x = Dense(4, activation="tanh")(combinedInput) #tanh
x = Dropout(0.5)(x)
x = Dense(1, activation="linear")(x)
model = Model(inputs=[mlp.input, cnn.input], outputs=x)
return model
......@@ -161,6 +163,7 @@ def create_hybrid_transfer(nb_attributes,new_model, custom_model, modify_name,in
cnn = create_transfer_learning(new_model, custom_model, modify_name,input_channel)
combinedInput = concatenate([mlp.output, cnn.output])
x = Dense(4, activation="tanh")(combinedInput)
x = Dropout(0.5)(x)
x = Dense(1, activation="linear")(x)
model = Model(inputs=[mlp.input, cnn.input], outputs=x)
return model
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment