Skip to content

Instantly share code, notes, and snippets.

@edunuke
Created June 4, 2019 17:21
Show Gist options
  • Select an option

  • Save edunuke/8b537185e90ba5c1c047de554ec5222c to your computer and use it in GitHub Desktop.

Select an option

Save edunuke/8b537185e90ba5c1c047de554ec5222c to your computer and use it in GitHub Desktop.
def init(params):
    """Build, train, and evaluate the 3-branch embedding classifier (hyperopt objective).

    Three categorical inputs (user, risk, product) are each passed through an
    Embedding + Flatten branch, concatenated, batch-normalized, and fed into a
    stack of Dense/Dropout/BatchNorm blocks ending in a softmax over products.

    Relies on module-level globals — assumed defined elsewhere in the file
    (TODO confirm): keras, np, plot_model, risk_df, products_df, users_df,
    user_shape, risk_shape, product_val, n_users, n_risk, n_products,
    RESAMPLED, class_weight (sklearn.utils), TARGET_NAME, input_train,
    input_val, y_class_train, y_class_val, STATUS_OK (hyperopt).

    Parameters
    ----------
    params : dict
        Hyperparameter sample with keys "regularizer", "nodes1".."nodes5",
        "optimizer", "batch_size".

    Returns
    -------
    dict
        Hyperopt result: {'status', 'model', 'loss'} where 'loss' is the
        NEGATIVE validation accuracy (hyperopt minimizes, so this maximizes
        accuracy). On failure, 'model' is None and 'loss' is 0.0 (the worst
        possible value, since the success path returns values in [-1, 0]).
    """
    try:
        regularizer = params["regularizer"]
        constraint = keras.constraints.UnitNorm(axis=0)

        # Embedding width heuristic: 4th root of the category cardinality.
        risk_embed_dim = int(np.round(len(set(risk_df.credit_score)) ** (1 / 4)))
        products_embed_dim = int(np.round(len(set(products_df.product_code)) ** (1 / 4)))
        user_embed_dim = int(np.round(len(set(users_df.client_id)) ** (1 / 4)))

        # --- user branch ---
        user_input = keras.layers.Input(shape=(user_shape[-1] - 1,), name='user_input')
        user_embed = keras.layers.Embedding(input_dim=n_users,
                                            output_dim=user_embed_dim,
                                            embeddings_constraint=None,  # non_neg()
                                            name='user_embed')(user_input)
        user_vec = keras.layers.Flatten(name='user_vec')(user_embed)

        # --- risk branch ---
        risk_input = keras.layers.Input(shape=(risk_shape[-1] - 1,), name='risk_input')
        # BUGFIX: the original passed products_embed_dim here while
        # risk_embed_dim was computed but never used — clear copy-paste slip.
        risk_embed = keras.layers.Embedding(input_dim=n_risk,
                                            output_dim=risk_embed_dim,
                                            embeddings_constraint=None,  # non_neg()
                                            name='risk_embed')(risk_input)
        risk_vec = keras.layers.Flatten(name='risk_vec')(risk_embed)

        # --- products branch ---
        products_input = keras.layers.Input(shape=(product_val.shape[-1] - 1,),
                                            name='products_input')
        products_embed = keras.layers.Embedding(input_dim=n_products,
                                                output_dim=products_embed_dim,
                                                embeddings_constraint=None,  # non_neg()
                                                name='products_embed')(products_input)
        products_vec = keras.layers.Flatten(name='products_vec')(products_embed)

        # Merge all branches.
        concat_layer = keras.layers.concatenate([user_vec, risk_vec, products_vec],
                                                name='concat_layer')
        concat_layer = keras.layers.normalization.BatchNormalization()(concat_layer)

        # Fully connected tower: Dense -> Dropout(0.8) (-> BatchNorm) blocks.
        nn = keras.layers.Dropout(0.8)(
            keras.layers.Dense(params["nodes1"],
                               activation='relu',
                               use_bias=False,  # BatchNorm follows; bias is redundant
                               kernel_constraint=constraint,
                               activity_regularizer=regularizer)(concat_layer))
        nn = keras.layers.normalization.BatchNormalization()(nn)

        nn = keras.layers.Dropout(0.8)(
            keras.layers.Dense(params["nodes2"],
                               activation='relu',
                               kernel_constraint=constraint,
                               activity_regularizer=regularizer)(nn))
        nn = keras.layers.normalization.BatchNormalization()(nn)

        nn = keras.layers.Dropout(0.8)(
            keras.layers.Dense(params["nodes3"],
                               activation='relu',
                               kernel_constraint=constraint,
                               activity_regularizer=regularizer)(nn))
        nn = keras.layers.normalization.BatchNormalization()(nn)

        # NOTE(review): no BatchNorm between the nodes4 and nodes5 blocks in
        # the original — preserved as-is; confirm whether that was intentional.
        nn = keras.layers.Dropout(0.8)(
            keras.layers.Dense(params["nodes4"],
                               activation='relu',
                               kernel_constraint=constraint,
                               activity_regularizer=regularizer)(nn))
        nn = keras.layers.Dropout(0.8)(
            keras.layers.Dense(params["nodes5"],
                               activation='relu',
                               kernel_constraint=constraint,
                               activity_regularizer=regularizer)(nn))
        nn = keras.layers.normalization.BatchNormalization()(nn)

        # Softmax classification head over products.
        # (Original comment said "sigmoid" — the activation is softmax.)
        result = keras.layers.Dense(n_products, activation='softmax',
                                    name='class_result')(nn)

        model = keras.models.Model(inputs=[user_input, products_input, risk_input],
                                   outputs=[result])
        losses = {"class_result": 'categorical_crossentropy'}
        lossWeights = {"class_result": 1.0}
        metric = {"class_result": 'accuracy'}
        model.compile(optimizer=params['optimizer'],
                      loss=losses,
                      loss_weights=lossWeights,
                      metrics=metric)
        plot_model(model, to_file='images/model.png', show_shapes=True)
        print(model.summary())

        # Early stopping on validation loss.
        stopping_criteria = keras.callbacks.EarlyStopping(monitor='val_loss',
                                                          min_delta=0.001,
                                                          patience=2,
                                                          verbose=1,
                                                          mode='auto')
        NUM_EPOCHS = 500
        BATCH_SIZE = params["batch_size"]

        def poly_decay(epoch):
            """Polynomial LR decay: 0.2 * (1 - epoch/500) ** 0.3."""
            maxEpochs = 500
            baseLR = 0.2
            power = 0.3
            return baseLR * (1 - (epoch / float(maxEpochs))) ** power

        # If the training data was resampled, classes are already balanced;
        # otherwise compute balanced class weights from the product labels.
        if RESAMPLED:
            class_weights = None
        else:
            class_weights = class_weight.compute_class_weight(
                'balanced',
                products_df[TARGET_NAME].unique(),
                products_df[TARGET_NAME].values)

        # Train.
        model_hist = model.fit(input_train,
                               {"class_result": y_class_train.values},
                               validation_data=(input_val,
                                                {"class_result": y_class_val.values}),
                               epochs=NUM_EPOCHS,
                               batch_size=BATCH_SIZE,
                               verbose=1,
                               shuffle=True,
                               class_weight={"class_result": class_weights},
                               callbacks=[stopping_criteria,
                                          keras.callbacks.LearningRateScheduler(poly_decay)])

        score = model_hist.model.evaluate(input_val,
                                          {"class_result": y_class_val.values},
                                          verbose=0)
        # Negate accuracy so that hyperopt's minimization maximizes accuracy.
        accuracy = -score[1]
        return {'status': STATUS_OK,
                'model': model_hist,
                'loss': accuracy}
    except Exception as e:
        # BUGFIX: the original handler caught BaseException (swallowing
        # KeyboardInterrupt/SystemExit) and returned model_hist/accuracy,
        # which are undefined (NameError) when the failure occurs before
        # fit/evaluate. Report the worst-possible loss (0.0) so hyperopt
        # discards the trial. NOTE(review): hyperopt's STATUS_FAIL would be
        # cleaner here if it is imported at module level — confirm.
        import traceback
        traceback.print_exc()
        print(e)
        return {'status': STATUS_OK,
                'model': None,
                'loss': 0.0}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment