# (scraped site navigation text, not code: "Professional Documents", "Culture Documents")
import pandas as pd
import numpy as np
import re, os
#import gensim
import tensorflow as tf
import json
import warnings
warnings.filterwarnings("ignore")
# 2.  (notebook cell marker)
# Root directory for the dataset files.
DATA_HOME = 'data/'
# Load the labeled URL dataset; presumably contains a `url` column and the
# `isMalicious` label column used below — TODO confirm against the CSV header.
df = pd.read_csv(DATA_HOME + 'url_data_mega_deep_learning.csv')
# Peek at 25 random rows (notebook-style inspection; the value is discarded
# when run as a script).
df.sample(n=25).head(25)
# out 2:  (notebook cell-output marker)
# Fixed sequence length for the model input: URLs are truncated/padded to 75
# integer tokens.
max_len=75
# NOTE(review): `sequence` (presumably keras.preprocessing.sequence) and
# `url_int_tokens` (the per-character integer encoding of each URL) are built
# in cells missing from this excerpt — confirm before running.
X = sequence.pad_sequences(url_int_tokens, maxlen=max_len)
# Binary labels: 1 = malicious URL, 0 = benign (taken from the CSV column).
target = np.array(df.isMalicious)
def print_layers_dims(model):
    """Print every layer object of *model*, one per line.

    NOTE(review): despite the name, this prints the layer objects themselves
    (their repr), not their output dimensions — kept as-is to preserve the
    original behavior.

    Parameters
    ----------
    model : object
        Anything exposing a ``layers`` sequence (e.g. a Keras model).
    """
    # Iterate the layers directly instead of indexing with range(len(...)).
    for layer in model.layers:
        print(layer)
def save_model(fileModelJSON, fileWeights):
    """Persist the module-level ``model`` to disk.

    The architecture (``model.to_json()``) is JSON-dumped to *fileModelJSON*
    and the weights are written to *fileWeights*; any pre-existing file at
    either path is deleted first.

    NOTE(review): relies on a module-level ``model`` being defined.
    """
    # Remove a stale architecture file, then write the fresh one.
    if os.path.isfile(fileModelJSON):
        os.remove(fileModelJSON)
    with open(fileModelJSON, 'w') as fh:
        json.dump(model.to_json(), fh)
    # Same for the weights file; save_weights writes the HDF5 payload.
    if os.path.isfile(fileWeights):
        os.remove(fileWeights)
    model.save_weights(fileWeights)
def load_model(fileModelJSON, fileWeights):
    """Rebuild a Keras model from a saved architecture + weights pair.

    Bug fix: the original called ``json.load(f)`` on a file handle ``f`` that
    was never opened (``fileModelJSON`` was ignored), raising NameError on
    every call. The architecture file is now actually opened and read.

    Parameters
    ----------
    fileModelJSON : str
        Path to the JSON architecture file written by ``save_model``.
    fileWeights : str
        Path to the matching weights file.

    Returns
    -------
    The reconstructed model with weights loaded.

    NOTE(review): ``model_from_json`` (Keras) must be imported elsewhere in
    the file — confirm the import exists.
    """
    with open(fileModelJSON, 'r') as f:
        model_json = json.load(f)
    model = model_from_json(model_json)
    model.load_weights(fileWeights)
    return model
# Input
# Embedding layer
# NOTE(review): garbled scrape — this line is the tail of a missing
# Embedding(...) call (its opening and other arguments were lost), applied to
# a `main_input` defined in a missing cell.
W_regularizer=W_reg)(main_input)
emb = Dropout(0.25)(emb)
def sum_1d(X):
# NOTE(review): the lines below do not belong to `sum_1d`; they look like the
# tail of a separate conv-block builder whose `conv = Convolution1D(...)` call
# opening was lost in the scrape. `border_mode='same'` is old-Keras spelling
# of `padding='same'` — TODO confirm target Keras version.
border_mode='same')(emb)
conv = ELU()(conv)
#conv = BatchNormalization(mode=0)(conv)
conv = Dropout(0.5)(conv)
return conv
# NOTE(review): interior of a model-builder function (presumably `conv_fully`,
# called at the bottom of the file); its `def` line and the construction of
# `merged` are missing from this excerpt.
# First fully-connected block: Dense -> ELU -> BatchNorm -> Dropout.
hidden1 = Dense(1024)(merged)
hidden1 = ELU()(hidden1)
# `mode=0` is old-Keras BatchNormalization API — TODO confirm Keras version.
hidden1 = BatchNormalization(mode=0)(hidden1)
hidden1 = Dropout(0.5)(hidden1)
# Second fully-connected block, same shape as the first.
hidden2 = Dense(1024)(hidden1)
hidden2 = ELU()(hidden2)
hidden2 = BatchNormalization(mode=0)(hidden2)
hidden2 = Dropout(0.5)(hidden2)
# Returns the assembled model; the output layer/compile step is not visible here.
return model
# Training hyperparameters.
epochs = 5
batch_size = 32
# Build the 1D-conv model; `conv_fully` is defined in a cell partially missing
# from this excerpt.
model = conv_fully()
print_layers_dims(model)
# NOTE(review): `target_proba` (model predictions, presumably from
# model.predict) is computed in a missing cell — this previews 10 values.
target_proba[0:10]
model_name = "deeplearning_1DConv"
# Inspect the learned weights of layer 1 (presumably the embedding layer —
# confirm against the printed layer list above).
l_layers = model.layers
weights = l_layers[1].get_weights()
weights[0].shape
# Example URLs for spot-checking the trained classifier: one known-phishing
# style URL and one legitimate booking URL.
test_url_mal = "naureen.net/etisalat.ae/index2.php"
test_url_benign = "sixt.com/php/reservation?language=en_US"
# Select which example to score.
url = test_url_benign
# Must match the training-time sequence length.
max_len=75
# NOTE(review): `sequence` and `url_int_tokens` (the integer encoding of `url`)
# come from cells missing here — this re-pads the single test URL for predict.
X = sequence.pad_sequences(url_int_tokens, maxlen=max_len)