You are on page 1 of 77

# --- Experiment 1: BiLSTM -> 2 conv blocks, categorical hinge loss ---
# (Extraction artifact fixed: the import statement and the two fit calls
# were split across lines in the original dump.)
from keras.initializers import Constant

model = Sequential()
# Pre-trained word vectors are injected via a Constant initializer.
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(3))

model.add(Conv1D(64, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

model.add(Dense(64, activation='tanh'))
model.add(Dense(6, activation='softmax'))
model.summary()

# NOTE(review): categorical hinge normally pairs with a linear (not softmax)
# output layer -- kept exactly as in the original experiment log.
model.compile(loss='categorical_hinge', optimizer='adam', metrics=['accuracy'])
train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=25, epochs=20)
# Second run continues training the same model with a smaller batch size.
train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=20, epochs=40)
# --- Experiment 2: same BiLSTM+CNN stack, cross-entropy loss, batch 30 ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(3))

model.add(Conv1D(64, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

model.add(Dense(64, activation='tanh'))
model.add(Dense(6, activation='softmax'))
model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=30, epochs=20)

# --- Experiment 3: same architecture, Nadam optimizer ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(3))

model.add(Conv1D(64, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

model.add(Dense(64, activation='tanh'))
model.add(Dense(6, activation='softmax'))
model.summary()

model.compile(loss='categorical_crossentropy', optimizer='nadam', metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=20, epochs=20)

# --- Experiment 4: hidden Dense switched from tanh to relu ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(3))

model.add(Conv1D(64, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

model.add(Dense(64, activation='relu'))
model.add(Dense(6, activation='softmax'))
model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=20, epochs=20)
# --- Experiment 5: deeper CNN (3 conv blocks) + 3 tanh Dense layers ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(3))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(3))

model.add(Conv1D(64, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

model.add(Dense(64, activation='tanh'))
model.add(Dense(64, activation='tanh'))
model.add(Dense(64, activation='tanh'))
model.add(Dense(6, activation='softmax'))
model.summary()
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=20, epochs=20)

# --- Experiment 6: wider head (512-unit Dense layers with dropout) ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(3))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(3))
model.add(Conv1D(64, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

model.add(Dense(512, activation='tanh'))
model.add(Dropout(0.2))
model.add(Dense(512, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(6, activation='softmax'))
model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=20, epochs=20)
# --- Experiment 7: unidirectional LSTM(128) front-end, Adamax (no fit logged) ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(LSTM(128, return_sequences=True))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(5))

model.add(Conv1D(128, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

model.add(Dense(64, activation='relu'))

model.add(Dense(64, activation='relu'))

model.add(Dense(6, activation='softmax'))
model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adamax', metrics=['accuracy'])

# --- Experiment 8: BiLSTM + single conv, three relu Dense layers (no fit logged) ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

model.add(Dense(64, activation='relu'))
model.add(Dense(64, activation='relu'))
model.add(Dense(64, activation='relu'))

model.add(Dense(6, activation='softmax'))
model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adamax', metrics=['accuracy'])

# --- Experiment 9: single tanh Dense head, batch 70, 50 epochs ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

model.add(Dense(64, activation='tanh'))

model.add(Dense(6, activation='softmax'))
model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adamax', metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=70, epochs=50)

# --- Experiment 10: stacked 32-filter convs, low learning rate ---
# NOTE: Adam's `lr` argument is deprecated/removed in modern Keras;
# replaced with `learning_rate` (same value, same behavior).
# Assumes `Adam` was imported in an earlier notebook cell
# (e.g. `from keras.optimizers import Adam`) -- TODO confirm.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))
model.add(Dropout(0.2))

model.add(Conv1D(32, 3, activation='relu'))
model.add(Conv1D(32, 3, activation='relu'))
model.add(MaxPooling1D(3))

model.add(Conv1D(32, 3, activation='relu'))
model.add(Conv1D(32, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

model.add(Dense(64, activation='tanh'))
model.add(Dense(64, activation='tanh'))
model.add(Dense(64, activation='tanh'))
model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer=Adam(learning_rate=0.0001),
              metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=50, epochs=100)

# --- Experiment 11: BiLSTM(128), even lower learning rate ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(128, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(32, 3, activation='relu'))
model.add(MaxPooling1D(3))
model.add(Conv1D(32, 3, activation='relu'))
model.add(MaxPooling1D(3))
model.add(Conv1D(32, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

# Flatten after GlobalMaxPooling1D is a no-op (output is already 2-D);
# kept to preserve the original experiment exactly.
model.add(Flatten())
model.add(Dense(64, activation='relu'))
model.add(Dense(64, activation='relu'))
model.add(Dense(6, activation='softmax'))

model.summary()
model.compile(loss='categorical_crossentropy', optimizer=Adam(learning_rate=0.00001),
              metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=32, epochs=40)

# --- Experiment 12: average pooling instead of max pooling ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(128, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(32, 3, activation='relu'))
model.add(AveragePooling1D(3))

model.add(Conv1D(32, 3, activation='relu'))
model.add(AveragePooling1D(3))

model.add(Conv1D(32, 3, activation='relu'))
model.add(AveragePooling1D(3))

model.add(Flatten())

model.add(Dense(64, activation='relu'))
model.add(Dense(64, activation='relu'))
model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer=Adam(learning_rate=0.0001),
              metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=16, epochs=50)

# --- Experiment 13: VGG-style deep conv stack after BiLSTM(128) ---
# `Adam(lr=...)` updated to the non-deprecated `learning_rate` keyword.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(128, return_sequences=True)))
model.add(Dropout(0.2))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(512, 3, activation='relu'))
model.add(Conv1D(512, 3, activation='relu'))
model.add(Conv1D(512, 3, activation='relu'))
model.add(Conv1D(512, 3, activation='relu'))
model.add(MaxPooling1D())

# NOTE(review): these last two convs have no activation (linear) in the
# original log -- possibly unintentional, preserved as-is.
model.add(Conv1D(512, 3))
model.add(Conv1D(512, 3))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(4096, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(4096, activation='relu'))
model.add(Dropout(0.5))

model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer=Adam(learning_rate=0.0001),
              metrics=['accuracy'])
train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=32, epochs=200)
# --- Experiment 14: BiLSTM(512), 4 conv blocks, sigmoid hidden layers ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(512, return_sequences=True)))
model.add(Dropout(0.2))

model.add(Conv1D(32, 3, activation='relu'))
model.add(Conv1D(32, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dropout(0.3, noise_shape=None, seed=None))

# NOTE(review): 516/216 look like typos for 512/256, preserved from the log.
model.add(Dense(516, activation='sigmoid'))
model.add(Dropout(0.4, noise_shape=None, seed=None))

model.add(Dense(216, activation='sigmoid'))
model.add(Dropout(0.2, noise_shape=None, seed=None))

model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=50, epochs=30)
show_plot(train_history)

# --- Experiment 15: BiLSTM(1024), relu/tanh head ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(1024, return_sequences=True)))
model.add(Dropout(0.2))

model.add(Conv1D(32, 3, activation='relu'))
model.add(Conv1D(32, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dropout(0.3, noise_shape=None, seed=None))

model.add(Dense(1024, activation='relu'))
model.add(Dropout(0.4, noise_shape=None, seed=None))

model.add(Dense(216, activation='tanh'))
model.add(Dropout(0.2, noise_shape=None, seed=None))

model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=50, epochs=30)
show_plot(train_history)
# --- Experiment 16: BiLSTM(512), no dropout on LSTM, RMSprop ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(512, return_sequences=True)))

model.add(Conv1D(32, 3, activation='relu'))
model.add(Conv1D(32, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))

model.add(Dense(1024, activation='relu'))
model.add(Dropout(0.4, noise_shape=None, seed=None))

model.add(Dense(216, activation='relu'))

model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=64, epochs=50)
show_plot(train_history)

# --- Experiment 17: BiLSTM(700), 4-deep 256 conv block, RMSprop ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(700, return_sequences=True)))

model.add(Conv1D(32, 3, activation='relu'))
model.add(Conv1D(32, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))

model.add(Dense(1024, activation='relu'))
model.add(Dropout(0.4, noise_shape=None, seed=None))

model.add(Dense(216, activation='relu'))

model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=64, epochs=50)
show_plot(train_history)
# --- Experiment 18: BiLSTM(256), tapering Dense head (1024-512-128), Adamax ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))

model.add(Conv1D(32, 3, activation='relu'))
model.add(Conv1D(32, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(512, activation='relu'))
model.add(Dense(128, activation='relu'))

model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adamax', metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=64, epochs=50)
show_plot(train_history)

# --- Experiment 19: small BiLSTM+CNN, categorical hinge, long training run ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(128, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(3))

model.add(Conv1D(64, 3, activation='relu'))
model.add(GlobalMaxPooling1D())

model.add(Dense(64, activation='relu'))
model.add(Dense(6, activation='softmax'))

model.summary()

# NOTE(review): hinge loss with a softmax output, as in experiment 1 --
# preserved from the original log.
model.compile(loss='categorical_hinge', optimizer='adam', metrics=['accuracy'])
train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=128, epochs=200)
show_plot(train_history)

# Global hyperparameters for the 4-class experiments below.
# NOTE(review): `Batchsize` is never read by the visible fit calls (they pass
# literal batch_size values) -- kept for compatibility with unseen cells.
Batchsize = 400
embedding_dim = 250
max_words = 160

# --- Experiment 20: BiLSTM(512) + 2 conv blocks, 4-class output ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(512, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())
model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
# embedding matrix
print('preparing embedding matrix...')

MAX_NB_WORDS = 100000
words_not_found = []
nb_words = min(MAX_NB_WORDS, len(word2id))

# For each word in the vocabulary, store its pre-trained word vector;
# the resulting matrix is used to initialize the Embedding layer.
embedding_matrix = np.zeros((nb_words, embedding_dim))
for word, i in word2id.items():
  # Skip indices beyond the vocabulary cap.
  if i >= nb_words:
    continue
  try:
    # Gensim-style lookup raises KeyError for out-of-vocabulary words;
    # narrowed from a bare `except:` so real errors are not swallowed.
    embedding_vector = model_fasttext[word]
    if (embedding_vector is not None) and len(embedding_vector) > 0:
      embedding_matrix[i] = embedding_vector
    else:
      words_not_found.append(word)
  except KeyError:
    words_not_found.append(word)

# --- Experiment 21: BiLSTM(256) + 3 conv blocks, 4-class output ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.summary()
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# --- Experiment 22: BiLSTM-only baseline (no convs), low learning rate ---
# Assumes `Adam` was imported in an earlier notebook cell -- TODO confirm.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))
model.add(Dropout(0.5))
model.add(Flatten())
model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer=Adam(learning_rate=0.0001),
              metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=64, epochs=20)
show_plot(train_history)

# --- Experiment 23: same baseline, lighter dropout, default Adam ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))
model.add(Dropout(0.2))
model.add(Flatten())
model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=64, epochs=20)
show_plot(train_history)

# --- Experiment 24: baseline + 3 small conv blocks, batch 256, 100 epochs ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))
model.add(Dropout(0.2))

model.add(Conv1D(32, 3, activation='relu'))
model.add(MaxPooling1D(2))

model.add(Conv1D(32, 3, activation='relu'))
model.add(MaxPooling1D(2))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(2))

model.add(Flatten())

model.add(Dense(64, activation='relu'))
model.add(Dense(6, activation='softmax'))

model.summary()
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=256, epochs=100)
show_plot(train_history)

# --- Experiment 25: baseline again at batch 256, 100 epochs ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(64, return_sequences=True)))
model.add(Dropout(0.2))

model.add(Flatten())
model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=256, epochs=100)
show_plot(train_history)

Thêm 1 lớp conv (add one convolutional layer)

# --- Experiment 26: convolution BEFORE the BiLSTM (no fit logged) ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Conv1D(32, 3, activation='relu'))
model.add(MaxPooling1D(2))

# Final BiLSTM returns only the last state (no return_sequences),
# so no Flatten is needed before the classifier.
model.add(Bidirectional(LSTM(64)))
model.add(Dropout(0.2))

model.add(Dense(6, activation='softmax'))

model.summary()

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
thêm 1 lớp conv (add one convolutional layer)
(tích chập -> LSTM: convolution layers feeding into the LSTM)
# --- Experiment 27: 3 conv blocks feeding a final BiLSTM(256), 4-class ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Bidirectional(LSTM(256)))
model.add(Dropout(0.5))

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
--------------------------- 15/12/2020 -----------------------

Khi chỉ xài tích chập đơn (when using only single convolution layers):

# --- Experiment 28: single (non-stacked) conv layers, 4-class ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))
model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
Khi xóa 1 lớp tích chập (when removing one convolution layer):

# --- Experiment 29: two conv layers (64, 128), 4-class ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

# --- Experiment 30: two conv layers, both 64 filters ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

# --- Experiment 31: three conv layers, all 64 filters ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
Khi xóa 2 lớp tích chập (when removing two convolution layers):

# --- Experiment 32: single conv layer, 64 filters, 4-class ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

# --- Experiment 33: single conv layer, 128 filters ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
Không có ANN (without the fully-connected/ANN head)

# --- Experiment 34: no hidden Dense head (classifier directly on features) ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

# --- Experiment 35: one 516-unit hidden Dense layer ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())
model.add(Dense(516, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

# --- Experiment 36: stacked convs, two hidden Dense layers (1024, 256) ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())
model.add(Dense(1024, activation='relu'))
model.add(Dense(256, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

# --- Experiment 37: BiLSTM widened to 516, Dense head 1024/516 ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(516, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())
model.add(Dense(1024, activation='relu'))
model.add(Dense(516, activation='relu'))
model.add(Dense(4, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

# --- Experiment 38: four single-conv blocks (64, 64, 128, 256), 4-class ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())
model.add(Flatten())
model.add(Dense(1024, activation='relu'))
model.add(Dense(128, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

# --- Experiment 39: same head, one fewer conv block ---
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
    embedding_dim,
    embeddings_initializer=Constant(embedding_matrix),
    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())
model.add(Dense(1024, activation='relu'))
model.add(Dense(128, activation='relu'))
model.add(Dense(4, activation='softmax'))

model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
# Bi-LSTM(256) + three double-conv stages + 6-class softmax head.
# (Relies on Constant being imported earlier in the document.)
model = Sequential()
# Rejoined: the extraction split this call mid-token ('embe' / 'dding_matrix').
model.add(Embedding(len(word2id), embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.2))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(6, activation='softmax'))

# This variant prints the summary before compiling (order preserved).
model.summary()
# Rejoined: the extraction split this line inside the metrics list.
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

# Bi-LSTM(256, no dropout) + three double-conv stages + 4-class softmax head.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())
model.add(Flatten())
model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
# Bi-LSTM(512) + three conv/pool stages + 4-class softmax head.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(512, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())
model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
Data cũ (trước augment)

# Baseline on the pre-augmentation data: Bi-LSTM(512) straight into a 4-class
# softmax (no conv layers).
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(512)))
model.add(Dropout(0.5))

model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
# Rejoined: 'batch_' / 'size=64' was split across lines by the extraction.
train_history = model.fit(x_train, y_train, validation_data=(x_test, y_test),
                          batch_size=64, epochs=15)
show_plot(train_history)
Data mới :
Thêm lớp tích chập :

# Bi-LSTM(256) + two conv/pool stages (64, 128) + dense head, 4 classes.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
Thêm lớp tích chập 64 :

# Bi-LSTM(256) + three conv/pool stages (64, 64, 128) + dense head, 4 classes.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
Thêm các lớp tích chập mới :
# Bi-LSTM(256) + three double-conv stages (64x2, 64x2, 128x2) + dense head.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
New :

# "New" variant: Bi-LSTM(256) + two conv/pool stages, softmax directly after
# Flatten (no hidden dense layer).
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
New 2 :

# "New 2": Bi-LSTM(256) + a single 128-filter conv/pool stage.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

New 3 :

# "New 3": Bi-LSTM(256) + a single 64-filter conv/pool stage.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
New 4 :

# "New 4": Bi-LSTM(256) + three stacked convs (64, 128, 128) with one pool.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

New 5 :
# "New 5": Bi-LSTM(256), lighter dropout (0.25), four stacked convs
# (64, 64, 128, 128) with one pool.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.25))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

New 6
# "New 6": Bi-LSTM(256), two convs (64, 128), no dropout and no pooling.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))

model.add(Flatten())

model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

New 7 :

# "New 7": Bi-LSTM(256) + three double-conv stages (64x2, 128x2, 256x2) and a
# deep dense head (1024 -> 516 -> 4).
# NOTE(review): 516 units looks like a typo for 512 — confirm.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))
model.add(Bidirectional(LSTM(256, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())
model.add(Dense(1024, activation='relu'))
model.add(Dense(516, activation='relu'))
model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

New 8 : Best
# Train a skip-gram (sg=1) FastText embedding over the raw training sentences.
# NOTE(review): `size=` and `.iter` are gensim<4 names (vector_size / epochs in
# gensim 4) — confirm the installed gensim version.
model_fasttext = FastText(size=379, window=17, min_count=3, workers=4, sg=1)
model_fasttext.build_vocab(data_train)
# Rejoined: the extraction wrapped this call mid-expression.
model_fasttext.train(data_train, total_examples=model_fasttext.corpus_count,
                     epochs=model_fasttext.iter)
# Build the word -> integer-id vocabulary; id 0 is reserved for the
# empty/padding token. Ids are assigned in first-seen order.
word2id = {'': 0}
max_words = 300  # maximum sequence length fed to the Embedding layer

for sentence in data_train:
    # Sentences are pre-tokenized by single spaces.
    for word in sentence.split(' '):
        if word not in word2id:
            word2id[word] = len(word2id)

# Inverse mapping for decoding ids back to words.
id2word = {v: k for k, v in word2id.items()}

# "New 8" (reported best): Bi-LSTM(256) + three double-conv stages
# (64x2, 128x2, 256x2) and a deep dense head (1024 -> 516 -> 4).
# NOTE(review): 516 units looks like a typo for 512 — confirm.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())
model.add(Dense(1024, activation='relu'))
model.add(Dense(516, activation='relu'))
model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
New 9 :

# "New 9": like New 8 but with dropout after the Bi-LSTM and a 64/256/128
# filter schedule across the three double-conv stages.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(256, 3, activation='relu'))
model.add(Conv1D(256, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())
model.add(Dense(1024, activation='relu'))
model.add(Dense(516, activation='relu'))
model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

New 10 :

# "New 10" (fragment — Sequential()/Embedding creation is not shown here):
# Bi-LSTM(256) followed by three double-conv/pool stages and a dense head.
# Adding from a layer list keeps the architecture readable at a glance.
new10_layers = [
    Bidirectional(LSTM(256, return_sequences=True)),
    Dropout(0.5),

    Conv1D(64, 3, activation='relu'),
    Conv1D(128, 3, activation='relu'),
    MaxPooling1D(),

    Conv1D(256, 3, activation='relu'),
    Conv1D(512, 3, activation='relu'),
    MaxPooling1D(),

    Conv1D(128, 3, activation='relu'),
    Conv1D(64, 3, activation='relu'),
    MaxPooling1D(),

    Flatten(),
    Dense(1024, activation='relu'),
    Dense(516, activation='relu'),
    Dense(4, activation='softmax'),
]
for layer in new10_layers:
    model.add(layer)
New code after update :

# Retrain the skip-gram FastText embedding on the updated corpus (data_ft).
# NOTE(review): `size=` and `.iter` are gensim<4 names — confirm version.
model_fasttext = FastText(size=379, window=17, min_count=3, workers=4, sg=1)
model_fasttext.build_vocab(data_ft)
# Rejoined: the extraction wrapped this call mid-expression.
model_fasttext.train(data_ft, total_examples=model_fasttext.corpus_count,
                     epochs=model_fasttext.iter)

# Rebuild the word -> id vocabulary from data_train; id 0 is the padding token.
word2id = {'': 0}
max_words = 300  # maximum sequence length fed to the Embedding layer

for sentence in data_train:
    # Sentences are pre-tokenized by single spaces.
    for word in sentence.split(' '):
        if word not in word2id:
            word2id[word] = len(word2id)

# Inverse mapping for decoding ids back to words.
id2word = {v: k for k, v in word2id.items()}

# Post-update model: Bi-LSTM(256) + two conv/pool stages + dense head.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))
model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

Test 1 :

# "Test 1": Bi-LSTM(256) + three conv/pool stages (64, 64, 128) + dense head.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(1024, activation='relu'))
model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

Test 2 :
# "Test 2": Bi-LSTM(256), dropout 0.25, four stacked convs with a single pool,
# softmax directly after Flatten.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.25))

model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(64, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(MaxPooling1D())

model.add(Flatten())

model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()
# Plain Bi-LSTM(256) baseline: embedding -> Bi-LSTM -> dropout -> 4-class softmax.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256)))
model.add(Dropout(0.5))

model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

# Stacked recurrent baseline: Bi-LSTM(256) feeding a second LSTM(256), then a
# 4-class softmax head.
from keras.initializers import Constant

model = Sequential()
model.add(Embedding(len(word2id),
                    embedding_dim,
                    embeddings_initializer=Constant(embedding_matrix),
                    input_length=max_words))

model.add(Bidirectional(LSTM(256, return_sequences=True)))
model.add(Dropout(0.5))
model.add(LSTM(256))
model.add(Dropout(0.5))
model.add(Dense(4, activation='softmax'))

# Rejoined: the extraction split this line mid-token ('accur' / 'acy').
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()

You might also like