Professional Documents
Culture Documents
ACTIVITY PERTEMUAN 5
NPM : 57418379
KELAS : 3IA07
MATERI : Sentimen Analisis DGX - 1
MATA PRAKTIKUM : Pengantar Kecerdasan Buatan
In [21]:
# Notebook setup: silence warnings, enable inline plots, and import the
# numeric / plotting stack used throughout this sentiment-analysis exercise.
import warnings
warnings.filterwarnings('ignore')
%matplotlib inline
import string
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import sklearn
# Enlarge axis tick labels on every subsequent matplotlib figure.
matplotlib.rc('xtick', labelsize=14)
matplotlib.rc('ytick', labelsize=14)
In [22]:
# Load the raw review corpus, one review per line.
# NOTE(review): the body of the `with` block lost its indentation in the
# export; restored here so the file handle is read while still open.
# Assumes each line is "<sentence>\t<label>" — confirm against the
# (missing) cell that splits `content` into `sentences` and `labels`.
with open("../../full_set.txt") as f:
    content = f.readlines()
In [23]:
content[0:10]
Out[23]:
In [24]:
In [25]:
sentences[0:10]
Out[25]:
In [26]:
labels[0:10]
Out[26]:
['0', '1', '1', '0', '1', '0', '0', '1', '0', '0']
In [27]:
# Recode the '0'/'1' string labels as int8 targets in {-1, +1}
# (0 becomes -1 for negative reviews, 1 becomes +1 for positive ones).
y = 2 * np.array(labels, dtype='int8') - 1
In [28]:
# NOTE(review): commented-out "del str" — suggests the builtin `str` was
# accidentally shadowed in an earlier cell; fix the shadowing assignment
# instead of deleting the builtin.
In [29]:
In [30]:
In [31]:
sents_processed[0:20]
Out[31]:
In [32]:
In [33]:
In [34]:
# Build a balanced held-out test set: 250 negative (y == -1) and 250
# positive (y == +1) reviews; everything else becomes training data.
np.random.seed(0)
test_index = np.append(
    # BUG FIX: the original sampled y==1 twice, so the "negative" half of
    # the test set was drawn from the positive class (and could overlap the
    # other draw). The corrected split cell later in the notebook samples
    # y==-1 first — mirror that here.
    np.random.choice(np.where(y == -1)[0], 250, replace=False),
    np.random.choice(np.where(y == 1)[0], 250, replace=False))
train_index = list(set(range(len(labels))) - set(test_index))
train_data = data_mat[train_index,]
train_labels = y[train_index]
test_data = data_mat[test_index,]
test_labels = y[test_index]
In [35]:
In [36]:
# Encode the cleaned sentences as integer id sequences, then pad/truncate
# each one to `max_review_length` so the LSTM sees a fixed-size tensor.
encoded = tokenizer.texts_to_sequences(sents_processed)
X = sequence.pad_sequences(encoded, maxlen=max_review_length)
print('Shape of data tensor:', X.shape)
Shape of data tensor: (3000, 200)
In [37]:
import pandas as pd

# One-hot encode the -1/+1 targets into two columns for the softmax
# output layer ([1, 0] = negative, [0, 1] = positive).
Y = pd.get_dummies(y).values
Y
Out[37]:
array([[1, 0],
[0, 1],
[0, 1],
...,
[1, 0],
[1, 0],
[1, 0]], dtype=uint8)
In [38]:
# Balanced 250/250 test split on the padded sequences. The two draws are
# made in the same order as before (negatives first) so the seeded RNG
# reproduces the identical split.
np.random.seed(0)
neg_sample = np.random.choice(np.where(y == -1)[0], 250, replace=False)
pos_sample = np.random.choice(np.where(y == 1)[0], 250, replace=False)
test_inds = np.append(neg_sample, pos_sample)
train_inds = list(set(range(len(labels))) - set(test_inds))
train_data = X[train_inds,]
train_labels = Y[train_inds]
test_data = X[test_inds,]
test_labels = Y[test_inds]
In [39]:
# Stacked-LSTM sentiment classifier:
# embedding -> spatial dropout -> LSTM(250) -> LSTM(100) -> softmax(2).
EMBEDDING_DIM = 200
model = Sequential()
for layer in (
    Embedding(10000, EMBEDDING_DIM, input_length=X.shape[1]),
    SpatialDropout1D(0.2),
    LSTM(250, dropout=0.2, return_sequences=True),
    LSTM(100, dropout=0.2, recurrent_dropout=0.2),
    Dense(2, activation='softmax'),
):
    model.add(layer)
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
print(model.summary())
Model: "sequential_2"
None
In [40]:
# Train briefly (2 epochs), holding out 10% of the training data for
# validation.
epochs = 2
batch_size = 40
fit_options = {'epochs': epochs, 'batch_size': batch_size, 'validation_split': 0.1}
model.fit(train_data, train_labels, **fit_options)
-
InternalError Traceback (most recent call las
t)
<ipython-input-40-0b400d79b3a7> in <module>
4 epochs=epochs,
5 batch_size=batch_size,
----> 6 validation_split=0.1)
/usr/local/lib/python3.5/dist-packages/keras/engine/training.py in fit(sel
f, x, y, batch_size, epochs, verbose, callbacks, validation_split, validat
ion_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_e
poch, validation_steps, validation_freq, max_queue_size, workers, use_mult
iprocessing, **kwargs)
1211 else:
1212 fit_inputs = x + y + sample_weights
-> 1213 self._make_train_function()
1214 fit_function = self.train_function
1215
/usr/local/lib/python3.5/dist-packages/keras/engine/training.py in _make_t
rain_function(self)
331 updates=updates + metrics_updates,
332 name='train_function',
--> 333 **self._function_kwargs)
334
335 def _make_test_function(self):
/usr/local/lib/python3.5/dist-packages/keras/backend/tensorflow_backend.py
in function(inputs, outputs, updates, **kwargs)
3004 def function(inputs, outputs, updates=None, **kwargs):
3005 if _is_tf_1():
-> 3006 v1_variable_initialization()
3007 return tf_keras_backend.function(inputs, outputs,
3008 updates=updates,
/usr/local/lib/python3.5/dist-packages/keras/backend/tensorflow_backend.py
in v1_variable_initialization()
418
419 def v1_variable_initialization():
--> 420 session = get_session()
421 with session.graph.as_default():
422 variables = tf.global_variables()
/usr/local/lib/python3.5/dist-packages/keras/backend/tensorflow_backend.py
in get_session()
383 '`get_session` is not available when '
384 'TensorFlow is executing eagerly.')
--> 385 return tf_keras_backend.get_session()
386
387
/usr/local/lib/python3.5/dist-packages/tensorflow/python/keras/backend.py
in get_session()
477 A TensorFlow session.
478 """
--> 479 session = _get_session()
480 if not _MANUAL_VAR_INIT:
481 with session.graph.as_default():
/usr/local/lib/python3.5/dist-packages/tensorflow/python/keras/backend.py
jupiternb.gunadarma.ac.id:8989/nbconvert/html/3IA07/Wisnu Trenggono Wirayuda/Pertemuan 5.ipynb?download=false 7/9
1/16/2021 Pertemuan 5
in _get_session()
455 if getattr(_SESSION, 'session', None) is None:
456 _SESSION.session = session_module.Session(
--> 457 config=get_default_session_config())
458 session = _SESSION.session
459 return session
/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py
in init (self, target, graph, config)
1549
1550 """
-> 1551 super(Session, self). init (target, graph, config=config)
1552 # NOTE(mrry): Create these on first ` enter ` to avoid a ref
erence cycle.
1553 self._default_graph_context_manager = None
/usr/local/lib/python3.5/dist-packages/tensorflow/python/client/session.py
in init (self, target, graph, config)
674 try:
675 # pylint: disable=protected-access
--> 676 self._session = tf_session.TF_NewSessionRef(self._graph._c_g
raph, opts)
677 # pylint: enable=protected-access
678 finally:
In [ ]:
In [ ]:
# Score the review currently held in `new`: encode and pad it exactly as
# the training data was, then report the softmax distribution and winner.
seq = tokenizer.texts_to_sequences(new)
pred = model.predict(sequence.pad_sequences(seq, maxlen=max_review_length))
print("Probability distribution: ", pred)
print("Is this a Positive or Negative review? ")
print(outcome_labels[np.argmax(pred)])
In [ ]:
# Classify another hand-entered review in `new` with the trained model.
seq = tokenizer.texts_to_sequences(new)
padded = sequence.pad_sequences(seq, maxlen=max_review_length)
pred = model.predict(padded)
print("Probability distribution: ", pred)
print("Is this a Positive or Negative review? ")
best = np.argmax(pred)
print(outcome_labels[best])
In [ ]:
# Same prediction pipeline once more for a third sample review.
seq = tokenizer.texts_to_sequences(new)
padded = sequence.pad_sequences(seq, maxlen=max_review_length)
pred = model.predict(padded)
print("Probability distribution: ", pred)
print("Is this a Positive or Negative review? ")
winner = outcome_labels[np.argmax(pred)]
print(winner)
In [ ]: