Cainvas

Fake News Classification App

Credit: AITS Cainvas Community

Photo by Kait Cooper on Dribbble

In [1]:
import pandas as pd
import numpy as np
import tensorflow as tf
In [2]:
# importing the dataset
df = pd.read_csv('https://cainvas-static.s3.amazonaws.com/media/user_data/cainvas-admin/dataset.csv')
df.head()
Out[2]:
id title author text label
0 0 House Dem Aide: We Didn’t Even See Comey’s Let... Darrell Lucus House Dem Aide: We Didn’t Even See Comey’s Let... 1
1 1 FLYNN: Hillary Clinton, Big Woman on Campus - ... Daniel J. Flynn Ever get the feeling your life circles the rou... 0
2 2 Why the Truth Might Get You Fired Consortiumnews.com Why the Truth Might Get You Fired October 29, ... 1
3 3 15 Civilians Killed In Single US Airstrike Hav... Jessica Purkiss Videos 15 Civilians Killed In Single US Airstr... 1
4 4 Iranian woman jailed for fictional unpublished... Howard Portnoy Print \nAn Iranian woman has been sentenced to... 1
In [3]:
# Drop the Nan Values
df=df.dropna()
In [4]:
df.label.value_counts()
Out[4]:
0    10361
1     7924
Name: label, dtype: int64
In [5]:
# Class count
count_class_0, count_class_1 = df.label.value_counts()

# Divide by class
df_class_0 = df[df['label'] == 0]
df_class_1 = df[df['label'] == 1]
In [6]:
# Oversample 1-class and concat the DataFrames of both classes
df_class_1_over = df_class_1.sample(count_class_0, replace=True)
df_test_over = pd.concat([df_class_0, df_class_1_over], axis=0)

print(df_test_over.label.value_counts())

X = df_test_over.drop('label',axis='columns')
y = df_test_over['label']
1    10361
0    10361
Name: label, dtype: int64
In [7]:
X.shape, y.shape
Out[7]:
((20722, 4), (20722,))
In [24]:
from tensorflow.keras import callbacks
from tensorflow.keras.layers import Embedding
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow.keras.models import Sequential
from tensorflow.keras.preprocessing.text import one_hot
from tensorflow.keras.layers import LSTM
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Bidirectional
from tensorflow.keras.layers import Dropout

Onehot Representation

In [9]:
# Vocabulary size
voc_size=30
In [10]:
X_copy=X.copy()
In [11]:
X_copy.reset_index(inplace=True)
In [12]:
import nltk
import re
from nltk.corpus import stopwords
In [13]:
nltk.download('stopwords')
[nltk_data] Downloading package stopwords to /home/jupyter-
[nltk_data]     gunjan/nltk_data...
[nltk_data]   Package stopwords is already up-to-date!
Out[13]:
True
In [14]:
from nltk.stem.porter import PorterStemmer
ps = PorterStemmer()
corpus = []
for i in range(0, len(X_copy)):
    review = re.sub('[^a-zA-Z]', ' ', X_copy['title'][i])
    review = review.lower()
    review = review.split()
    
    review = [ps.stem(word) for word in review if not word in stopwords.words('english')]
    review = ' '.join(review)
    corpus.append(review)
In [15]:
corpus[:20]
Out[15]:
['flynn hillari clinton big woman campu breitbart',
 'jacki mason hollywood would love trump bomb north korea lack tran bathroom exclus video breitbart',
 'beno hamon win french socialist parti presidenti nomin new york time',
 'back channel plan ukrain russia courtesi trump associ new york time',
 'obama organ action partner soro link indivis disrupt trump agenda',
 'bbc comedi sketch real housew isi caus outrag',
 'major leagu soccer argentin find home success new york time',
 'well fargo chief abruptli step new york time',
 'chuck todd buzzfe donald trump polit favor breitbart',
 'monica lewinski clinton sex scandal set american crime stori',
 'rob reiner trump mental unstabl breitbart',
 'abort pill order rise latin american nation zika alert new york time',
 'exclus islam state support vow shake west follow manchest terrorist massacr breitbart',
 'andrea tantaro fox news claim retali sex harass complaint new york time',
 'hillari clinton becam hawk new york time',
 'chuck todd buzzfe eic publish fake news breitbart',
 'bori johnson brexit leader fumbl new york time',
 'texa oil field rebound price lull job left behind new york time',
 'bayer deal monsanto follow agribusi trend rais worri farmer new york time',
 'russia move ban jehovah wit extremist new york time']
In [16]:
onehot_repr=[one_hot(words,voc_size)for words in corpus] 
onehot_repr[:20]
Out[16]:
[[25, 9, 17, 23, 9, 26, 13],
 [12, 1, 9, 20, 19, 10, 9, 21, 4, 12, 7, 12, 8, 3, 13],
 [9, 16, 20, 5, 27, 20, 26, 11, 29, 28, 5],
 [28, 23, 18, 26, 29, 15, 10, 21, 29, 28, 5],
 [12, 15, 21, 17, 20, 13, 8, 20, 10, 6],
 [16, 7, 27, 16, 16, 22, 4, 15],
 [21, 9, 21, 23, 2, 20, 28, 29, 28, 5],
 [21, 24, 23, 29, 15, 29, 28, 5],
 [10, 17, 12, 14, 10, 2, 28, 13],
 [29, 29, 17, 27, 19, 28, 7, 4, 1],
 [25, 13, 10, 4, 10, 13],
 [19, 24, 29, 23, 2, 7, 28, 9, 29, 29, 28, 5],
 [8, 27, 12, 26, 11, 28, 13, 29, 28, 8, 11, 13],
 [14, 7, 16, 7, 15, 2, 27, 15, 22, 29, 28, 5],
 [9, 17, 9, 7, 29, 28, 5],
 [10, 17, 12, 14, 12, 5, 7, 13],
 [28, 25, 17, 8, 24, 29, 28, 5],
 [13, 9, 13, 20, 7, 18, 6, 17, 25, 29, 28, 5],
 [17, 3, 4, 29, 9, 27, 13, 24, 5, 29, 28, 5],
 [29, 2, 17, 3, 18, 7, 29, 28, 5]]

Embedding Representation

In [17]:
sent_length=20
embedded_docs=pad_sequences(onehot_repr,padding='pre',maxlen=sent_length)
print(embedded_docs)
[[ 0  0  0 ...  9 26 13]
 [ 0  0  0 ...  8  3 13]
 [ 0  0  0 ... 29 28  5]
 ...
 [ 0  0  0 ...  2  9 15]
 [ 0  0  0 ... 11  9 26]
 [ 0  0  0 ... 13 18 27]]
In [18]:
embedded_docs[0]
Out[18]:
array([ 0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0,  0, 25,  9, 17, 23,
        9, 26, 13], dtype=int32)

Building the model

In [19]:
embedding_vector_features=40
model1=Sequential()
model1.add(Embedding(voc_size,embedding_vector_features,input_length=sent_length))
model1.add(Dropout(0.7))
model1.add(Bidirectional(LSTM(100)))
model1.add(Dropout(0.7))
model1.add(Dense(1,activation='sigmoid'))
model1.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])
model1.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
embedding (Embedding)        (None, 20, 40)            1200      
_________________________________________________________________
dropout (Dropout)            (None, 20, 40)            0         
_________________________________________________________________
bidirectional (Bidirectional (None, 200)               112800    
_________________________________________________________________
dropout_1 (Dropout)          (None, 200)               0         
_________________________________________________________________
dense (Dense)                (None, 1)                 201       
=================================================================
Total params: 114,201
Trainable params: 114,201
Non-trainable params: 0
_________________________________________________________________
In [20]:
X_final=np.array(embedded_docs)
y_final=np.array(y)
In [21]:
X_final.shape,y_final.shape
Out[21]:
((20722, 20), (20722,))
In [22]:
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X_final, y_final, test_size=0.33, random_state=42, stratify=y)

Training Model

In [25]:
model_name = "fake_news.h5"

cb = [
    callbacks.EarlyStopping(monitor = 'val_loss', patience = 5, restore_best_weights = True),
    callbacks.ModelCheckpoint(model_name, monitor = "val_loss", save_best_only = True)
    ]

history = model1.fit(X_train, y_train, validation_data=(X_test,y_test), epochs=50, batch_size=64, callbacks = cb)
Epoch 1/50
217/217 [==============================] - 6s 27ms/step - loss: 0.4415 - accuracy: 0.7977 - val_loss: 0.3099 - val_accuracy: 0.8839
Epoch 2/50
217/217 [==============================] - 5s 21ms/step - loss: 0.3412 - accuracy: 0.8669 - val_loss: 0.2909 - val_accuracy: 0.8921
Epoch 3/50
217/217 [==============================] - 4s 20ms/step - loss: 0.3174 - accuracy: 0.8806 - val_loss: 0.2921 - val_accuracy: 0.8975
Epoch 4/50
217/217 [==============================] - 4s 20ms/step - loss: 0.3058 - accuracy: 0.8878 - val_loss: 0.2850 - val_accuracy: 0.8917
Epoch 5/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2978 - accuracy: 0.8909 - val_loss: 0.2794 - val_accuracy: 0.8968
Epoch 6/50
217/217 [==============================] - 4s 20ms/step - loss: 0.2984 - accuracy: 0.8919 - val_loss: 0.2767 - val_accuracy: 0.8963
Epoch 7/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2923 - accuracy: 0.8910 - val_loss: 0.2682 - val_accuracy: 0.9013
Epoch 8/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2867 - accuracy: 0.8930 - val_loss: 0.2667 - val_accuracy: 0.9014
Epoch 9/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2980 - accuracy: 0.8874 - val_loss: 0.2706 - val_accuracy: 0.9009
Epoch 10/50
217/217 [==============================] - 4s 20ms/step - loss: 0.2886 - accuracy: 0.8926 - val_loss: 0.2683 - val_accuracy: 0.8988
Epoch 11/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2892 - accuracy: 0.8925 - val_loss: 0.2636 - val_accuracy: 0.8988
Epoch 12/50
217/217 [==============================] - 4s 21ms/step - loss: 0.2866 - accuracy: 0.8914 - val_loss: 0.2673 - val_accuracy: 0.8984
Epoch 13/50
217/217 [==============================] - 3s 13ms/step - loss: 0.2867 - accuracy: 0.8904 - val_loss: 0.2668 - val_accuracy: 0.8953
Epoch 14/50
217/217 [==============================] - 4s 19ms/step - loss: 0.2836 - accuracy: 0.8943 - val_loss: 0.2609 - val_accuracy: 0.9016
Epoch 15/50
217/217 [==============================] - 4s 19ms/step - loss: 0.2807 - accuracy: 0.8948 - val_loss: 0.2613 - val_accuracy: 0.9001
Epoch 16/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2803 - accuracy: 0.8953 - val_loss: 0.2581 - val_accuracy: 0.9010
Epoch 17/50
217/217 [==============================] - 4s 19ms/step - loss: 0.2797 - accuracy: 0.8958 - val_loss: 0.2622 - val_accuracy: 0.9007
Epoch 18/50
217/217 [==============================] - 4s 20ms/step - loss: 0.2731 - accuracy: 0.8972 - val_loss: 0.2636 - val_accuracy: 0.8968
Epoch 19/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2737 - accuracy: 0.8949 - val_loss: 0.2581 - val_accuracy: 0.8981
Epoch 20/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2724 - accuracy: 0.8953 - val_loss: 0.2574 - val_accuracy: 0.8982
Epoch 21/50
217/217 [==============================] - 4s 19ms/step - loss: 0.2719 - accuracy: 0.8970 - val_loss: 0.2564 - val_accuracy: 0.8990
Epoch 22/50
217/217 [==============================] - 4s 20ms/step - loss: 0.2709 - accuracy: 0.8981 - val_loss: 0.2536 - val_accuracy: 0.9025
Epoch 23/50
217/217 [==============================] - 4s 19ms/step - loss: 0.2692 - accuracy: 0.8978 - val_loss: 0.2516 - val_accuracy: 0.9019
Epoch 24/50
217/217 [==============================] - 4s 21ms/step - loss: 0.2694 - accuracy: 0.8996 - val_loss: 0.2531 - val_accuracy: 0.9033
Epoch 25/50
217/217 [==============================] - 4s 20ms/step - loss: 0.2672 - accuracy: 0.8987 - val_loss: 0.2512 - val_accuracy: 0.9029
Epoch 26/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2695 - accuracy: 0.8976 - val_loss: 0.2526 - val_accuracy: 0.9025
Epoch 27/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2671 - accuracy: 0.8991 - val_loss: 0.2539 - val_accuracy: 0.8987
Epoch 28/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2672 - accuracy: 0.8978 - val_loss: 0.2515 - val_accuracy: 0.9029
Epoch 29/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2653 - accuracy: 0.9003 - val_loss: 0.2526 - val_accuracy: 0.8993
Epoch 30/50
217/217 [==============================] - 4s 16ms/step - loss: 0.2640 - accuracy: 0.8999 - val_loss: 0.2503 - val_accuracy: 0.9035
Epoch 31/50
217/217 [==============================] - 4s 16ms/step - loss: 0.2654 - accuracy: 0.8999 - val_loss: 0.2560 - val_accuracy: 0.9003
Epoch 32/50
217/217 [==============================] - 4s 19ms/step - loss: 0.2658 - accuracy: 0.8984 - val_loss: 0.2496 - val_accuracy: 0.9033
Epoch 33/50
217/217 [==============================] - 4s 20ms/step - loss: 0.2631 - accuracy: 0.8992 - val_loss: 0.2509 - val_accuracy: 0.9007
Epoch 34/50
217/217 [==============================] - 4s 19ms/step - loss: 0.2587 - accuracy: 0.9002 - val_loss: 0.2522 - val_accuracy: 0.9007
Epoch 35/50
217/217 [==============================] - 4s 20ms/step - loss: 0.2607 - accuracy: 0.8996 - val_loss: 0.2513 - val_accuracy: 0.9032
Epoch 36/50
217/217 [==============================] - 4s 20ms/step - loss: 0.2590 - accuracy: 0.9012 - val_loss: 0.2518 - val_accuracy: 0.9022
Epoch 37/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2593 - accuracy: 0.9018 - val_loss: 0.2474 - val_accuracy: 0.9025
Epoch 38/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2591 - accuracy: 0.9004 - val_loss: 0.2466 - val_accuracy: 0.9038
Epoch 39/50
217/217 [==============================] - 4s 21ms/step - loss: 0.2582 - accuracy: 0.9015 - val_loss: 0.2482 - val_accuracy: 0.9039
Epoch 40/50
217/217 [==============================] - 4s 21ms/step - loss: 0.2580 - accuracy: 0.9001 - val_loss: 0.2472 - val_accuracy: 0.9048
Epoch 41/50
217/217 [==============================] - 4s 21ms/step - loss: 0.2542 - accuracy: 0.9024 - val_loss: 0.2479 - val_accuracy: 0.9023
Epoch 42/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2558 - accuracy: 0.9011 - val_loss: 0.2460 - val_accuracy: 0.9054
Epoch 43/50
217/217 [==============================] - 4s 20ms/step - loss: 0.2546 - accuracy: 0.9033 - val_loss: 0.2466 - val_accuracy: 0.9029
Epoch 44/50
217/217 [==============================] - 4s 20ms/step - loss: 0.2556 - accuracy: 0.9031 - val_loss: 0.2468 - val_accuracy: 0.9038
Epoch 45/50
217/217 [==============================] - 4s 21ms/step - loss: 0.2574 - accuracy: 0.9010 - val_loss: 0.2478 - val_accuracy: 0.9039
Epoch 46/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2520 - accuracy: 0.9029 - val_loss: 0.2463 - val_accuracy: 0.9038
Epoch 47/50
217/217 [==============================] - 5s 21ms/step - loss: 0.2500 - accuracy: 0.9025 - val_loss: 0.2450 - val_accuracy: 0.9044
Epoch 48/50
217/217 [==============================] - 3s 12ms/step - loss: 0.2517 - accuracy: 0.9035 - val_loss: 0.2456 - val_accuracy: 0.9042
Epoch 49/50
217/217 [==============================] - 4s 18ms/step - loss: 0.2487 - accuracy: 0.9044 - val_loss: 0.2435 - val_accuracy: 0.9061
Epoch 50/50
217/217 [==============================] - 4s 19ms/step - loss: 0.2491 - accuracy: 0.9036 - val_loss: 0.2438 - val_accuracy: 0.9063
In [27]:
from matplotlib import pyplot as plt
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
In [26]:
from matplotlib import pyplot as plt
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()

Predictions

In [28]:
y_pred1=model1.predict_classes(X_test)
WARNING:tensorflow:From <ipython-input-28-ccbb315231b5>:1: Sequential.predict_classes (from tensorflow.python.keras.engine.sequential) is deprecated and will be removed after 2021-01-01.
Instructions for updating:
Please use instead:* `np.argmax(model.predict(x), axis=-1)`,   if your model does multi-class classification   (e.g. if it uses a `softmax` last-layer activation).* `(model.predict(x) > 0.5).astype("int32")`,   if your model does binary classification   (e.g. if it uses a `sigmoid` last-layer activation).
In [29]:
from sklearn.metrics import confusion_matrix
In [30]:
confusion_matrix(y_test,y_pred1)
Out[30]:
array([[2831,  589],
       [  52, 3367]])
In [31]:
import seaborn as sn
from matplotlib import pyplot as plt
cm = tf.math.confusion_matrix(labels = y_test, predictions = y_pred1)

plt.figure(figsize = (10,7))
sn.heatmap(cm, annot = True, fmt = 'd')
plt.xlabel("predicted")
plt.ylabel("actual")
Out[31]:
Text(69.0, 0.5, 'actual')

Accuracy of the Model

In [32]:
from sklearn.metrics import accuracy_score
accuracy_score(y_test,y_pred1)
Out[32]:
0.9062728469074426
In [33]:
from sklearn.metrics import classification_report
print(classification_report(y_test,y_pred1))
              precision    recall  f1-score   support

           0       0.98      0.83      0.90      3420
           1       0.85      0.98      0.91      3419

    accuracy                           0.91      6839
   macro avg       0.92      0.91      0.91      6839
weighted avg       0.92      0.91      0.91      6839

In [34]:
# importing the test data
In [35]:
df_test = pd.read_csv('https://cainvas-static.s3.amazonaws.com/media/user_data/cainvas-admin/test.csv')
In [36]:
df_test[:5]
Out[36]:
id title author text
0 20800 Specter of Trump Loosens Tongues, if Not Purse... David Streitfeld PALO ALTO, Calif. — After years of scorning...
1 20801 Russian warships ready to strike terrorists ne... NaN Russian warships ready to strike terrorists ne...
2 20802 #NoDAPL: Native American Leaders Vow to Stay A... Common Dreams Videos #NoDAPL: Native American Leaders Vow to...
3 20803 Tim Tebow Will Attempt Another Comeback, This ... Daniel Victor If at first you don’t succeed, try a different...
4 20804 Keiser Report: Meme Wars (E995) Truth Broadcast Network 42 mins ago 1 Views 0 Comments 0 Likes 'For th...
In [37]:
df_test = df_test.reset_index(drop=True)
In [38]:
df_test1=np.array(embedded_docs)

making Predictions for test data

In [39]:
y_pred2=model1.predict_classes(df_test1)
In [40]:
y_pred2 = pd.DataFrame(y_pred2, columns=['lables'])
In [41]:
df_final_0 = pd.concat([df_test['id'], y_pred2], axis = 1)
In [42]:
df_final_0.to_csv('Predictions')
In [43]:
df_final_0.head(10)
Out[43]:
id lables
0 20800.0 1
1 20801.0 0
2 20802.0 0
3 20803.0 0
4 20804.0 1
5 20805.0 1
6 20806.0 0
7 20807.0 0
8 20808.0 0
9 20809.0 1