gridflowai committed
Commit 97615dd · 1 Parent(s): 0c396ba
Upload app.py

app.py CHANGED
@@ -50,38 +50,40 @@ def preprocess_text_for_rnn(text, tokenizer, maxlen):
 50        sequence = tokenizer.texts_to_sequences([text])
 51        padded_sequence = pad_sequences(sequence, padding='post', maxlen=maxlen)
 52        return padded_sequence
 53 -
 54    def predict_lr(text):
 55        preprocessed_text = preprocess_text(text)
 56        vectorized_text = tfidf_vectorizer.transform([preprocessed_text])
 57 -      dense_vectorized_text = vectorized_text.toarray()
 58 -
 59 -      prediction
 60 -
 61
 62    def predict_svm(text):
 63        preprocessed_text = preprocess_text(text)
 64        vectorized_text = tfidf_vectorizer.transform([preprocessed_text])
 65 -      dense_vectorized_text = vectorized_text.toarray()
 66 -
 67 -      prediction
 68 -      return prediction, prediction_probs
 69
 70    def predict_nn(text):
 71        preprocessed_text = preprocess_text(text)
 72        vectorized_text = tfidf_vectorizer.transform([preprocessed_text])
 73 -      dense_vectorized_text = vectorized_text.toarray()
 74        prediction_probs = nn_model.predict(dense_vectorized_text)[0]
 75        prediction = int(np.argmax(prediction_probs))
 76 -      return prediction
 77
 78    def predict_mnb(text):
 79        preprocessed_text = preprocess_text(text)
 80        vectorized_text = tfidf_vectorizer.transform([preprocessed_text])
 81 -      dense_vectorized_text = vectorized_text.toarray()
 82 -
 83 -      prediction
 84 -      return prediction, prediction_probs
 85
 86    def predict_rnn(text):
 87        processed_text = preprocess_text_for_rnn(text, tokenizer, maxlen=170)
@@ -92,15 +94,19 @@ def predict_rnn(text):
 92    def sentiment_prediction(text, model):
 93        prediction, percentages = 0, []
 94        if model == "Logistic Regression":
 95 -          prediction
 96        elif model == "SVM":
 97 -          prediction
 98        elif model == "Neural Network":
 99 -          prediction
100        elif model == "Multinomial Naive Bayes":
101 -          prediction
102        elif model == "Recurrent Neural Network":
103            prediction, percentages = predict_rnn(text)
104
105        # Displaying emojis based on sentiment
106        emoji_positive = "😃"
@@ -113,11 +119,14 @@ def sentiment_prediction(text, model):
113        # Create label for the prediction
114        prediction_label = labels[prediction]
115
116 -
117 -
118 -
119
120 -      return prediction_label, f"{labels[0]}: {percentage_negative:.2%}, Percentage {labels[1]}: {percentage_positive:.2%}", emoji
121
122    # Create the Gradio interface
123    iface = gr.Interface(
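In short, the commit fills in the missing prediction and return statements for the four classical models, adds explanatory comments, wires each model into sentiment_prediction, and limits the percentage read-out to the RNN path. The same region of app.py as it reads after the commit (new line numbering; + marks the added lines):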
 50        sequence = tokenizer.texts_to_sequences([text])
 51        padded_sequence = pad_sequences(sequence, padding='post', maxlen=maxlen)
 52        return padded_sequence
 53 +  # Function to predict sentiment using Logistic Regression
 54    def predict_lr(text):
 55        preprocessed_text = preprocess_text(text)
 56        vectorized_text = tfidf_vectorizer.transform([preprocessed_text])
 57 +      dense_vectorized_text = vectorized_text.toarray()  # Convert to dense array
 58 +      prediction = int(lr_model.predict(dense_vectorized_text)[0])
 59 +      return prediction
 60 +
 61
 62 +  # Function to predict sentiment using SVM
 63    def predict_svm(text):
 64        preprocessed_text = preprocess_text(text)
 65        vectorized_text = tfidf_vectorizer.transform([preprocessed_text])
 66 +      dense_vectorized_text = vectorized_text.toarray()  # Convert to dense array
 67 +      prediction = int(svm_model.predict(dense_vectorized_text)[0])
 68 +      return prediction
 69
 70 +  # Function to predict sentiment using Neural Network
 71    def predict_nn(text):
 72        preprocessed_text = preprocess_text(text)
 73        vectorized_text = tfidf_vectorizer.transform([preprocessed_text])
 74 +      dense_vectorized_text = vectorized_text.toarray()  # Convert to dense array
 75 +
 76        prediction_probs = nn_model.predict(dense_vectorized_text)[0]
 77        prediction = int(np.argmax(prediction_probs))
 78 +      return prediction
 79
 80 +  # Function to predict sentiment using Multinomial Naive Bayes
 81    def predict_mnb(text):
 82        preprocessed_text = preprocess_text(text)
 83        vectorized_text = tfidf_vectorizer.transform([preprocessed_text])
 84 +      dense_vectorized_text = vectorized_text.toarray()  # Convert to dense array
 85 +      prediction = int(mnb_model.predict(dense_vectorized_text)[0])
 86 +      return prediction
 87
 88    def predict_rnn(text):
 89        processed_text = preprocess_text_for_rnn(text, tokenizer, maxlen=170)
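These helpers rely on objects created earlier in app.py, outside the changed region: preprocess_text, the fitted tfidf_vectorizer, the Keras tokenizer, and the trained lr_model, svm_model, nn_model and mnb_model. A minimal sketch of that setup, with artifact file names assumed purely for illustration:

import joblib
from tensorflow.keras.models import load_model

# Assumed file names -- the real loading code sits earlier in app.py and is not part of this diff.
tfidf_vectorizer = joblib.load("tfidf_vectorizer.pkl")
lr_model = joblib.load("lr_model.pkl")      # LogisticRegression
svm_model = joblib.load("svm_model.pkl")    # SVC / LinearSVC
mnb_model = joblib.load("mnb_model.pkl")    # MultinomialNB
nn_model = load_model("nn_model.h5")        # Keras feed-forward classifier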
 94    def sentiment_prediction(text, model):
 95        prediction, percentages = 0, []
 96        if model == "Logistic Regression":
 97 +          prediction = predict_lr(text)
 98        elif model == "SVM":
 99 +          prediction = predict_svm(text)
100        elif model == "Neural Network":
101 +          prediction = predict_nn(text)
102        elif model == "Multinomial Naive Bayes":
103 +          prediction = predict_mnb(text)
104        elif model == "Recurrent Neural Network":
105            prediction, percentages = predict_rnn(text)
106 +          # Calculate percentages for both labels
107 +          percentage_negative = percentages[0]
108 +          percentage_positive = percentages[1]
109 +
110
111        # Displaying emojis based on sentiment
112        emoji_positive = "😃"
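Only the "Recurrent Neural Network" branch fills percentages: predict_lr, predict_svm, predict_nn and predict_mnb return a bare class index, while predict_rnn returns the index together with the class probabilities used for the percentage read-out. Its body (new lines 90-93) is outside this diff; a sketch of the shape it presumably has, with rnn_model as an assumed name:

def predict_rnn(text):
    processed_text = preprocess_text_for_rnn(text, tokenizer, maxlen=170)
    prediction_probs = rnn_model.predict(processed_text)[0]  # assumed Keras RNN model
    prediction = int(np.argmax(prediction_probs))
    return prediction, prediction_probs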
119        # Create label for the prediction
120        prediction_label = labels[prediction]
121
122 +      if model == "Recurrent Neural Network":
123 +          return prediction_label, f" {labels[0]}: {percentage_negative:.2%}, Percentage {labels[1]}: {percentage_positive:.2%}", emoji
124 +      else:
125 +
126 +          return prediction_label, f"NOT AVAILABLE", emoji
127 +
128 +
129
130
131    # Create the Gradio interface
132    iface = gr.Interface(
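The gr.Interface( call is truncated in this view, so its actual arguments are not recoverable here. A wiring consistent with sentiment_prediction's two inputs and three outputs could look like the sketch below; every argument is an assumption, not the committed code:

iface = gr.Interface(
    fn=sentiment_prediction,
    inputs=[
        gr.Textbox(lines=4, label="Review text"),
        gr.Dropdown(
            choices=["Logistic Regression", "SVM", "Neural Network",
                     "Multinomial Naive Bayes", "Recurrent Neural Network"],
            label="Model",
        ),
    ],
    outputs=[
        gr.Textbox(label="Sentiment"),
        gr.Textbox(label="Percentages"),
        gr.Textbox(label="Emoji"),
    ],
)

iface.launch()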