diff --git a/Amongus_Character/among.py b/Amongus_Character/among.py deleted file mode 100644 index a406505..0000000 --- a/Amongus_Character/among.py +++ /dev/null @@ -1,99 +0,0 @@ -import turtle - -body_color = 'cyan' -glass_color = '#9acedc' - -s = turtle.getscreen() -t = turtle.Turtle() - -def body(): - t.pensize(20) - t.fillcolor(body_color) - t.begin_fill() - t.right(90) - t.forward(50) - t.right(180) - t.circle(40, -180) - t.right(180) - t.forward(200) - - t.right(180) - t.circle(100, -180) - - t.backward(20) - t.left(15) - t.circle(500, -20) - t.backward(20) - - t.circle(40, -180) - t.left(7) - t.backward(50) - - t.up() - t.left(90) - t.forward(10) - t.right(90) - t.down() - - t.right(240) - t.circle(50, -70) - - t.end_fill() - -def glass(): - t.up() - t.right(230) - t.forward(100) - t.left(90) - t.forward(20) - t.right(90) - - t.down() - t.fillcolor(glass_color) - t.begin_fill() - - t.right(150) - t.circle(90, -55) - - t.right(180) - t.forward(1) - t.right(180) - t.circle(10, -65) - t.right(180) - t.forward(110) - t.right(180) - - t.circle(50, -190) - t.right(170) - t.forward(80) - - t.right(180) - t.circle(45, -30) - - t.end_fill() - -def backpack(): - t.up() - t.right(60) - t.forward(100) - t.right(90) - t.forward(75) - - t.fillcolor(body_color) - t.begin_fill() - - t.down() - t.forward(30) - t.right(255) - - t.circle(300, -30) - t.right(260) - t.forward(30) - - t.end_fill() - -body() -glass() -backpack() - -t.screen.exitonclick() diff --git a/Cartoonize/cartoon.py b/Cartoonize/cartoon.py deleted file mode 100644 index 14f180f..0000000 --- a/Cartoonize/cartoon.py +++ /dev/null @@ -1,22 +0,0 @@ -import cv2 -import numpy as np -from tkinter.filedialog import * - -photo = askopenfilename() -img = cv2.imread(photo) - -grey = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) -grey = cv2.medianBlur(grey, 5) -edges = cv2.adaptiveThreshold(grey, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 9, 9) - -#cartoonize -color = cv2.bilateralFilter(img, 9, 250, 250) -cartoon = cv2.bitwise_and(color, color, mask = edges) - -cv2.imshow("Image", img) -cv2.imshow("Cartoon", cartoon) - -#save -cv2.imwrite("cartoon.jpg", cartoon) -cv2.waitKey(0) -cv2.destroyAllWindows() \ No newline at end of file diff --git a/CoronaTrackerAPI/corona.py b/CoronaTrackerAPI/corona.py deleted file mode 100644 index a450503..0000000 --- a/CoronaTrackerAPI/corona.py +++ /dev/null @@ -1,38 +0,0 @@ -import tkinter as tk -import requests -import datetime - -def getCovidData(): - api = "https://disease.sh/v3/covid-19/all" - json_data = requests.get(api).json() - total_cases = str(json_data['cases']) - total_deaths = str(json_data['deaths']) - today_cases = str(json_data['todayCases']) - today_deaths = str(json_data['todayDeaths']) - today_recovered = str(json_data['todayRecovered']) - updated_at = json_data['updated'] - date = datetime.datetime.fromtimestamp(updated_at/1e3) - label.config(text = "Total Cases: "+total_cases+ - "\n"+"Total Deaths: "+total_deaths+ - "\n"+"Today Cases: "+today_cases+ - "\n"+"Today Deaths: "+today_deaths+ - "\n"+"Today Recovered: "+today_recovered) - - label2.config(text = date) -canvas = tk.Tk() -canvas.geometry("400x400") -canvas.title("Corona Tracker App") - -f = ("poppins", 15, "bold") - -button = tk.Button(canvas, font = f, text = "Load", command = getCovidData) -button.pack(pady = 20) - -label = tk.Label(canvas, font = f) -label.pack(pady=20) - -label2 = tk.Label(canvas, font = 8) -label2.pack() -getCovidData() - -canvas.mainloop() \ No newline at end of file diff --git a/Hand Gesture 
Detection/Explaination.pdf b/Hand Gesture Detection/Explaination.pdf new file mode 100644 index 0000000..ac4692d Binary files /dev/null and b/Hand Gesture Detection/Explaination.pdf differ diff --git a/Hand Gesture Detection/app.py b/Hand Gesture Detection/app.py new file mode 100644 index 0000000..09bf38f --- /dev/null +++ b/Hand Gesture Detection/app.py @@ -0,0 +1,112 @@ +import cv2 +import mediapipe as mp + +# Initialize Mediapipe hands and drawing utilities +mp_hands = mp.solutions.hands +mp_drawing = mp.solutions.drawing_utils + +# Function to determine if hand is open +def is_open_hand(landmarks): + thumb_tip = landmarks[mp_hands.HandLandmark.THUMB_TIP] + index_tip = landmarks[mp_hands.HandLandmark.INDEX_FINGER_TIP] + middle_tip = landmarks[mp_hands.HandLandmark.MIDDLE_FINGER_TIP] + ring_tip = landmarks[mp_hands.HandLandmark.RING_FINGER_TIP] + pinky_tip = landmarks[mp_hands.HandLandmark.PINKY_TIP] + + # Check if all fingers are above the base of the hand + return (index_tip.y < thumb_tip.y and + middle_tip.y < thumb_tip.y and + ring_tip.y < thumb_tip.y and + pinky_tip.y < thumb_tip.y) + +# Function to determine if the gesture is a thumbs up +def is_thumbs_up(landmarks): + thumb_tip = landmarks[mp_hands.HandLandmark.THUMB_TIP] + index_tip = landmarks[mp_hands.HandLandmark.INDEX_FINGER_TIP] + + # Check if the thumb is extended and the index is down + return (thumb_tip.y < index_tip.y) + +# Function to determine if the gesture is a peace sign +def is_peace_sign(landmarks): + index_tip = landmarks[mp_hands.HandLandmark.INDEX_FINGER_TIP] + middle_tip = landmarks[mp_hands.HandLandmark.MIDDLE_FINGER_TIP] + ring_tip = landmarks[mp_hands.HandLandmark.RING_FINGER_TIP] + pinky_tip = landmarks[mp_hands.HandLandmark.PINKY_TIP] + + # Check if index and middle fingers are up while others are down + return (index_tip.y < middle_tip.y and + ring_tip.y > middle_tip.y and + pinky_tip.y > middle_tip.y) + + +# Function to determine if the gesture is a two fingers up +def is_two_fingers_up(landmarks): + index_tip = landmarks[mp_hands.HandLandmark.INDEX_FINGER_TIP] + middle_tip = landmarks[mp_hands.HandLandmark.MIDDLE_FINGER_TIP] + thumb_tip = landmarks[mp_hands.HandLandmark.THUMB_TIP] + ring_tip = landmarks[mp_hands.HandLandmark.RING_FINGER_TIP] + pinky_tip = landmarks[mp_hands.HandLandmark.PINKY_TIP] + + # Check if index and middle fingers are up while others are down + return (index_tip.y < thumb_tip.y and + middle_tip.y < thumb_tip.y and + ring_tip.y > thumb_tip.y and + pinky_tip.y > thumb_tip.y) + +# Start video capture +cap = cv2.VideoCapture(0) + +with mp_hands.Hands(min_detection_confidence=0.7, min_tracking_confidence=0.7) as hands: + while cap.isOpened(): + ret, frame = cap.read() + if not ret: + print("Error: Failed to capture video.") + break + + # Convert the frame to RGB + image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) + image.flags.writeable = False + + # Process the image and detect hands + results = hands.process(image) + + # Convert back to BGR for rendering + image.flags.writeable = True + image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR) + + if results.multi_hand_landmarks: + for hand_landmarks in results.multi_hand_landmarks: + # Draw hand landmarks + mp_drawing.draw_landmarks(image, hand_landmarks, mp_hands.HAND_CONNECTIONS) + + # Determine gesture + if is_open_hand(hand_landmarks.landmark): + gesture_text = 'Open Hand' + gesture_color = (0, 255, 0) # Green + elif is_thumbs_up(hand_landmarks.landmark): + gesture_text = 'Thumbs Up' + gesture_color = (255, 255, 0) # Cyan + elif 
is_peace_sign(hand_landmarks.landmark): + gesture_text = 'Peace Sign' + gesture_color = (255, 0, 255) # Magenta + elif is_two_fingers_up(hand_landmarks.landmark): + gesture_text = 'Two Fingers Up' + gesture_color = (128, 0, 128) # Purple + else: + gesture_text = 'Fist' + gesture_color = (0, 0, 255) # Red + + # Display gesture text on the image + cv2.putText(image, gesture_text, (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, gesture_color, 2) + + # Display the resulting frame + cv2.imshow('Hand Gesture Detection', image) + + # Break the loop on 'q' key press + if cv2.waitKey(1) & 0xFF == ord('q'): + break + +# Release the capture and destroy windows +cap.release() +cv2.destroyAllWindows() diff --git a/Language Detection/Explaination.pdf b/Language Detection/Explaination.pdf new file mode 100644 index 0000000..5588ba3 Binary files /dev/null and b/Language Detection/Explaination.pdf differ diff --git a/Language Detection/LangDetect.py b/Language Detection/LangDetect.py new file mode 100644 index 0000000..ddb44b4 --- /dev/null +++ b/Language Detection/LangDetect.py @@ -0,0 +1,188 @@ +import tkinter as tk +from tkinter import filedialog +import string +import pandas as pd +import matplotlib.pyplot as plt +import seaborn as sns +from statistics import mean +from googletrans import Translator + +# Constants for column names +CNST_TEXT_LINE_NAME = "Input Text" +CNST_TEST_LANGDETECT_LANG_LINE_NAME = "LangDetect: Detected Language" +CNST_TEST_LANGDETECT_PROB_LINE_NAME = "LangDetect: Confidence Score" +CNST_TEST_LANGID_LANG_LINE_NAME = "LangID: Detected Language" +CNST_TEST_LANGID_PROB_LINE_NAME = "LangID: Confidence Score" + +# Output file names +CNST_LANG_DETECTION_RESULTS_FILE_NAME = "Language_Detection_Results.csv" +CNST_LANG_DETECTION_DIFFERENCES_FILE_NAME = "Detection_Differences.csv" + +# Functions for detection +def detect_language_with_langdetect(line): + from langdetect import detect_langs + try: + langs = detect_langs(line) + for item in langs: + return item.lang, item.prob + except: + return "Unknown", 0.0 + +def detect_language_with_langid(line): + from langid import classify + lang, prob = classify(line) + return lang, prob + +def translate_text_to_english(text): + # Initialize the Google Translator + translator = Translator() + try: + # Translate the text to English + translated = translator.translate(text, dest='en') + print("") + return translated.text + except Exception as e: + return f"Error in translation: {e}" + +def write_data_to_csv(lines, langdetect_lang_results, langdetect_prob_results, langid_lang_results, langid_prob_results): + data = { + CNST_TEXT_LINE_NAME: lines, + CNST_TEST_LANGDETECT_LANG_LINE_NAME: langdetect_lang_results, + CNST_TEST_LANGDETECT_PROB_LINE_NAME: langdetect_prob_results, + CNST_TEST_LANGID_LANG_LINE_NAME: langid_lang_results, + CNST_TEST_LANGID_PROB_LINE_NAME: langid_prob_results + } + df = pd.DataFrame(data) + df.to_csv(CNST_LANG_DETECTION_RESULTS_FILE_NAME, sep="|", index=False) + +def extract_differences(file_name): + df = pd.read_csv(file_name, sep="|") + differences = df[df[CNST_TEST_LANGDETECT_LANG_LINE_NAME] != df[CNST_TEST_LANGID_LANG_LINE_NAME]] + differences.to_csv(CNST_LANG_DETECTION_DIFFERENCES_FILE_NAME, sep="|", index=False) + +# Visualization for Non-Technical Users +def show_results_for_non_tech_users(file_name): + df = pd.read_csv(file_name, sep="|") + sns.set_theme(style="whitegrid") + + print("\nLanguage Detection Results Summary") + print("=" * 50) + + # Display simple text-based insights + detected_languages_langdetect = 
df[CNST_TEST_LANGDETECT_LANG_LINE_NAME].value_counts() + detected_languages_langid = df[CNST_TEST_LANGID_LANG_LINE_NAME].value_counts() + + print("\nMost Common Languages Detected (LangDetect):") + print(detected_languages_langdetect) + + print("\nMost Common Languages Detected (LangID):") + print(detected_languages_langid) + + # Create visualizations + fig, axes = plt.subplots(2, 2, figsize=(16, 12)) + + # Pie chart for LangDetect + axes[0, 0].set_title("LangDetect - Detected Languages", fontsize=14) + detected_languages_langdetect.plot.pie( + autopct='%1.1f%%', + startangle=90, + ax=axes[0, 0], + colors=sns.color_palette("pastel"), + ) + axes[0, 0].set_ylabel('') + + # Pie chart for LangID + axes[0, 1].set_title("LangID - Detected Languages", fontsize=14) + detected_languages_langid.plot.pie( + autopct='%1.1f%%', + startangle=90, + ax=axes[0, 1], + colors=sns.color_palette("pastel"), + ) + axes[0, 1].set_ylabel('') + + # Bar chart for LangDetect confidence + langdetect_mean_probs = df.groupby(CNST_TEST_LANGDETECT_LANG_LINE_NAME)[CNST_TEST_LANGDETECT_PROB_LINE_NAME].mean() + langdetect_mean_probs.sort_values().plot.barh(ax=axes[1, 0], color="skyblue") + axes[1, 0].set_title("LangDetect - Mean Confidence Score", fontsize=14) + axes[1, 0].set_xlabel("Mean Confidence") + + # Bar chart for LangID confidence + langid_mean_probs = df.groupby(CNST_TEST_LANGID_LANG_LINE_NAME)[CNST_TEST_LANGID_PROB_LINE_NAME].mean() + langid_mean_probs.sort_values().plot.barh(ax=axes[1, 1], color="skyblue") + axes[1, 1].set_title("LangID - Mean Confidence Score", fontsize=14) + axes[1, 1].set_xlabel("Mean Confidence") + + plt.tight_layout() + plt.savefig("Language_Detection_Visualization.png") + plt.show() + + print("\nCharts saved as 'Language_Detection_Visualization.png'.") + print("Detailed results are saved in 'Language_Detection_Results.csv'.") + print("Any differences are saved in 'Detection_Differences.csv'.") + +# Tkinter GUI for showing translations +def show_translation_window(original_texts, translated_texts): + # Create a new window (Toplevel) + translation_window = tk.Toplevel() + translation_window.title("Translation Results") + + # Create a scrollable text box to display the input and translated texts + text_box = tk.Text(translation_window, wrap=tk.WORD, width=80, height=20) + text_box.pack(padx=10, pady=10) + + # Display each original and translated pair + for original, translated in zip(original_texts, translated_texts): + text_box.insert(tk.END, f"Original: {original}\n") + text_box.insert(tk.END, f"Translated: {translated}\n") + text_box.insert(tk.END, "=" * 50 + "\n") + + text_box.config(state=tk.DISABLED) + +# Main Workflow +def main(): + # Initialize Tkinter root window (hidden) + root = tk.Tk() + root.withdraw() + + # Ask user to select a file + file_path = filedialog.askopenfilename() + with open(file_path, 'r', encoding='utf-8') as file: + lines = file.readlines() + + langdetect_lang_results = [] + langdetect_prob_results = [] + langid_lang_results = [] + langid_prob_results = [] + raw_text = [] + translated_texts = [] + + # Process lines + for line in lines: + line = line.translate(str.maketrans('', '', string.punctuation)).rstrip() + raw_text.append(line) + + # Translate text to English + translated_text = translate_text_to_english(line) + translated_texts.append(translated_text) + + langdetect_lang, langdetect_prob = detect_language_with_langdetect(line) + langdetect_lang_results.append(langdetect_lang) + langdetect_prob_results.append(langdetect_prob) + + langid_lang, langid_prob = 
detect_language_with_langid(line) + langid_lang_results.append(langid_lang) + langid_prob_results.append(langid_prob) + + # Show translation window + show_translation_window(raw_text, translated_texts) + + # Save results to CSV + write_data_to_csv(raw_text, langdetect_lang_results, langdetect_prob_results, langid_lang_results, langid_prob_results) + extract_differences(CNST_LANG_DETECTION_RESULTS_FILE_NAME) + show_results_for_non_tech_users(CNST_LANG_DETECTION_RESULTS_FILE_NAME) + + root.mainloop() + +if __name__ == "__main__": + main() diff --git a/Language Detection/Langauages Supported.txt b/Language Detection/Langauages Supported.txt new file mode 100644 index 0000000..633895d --- /dev/null +++ b/Language Detection/Langauages Supported.txt @@ -0,0 +1,88 @@ +1. LangDetect (Based on the langdetect package) +The LangDetect library supports a variety of languages. The full list includes, but is not limited to: + +English +Spanish +French +German +Italian +Portuguese +Dutch +Russian +Chinese (Simplified) +Japanese +Korean +Arabic +Hindi +Turkish +Swedish +Finnish +Danish +Polish +Norwegian +Czech +Greek +Romanian +Hungarian +Thai +Indonesian +Bengali +Malay +Ukrainian +Hebrew +Vietnamese +Tamil +Telugu +Swahili +Filipino (Tagalog) +Punjabi +LangDetect supports over 55 languages in total. + +2. LangID (Based on the langid package) +The LangID library is slightly more limited but still supports many common languages. Here's a list of languages it detects: + +English +Spanish +French +German +Italian +Portuguese +Dutch +Russian +Arabic +Chinese (Simplified) +Japanese +Korean +Hindi +Turkish +Swedish +Danish +Polish +Finnish +Norwegian +Greek +Czech +Hungarian +Thai +Indonesian +Bengali +Swahili +Tamil +Telugu +Filipino (Tagalog) +LangID supports a smaller set of approximately 97 languages. 
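
The two detectors used in LangDetect.py report their results in slightly different shapes, which is easiest to see side by side. Below is a minimal, illustrative sketch — assuming the langdetect and langid packages are installed, and using made-up sample sentences — of the same calls the script relies on (detect_langs and classify):

```python
# Minimal comparison of the two detectors used in LangDetect.py.
# Assumes `pip install langdetect langid`; the sample strings are illustrative only.
from langdetect import detect_langs
from langid import classify

samples = ["This is an English sentence.", "Ceci est une phrase en francais."]

for text in samples:
    best = detect_langs(text)[0]          # highest-probability guess: has .lang and .prob
    lang, score = classify(text)          # (language code, score) tuple
    print(repr(text))
    print(f"  langdetect: {best.lang} (p={best.prob:.2f})")
    print(f"  langid:     {lang} (score={score:.2f})")
```

Note that detect_langs returns a ranked list of candidates with probabilities, while classify returns a single (language, score) pair, which is why LangDetect.py records them in separate columns.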
+ +Summary of Supported Languages +Both libraries can detect a variety of languages, including major ones like: + +English +Spanish +French +German +Italian +Chinese (Simplified & Traditional) +Japanese +Arabic +Hindi +Russian +Portuguese \ No newline at end of file diff --git a/Notes & Task Prioritizer/app.py b/Notes & Task Prioritizer/app.py new file mode 100644 index 0000000..f945b34 --- /dev/null +++ b/Notes & Task Prioritizer/app.py @@ -0,0 +1,146 @@ +import tkinter as tk +from tkinter import messagebox, ttk +from datetime import datetime + +class TodoApp(tk.Tk): + def __init__(self): + super().__init__() + + # Window setup + self.title('Material You To-Do List') + self.geometry('600x550') + self.configure(bg='#F3F4F6') # Light background color + self.resizable(False, False) + + self.tasks = [] + + # Title + self.title_label = tk.Label(self, text="To-Do List", font=('Roboto', 24, 'bold'), bg='#F3F4F6', fg='#333') + self.title_label.pack(pady=20) + + # Input Frame + self.input_frame = tk.Frame(self, bg='#F3F4F6') + self.input_frame.pack(pady=5) + + self.task_input = tk.Entry(self.input_frame, width=22, font=('Roboto', 14), bd=0, relief='flat', bg='#E0E0E0', fg='#333') + self.task_input.pack(side='left', padx=5, pady=5) + + # Priority Dropdown + self.priority_var = tk.StringVar(value='Normal') + self.priority_dropdown = ttk.Combobox(self.input_frame, textvariable=self.priority_var, + values=["Low", "Normal", "High"], width=10, state='readonly') + self.priority_dropdown.pack(side='left', padx=5, pady=5) + + self.btn_add_task = tk.Button(self.input_frame, text="Add Task", command=self.add_task, + bg='#6200EE', fg='white', font=('Roboto', 12), borderwidth=0, relief='flat', padx=10, pady=5) + self.btn_add_task.pack(side='right', padx=5, pady=5) + + # Listbox Frame with Scrollbar + self.list_frame = tk.Frame(self) + self.list_frame.pack(pady=10) + + self.lb_tasks = tk.Listbox(self.list_frame, width=50, height=12, font=('Roboto', 12), + bg='#FFFFFF', fg='#333', selectbackground='#D1E7DD', borderwidth=0) + self.lb_tasks.pack(side='left') + + self.scrollbar = tk.Scrollbar(self.list_frame) + self.scrollbar.pack(side='right', fill='y') + + self.lb_tasks.config(yscrollcommand=self.scrollbar.set) + self.scrollbar.config(command=self.lb_tasks.yview) + + # Button Frame + self.button_frame = tk.Frame(self, bg='#F3F4F6') + self.button_frame.pack(pady=10) + + # Task Buttons + self.btn_delete = tk.Button(self.button_frame, text="Delete", command=self.delete_task, + bg='#FF5252', fg='white', font=('Roboto', 12), borderwidth=0, relief='flat', width=12) + self.btn_delete.pack(side='left', padx=5, pady=5) + + self.btn_delete_all = tk.Button(self.button_frame, text="Delete All", command=self.delete_all_tasks, + bg='#FF5252', fg='white', font=('Roboto', 12), borderwidth=0, relief='flat', width=12) + self.btn_delete_all.pack(side='left', padx=5, pady=5) + + # Save Buttons + self.btn_save = tk.Button(self.button_frame, text="Save Task", command=self.save_selected_task, + bg='#03A9F4', fg='white', font=('Roboto', 12), borderwidth=0, relief='flat', width=12) + self.btn_save.pack(side='left', padx=5, pady=5) + + self.btn_save_all = tk.Button(self.button_frame, text="Save All Tasks", command=self.save_all_tasks, + bg='#03A9F4', fg='white', font=('Roboto', 12), borderwidth=0, relief='flat', width=12) + self.btn_save_all.pack(side='left', padx=5, pady=5) + + # Task Count Label + self.task_count_label = tk.Label(self, text="", bg='#F3F4F6', font=('Roboto', 12)) + self.task_count_label.pack(pady=5) + + 
self.update_task_count() + + def add_task(self): + """Add a new task to the list.""" + task_text = self.task_input.get().strip() + task_priority = self.priority_var.get() + + if task_text: + time_added = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + self.tasks.append({'task': task_text, 'time_added': time_added, 'priority': task_priority}) + self.update_listbox() + self.task_input.delete(0, 'end') + self.update_task_count() + else: + messagebox.showwarning("Input Error", "Please enter a task.") + + def delete_task(self): + """Delete the selected task.""" + selected_task_index = self.lb_tasks.curselection() + if selected_task_index: + del self.tasks[selected_task_index[0]] + self.update_listbox() + self.update_task_count() + else: + messagebox.showwarning("Selection Error", "Please select a task to delete.") + + def delete_all_tasks(self): + """Clear all tasks from the list.""" + self.tasks = [] + self.update_listbox() + self.update_task_count() + + def save_selected_task(self): + """Save the selected task to a file.""" + selected_task_index = self.lb_tasks.curselection() + if selected_task_index: + task = self.tasks[selected_task_index[0]] + with open("./saved_task.txt", "w") as f: + f.write(f"Task: {task['task']}\nPriority: {task['priority']}\nAdded: {task['time_added']}\n") + messagebox.showinfo("Save Task", "Selected task saved to 'saved_task.txt'") + else: + messagebox.showwarning("Selection Error", "Please select a task to save.") + + def save_all_tasks(self): + """Save all tasks to a file.""" + if self.tasks: + with open("all_tasks.txt", "w") as f: + for task in self.tasks: + f.write(f"Task: {task['task']}\nPriority: {task['priority']}\nAdded: {task['time_added']}\n\n") + messagebox.showinfo("Save All Tasks", "All tasks saved to 'all_tasks.txt'") + else: + messagebox.showwarning("No Tasks", "No tasks available to save.") + + def update_listbox(self): + """Update the task list display.""" + self.lb_tasks.delete(0, "end") + for task in self.tasks: + task_display = f"{task['task']} | Priority: {task['priority']} | Added: {task['time_added']}" + self.lb_tasks.insert("end", task_display) + + def update_task_count(self): + """Update the task count label.""" + task_count = len(self.tasks) + self.task_count_label.config(text=f"Number of tasks: {task_count}") + + +if __name__ == '__main__': + app = TodoApp() + app.mainloop() diff --git a/Sentiment Chatbot/app.py b/Sentiment Chatbot/app.py new file mode 100644 index 0000000..81cd4c3 --- /dev/null +++ b/Sentiment Chatbot/app.py @@ -0,0 +1,255 @@ +import streamlit as st +from PIL import Image +import pytesseract +import cv2 +import numpy as np +from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer +import matplotlib.pyplot as plt +import matplotlib.patches as mpatches +import speech_recognition as sr +from langdetect import detect, DetectorFactory +import re +from spellchecker import SpellChecker +import requests +from bs4 import BeautifulSoup + +DetectorFactory.seed = 0 + +def preprocess_text(text): + text = re.sub(r'[^a-zA-Z\s]', '', text) + text = text.lower() + return text + +def correct_spelling(text): + spell = SpellChecker() + corrected_words = [] + for word in text.split(): + corrected_word = spell.correction(word) + if corrected_word is not None: + corrected_words.append(corrected_word) + else: + corrected_words.append(word) + return ' '.join(corrected_words) + +def detect_sentiment(input_text): + try: + if not input_text.strip(): + return "Please enter a paragraph.", "", {}, 0, 0 + + if detect(input_text) != 'en': + 
return "Please provide text in English.", "", {}, 0, 0 + + input_text = preprocess_text(input_text) + input_text = correct_spelling(input_text) + + sid_obj = SentimentIntensityAnalyzer() + sentiment_scores = sid_obj.polarity_scores(input_text) + word_sentiments = {} + positive_words = [] + negative_words = [] + + for word in input_text.split(): + word_sentiment = sid_obj.polarity_scores(word)['compound'] + word_sentiments[word] = word_sentiment + if word_sentiment > 0: + positive_words.append(word) + elif word_sentiment < 0: + negative_words.append(word) + + total_words = len(positive_words) + len(negative_words) + if total_words == 0: + return ("Can't provide text sentiment due to the following reasons:\n" + "1. Check if the text is in English.\n" + "2. Check the spellings of words for more accurate results."), "", {}, 0, 0 + + positive_percentage = len(positive_words) / total_words * 100 + negative_percentage = len(negative_words) / total_words * 100 + + compound_score = sentiment_scores['compound'] + if compound_score >= 0.05: + overall_sentiment = 'Positive' + sentiment_emoji = '😊' + elif compound_score <= -0.05: + overall_sentiment = 'Negative' + sentiment_emoji = '😞' + else: + overall_sentiment = 'Neutral' + sentiment_emoji = '😐' + + num_positive_words = len(positive_words) + num_negative_words = len(negative_words) + + response_message = (f"