import streamlit as st
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch
import re
import pandas as pd
import googleapiclient.discovery
import plotly.express as px

# Load the BERT tokenizer and model
tokenizer = AutoTokenizer.from_pretrained("nlptown/bert-base-multilingual-uncased-sentiment")
model = AutoModelForSequenceClassification.from_pretrained("nlptown/bert-base-multilingual-uncased-sentiment")

# Set up the YouTube Data API client
api_service_name = "youtube"
api_version = "v3"
DEVELOPER_KEY = "AIzaSyC4Vx8G6nm3Ow9xq7NluTuCCJ1d_5w4YPE"  # Replace with your actual API key
youtube = googleapiclient.discovery.build(api_service_name, api_version, developerKey=DEVELOPER_KEY)


# Fetch the top-level comments for a video ID (first page, at most 100 comments)
def scrape_comments(video_id):
    request = youtube.commentThreads().list(
        part="snippet",
        videoId=video_id,
        maxResults=100
    )
    response = request.execute()

    comments = []
    for item in response['items']:
        comment = item['snippet']['topLevelComment']['snippet']
        comments.append([comment['textDisplay']])

    comments_df = pd.DataFrame(comments, columns=['comment'])
    return comments_df


# Extract the video ID from a YouTube URL
def extract_video_id(video_url):
    match = re.search(r'(?<=v=)[\w-]+', video_url)
    if match:
        return match.group(0)
    else:
        st.error("Invalid YouTube video URL")


# Fetch the comments plus the channel name and video description for a video ID
def fetch_comments(video_id):
    comments = scrape_comments(video_id)

    request = youtube.videos().list(
        part="snippet",
        id=video_id
    )
    response = request.execute()

    if response['items']:
        video_info = response['items'][0]['snippet']
        channel_name = video_info['channelTitle']
        video_description = video_info['description']
    else:
        channel_name = "Unknown"
        video_description = "No description available"

    return comments, channel_name, video_description


# Analyze the sentiment of a single comment
def analyze_sentiment(comment):
    tokens = tokenizer.encode(comment, return_tensors="pt", max_length=512, truncation=True)
    result = model(tokens)
    # The model predicts a 1-5 star rating; map it to a coarse sentiment label
    sentiment_id = torch.argmax(result.logits) + 1
    if sentiment_id > 3:
        sentiment_label = "Positive"
    elif sentiment_id < 3:
        sentiment_label = "Negative"
    else:
        sentiment_label = "Neutral"

    return sentiment_label
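
# Optional: scrape_comments() above only fetches the first page of results (at most
# 100 comments). The sketch below shows how pagination could be added using the
# pageToken/nextPageToken fields of the YouTube Data API v3 commentThreads endpoint.
# The function name and max_comments parameter are illustrative; nothing else in
# this app calls it.
def scrape_comments_paginated(video_id, max_comments=500):
    comments = []
    page_token = None
    while len(comments) < max_comments:
        params = dict(part="snippet", videoId=video_id, maxResults=100)
        if page_token:
            params["pageToken"] = page_token
        response = youtube.commentThreads().list(**params).execute()
        for item in response['items']:
            comments.append([item['snippet']['topLevelComment']['snippet']['textDisplay']])
        page_token = response.get('nextPageToken')
        if not page_token:
            break
    return pd.DataFrame(comments[:max_comments], columns=['comment'])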

def main():
    st.title("YouTube Comments Sentiment Analysis")

    # Sidebar: app description and links
    st.sidebar.title("Comment Feel")
    st.sidebar.write("Welcome to the YouTube Comments Sentiment Analysis App 🎥")
    st.sidebar.write("""
**Description** 📝

This project uses a pre-trained BERT sentiment analysis model (via Hugging Face Transformers and PyTorch) to analyze the sentiment of comments from a YouTube video. Users can input a YouTube video URL, fetch the related comments, and determine their sentiment (positive, negative, or neutral).

1. Input a valid YouTube video URL in the provided text box 🔗.
2. Click "Extract Comments and Analyze" to fetch comments and analyze their sentiment 🔄.
3. View the sentiment analysis results as pie and bar charts 📊.

**Credits** 🌟

Coder: Aniket Panchal
GitHub: https://github.com/Aniket2021448

**Contact** 📧

For any inquiries or feedback, please contact aniketpanchal1257@gmail.com
""")
    st.sidebar.write("Feel free to check out my other apps :eyes:")

    # Form with a dropdown menu for other app links
    with st.sidebar.form("app_selection_form"):
        st.write("Select an App:")
        app_links = {
            "Movie-mind": "https://movie-mind.streamlit.app/",
            "find-fake-news": "https://find-fake-news.streamlit.app/"
        }
        selected_app = st.selectbox("Choose an App", list(app_links.keys()))
        submitted_button = st.form_submit_button("Go to App")

    # Handle form submission
    if submitted_button:
        selected_app_url = app_links.get(selected_app)
        if selected_app_url:
            st.sidebar.success("Redirected successfully!")
            # Render a clickable link to the selected app (opens in a new tab)
            st.markdown(f'<a href="{selected_app_url}" target="_blank">Open {selected_app}</a>', unsafe_allow_html=True)

    st.sidebar.write("In case the apps are down because of low usage,")
    st.sidebar.write("kindly reach out to me at aniketpanchal1257@gmail.com")

    st.write("Enter a YouTube video link below: :movie_camera:")
    video_url = st.text_input("YouTube Video URL:")

    if st.button("Extract Comments and Analyze"):
        video_id = extract_video_id(video_url)
        if video_id:
            comments_df, channel_name, video_description = fetch_comments(video_id)
            # Truncate very long comments; the tokenizer also truncates to 512 tokens
            comments_df['sentiment'] = comments_df['comment'].apply(lambda x: analyze_sentiment(x[:512]))
            sentiment_counts = comments_df['sentiment'].value_counts()

            st.write(f"**Channel Name:** {channel_name}")
            st.write(f"**Video Description:** {video_description}")
            st.write("Based on the top :100: comments from this video")

            # Pie chart of the sentiment distribution
            st.write("Pie chart representation :chart_with_upwards_trend:")
            fig_pie = px.pie(values=sentiment_counts.values, names=sentiment_counts.index,
                             title='Sentiment Distribution')
            st.plotly_chart(fig_pie, use_container_width=True)

            # Bar chart of the sentiment counts
            st.write("Bar plot representation :bar_chart:")
            fig_bar = px.bar(x=sentiment_counts.index, y=sentiment_counts.values,
                             labels={'x': 'Sentiment', 'y': 'Count'},
                             title='Sentiment Counts')
            st.plotly_chart(fig_bar)


if __name__ == "__main__":
    main()
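
# Optional: cached model loading. Streamlit re-runs this script on every user
# interaction, so the tokenizer and model at the top of the file are re-created each
# time. A minimal sketch using st.cache_resource (available in recent Streamlit
# releases) is shown below; it is not wired into main() and the function name is
# illustrative.
@st.cache_resource
def load_sentiment_model(model_name="nlptown/bert-base-multilingual-uncased-sentiment"):
    cached_tokenizer = AutoTokenizer.from_pretrained(model_name)
    cached_model = AutoModelForSequenceClassification.from_pretrained(model_name)
    return cached_tokenizer, cached_model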