# BootesVoid's picture
# Upload folder using huggingface_hub
# f1f70cc verified
import os
from urllib.parse import parse_qs, urlparse

import gradio as gr
import pandas as pd
from googleapiclient.discovery import build
from transformers import pipeline
# --- YouTube Data API credentials and model setup ---
# SECURITY(review): this API key was committed in plain text and must be
# considered leaked -- rotate it, set YOUTUBE_API_KEY in the environment,
# and then delete the hard-coded fallback below.
api_key = os.environ.get("YOUTUBE_API_KEY", "AIzaSyBUX6ak7fd2KEh-2aUM_aH26jVEw6Wj5V4")
youtube = build('youtube', 'v3', developerKey=api_key)

# Sentiment-analysis pipeline using the transformers library's default model.
sentiment_pipeline = pipeline("sentiment-analysis")
def get_video_comments(video_id, max_length=512):
    """Fetch every top-level comment for a YouTube video.

    Pages through the commentThreads endpoint until no ``nextPageToken``
    is returned, truncating each comment so it stays within the
    downstream sentiment model's input limit.

    Args:
        video_id: YouTube video ID (the ``v`` query parameter of a watch URL).
        max_length: Maximum number of characters kept per comment (default 512).

    Returns:
        list[str]: Truncated top-level comment texts in API order.

    Raises:
        googleapiclient.errors.HttpError: if the API call fails, e.g. when
            comments are disabled for the video or the quota is exhausted.
    """
    comments = []
    next_page_token = None
    while True:
        response = youtube.commentThreads().list(
            part='snippet',
            videoId=video_id,
            # API maximum per page; the default is 20, which multiplies
            # quota-costing requests for videos with many comments.
            maxResults=100,
            pageToken=next_page_token if next_page_token else ''
        ).execute()
        for item in response['items']:
            text = item['snippet']['topLevelComment']['snippet']['textDisplay']
            # Truncate so the sentiment model's input limit is not exceeded.
            comments.append(text[:max_length])
        next_page_token = response.get('nextPageToken')
        if not next_page_token:
            break
    return comments
def analyze_sentiment(comments):
    """Run the sentiment pipeline over a list of comment strings.

    Args:
        comments: List of comment texts (already truncated by the fetcher).

    Returns:
        A list of ``{'label': ..., 'score': ...}`` dicts, one per comment,
        or an empty list when there is nothing to analyze (the pipeline
        is not invoked on empty input).
    """
    # Guard clause: skip the model entirely when there are no comments.
    if not comments:
        return []
    return sentiment_pipeline(comments)
def process_video(yt_link):
video_id = yt_link.split("=")[-1] # Extract video ID from the link
comments = get_video_comments(video_id)
sentiment_results = analyze_sentiment(comments)
# Create a DataFrame from the comments and sentiment analysis results
df = pd.DataFrame({
'Comments': comments,
'Sentiment': [result['label'] for result in sentiment_results],
'Score': [result['score'] for result in sentiment_results]
})
return df
# Gradio UI: one textbox in, one dataframe of per-comment sentiment out.
iface = gr.Interface(
    fn=process_video,
    inputs=gr.Textbox(lines=2, placeholder="Enter YouTube video URL here..."),
    outputs="dataframe",
    title="YouTube Video Comments Sentiment Analysis",
    description="Enter a YouTube video link to analyze the sentiment of its comments.",
)

# Start the app; share=True also exposes a temporary public URL.
iface.launch(share=True)