import streamlit as st
import twint
import pandas as pd
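
# Assumed setup (not shown in the original file): the app needs the
# streamlit, twint, and pandas packages installed, e.g.
#   pip install streamlit pandas twint
# and is started from the command line with
#   streamlit run app.py   # "app.py" is a placeholder for this file's name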


# app title, banner image, and short description
st.header("Data Scraper App")
st.image('twitter.jpg')
st.subheader('''
A simple app to scrape data from Twitter.
''')

# form to collect the search query and other options
my_form = st.form(key='Twitter_form')
search_query = my_form.text_input('Input your search query')
data_limit = my_form.slider('How many tweets do you want to get?',
                            10, 3000, value=100, step=10)

output_csv = my_form.radio('Save data to a CSV file?', ['Yes', 'No'])
file_name = my_form.text_input('Name the CSV file:')
submit = my_form.form_submit_button(label='Search')
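
# Note: form_submit_button returns True only on the rerun triggered by clicking
# "Search", so the scraping logic below executes once per form submission.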

# function to show output in a pandas dataframe with specific columns
def twint_to_pd(columns):
    return twint.output.panda.Tweets_df[columns]

# configure twint to search for the query
if submit:
    config = twint.Config()
    config.Search = search_query
    config.Limit = data_limit
    config.Pandas = True
    if output_csv == "Yes":
        config.Store_csv = True
        config.Output = '{}.csv'.format(file_name)
    twint.run.Search(config)
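    # With config.Pandas enabled, twint.run.Search also populates the in-memory
    # dataframe twint.output.panda.Tweets_df, which the twint_to_pd() helper
    # reads when no CSV file is written.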

    st.subheader("Results: Sample Data")
    if output_csv == "Yes":
        # show data in pandas dataframe
        data = pd.read_csv('{}.csv'.format(file_name), usecols=['date', 'username', 'tweet'])
        st.table(data)
    else:
        data = twint_to_pd(['date', 'username', 'tweet'])
        st.table(data)

    # let the user download the scraped data as a CSV file
    @st.cache
    def convert_df(df):
        # IMPORTANT: Cache the conversion to prevent computation on every rerun
        return df.to_csv().encode('utf-8')
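
    # (In newer Streamlit releases st.cache is deprecated in favour of
    # st.cache_data; the call above follows the older API used here.)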

    csv = convert_df(data)

    st.download_button(
        label="Download scraped data as CSV",
        data=csv,
        file_name='{}.csv'.format(file_name),
        mime='text/csv',
    )
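
    # Note: if the CSV file name field is left empty, the output and download
    # above fall back to the literal name ".csv"; a guard such as
    # file_name = file_name or 'tweets' (a hypothetical default) would avoid that.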