import streamlit as st
import pandas as pd
import altair as alt
from transformers import pipeline
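
# Streamlit page: the user types a sentence containing a [MASK] token and a
# Hugging Face fill-mask pipeline ranks candidate tokens to fill the gap.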

def app():
    st.markdown('## Fill-Mask task')
    st.write('Write a sentence with a [MASK] token to fill')
    st.markdown('## ')  # spacer between the header and the widgets

    # Cache the loaded pipeline so re-runs don't reload the model weights.
    # (Newer Streamlit releases replace st.cache with st.cache_resource.)
    @st.cache(allow_output_mutation=True, suppress_st_warning=True, show_spinner=False)
    def get_model(model):
        return pipeline('fill-mask', model=model)
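
    # Each pipeline call returns a list of candidate dicts with keys 'sequence',
    # 'score', 'token' and 'token_str', e.g. {'token_str': 'musk', 'score': 0.9, ...}
    # (illustrative values).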

    def create_graph(answer):
        # Plot each candidate token against its predicted score.
        tokens = [i['token_str'] for i in answer]
        scores = [i['score'] for i in answer]
        chart_data = pd.DataFrame({'token': tokens, 'score': scores})
        # Vertical bar chart, bars sorted by descending score
        chart = (
            alt.Chart(chart_data)
            .mark_bar(color='#d7abf5')
            .encode(
                x=alt.X('token', type='nominal', title='', sort='-y'),
                y=alt.Y('score', type='quantitative', title='Score'),
            )
        )
        st.altair_chart(chart, use_container_width=True)

    # Two-column layout: prompt input (wide) on the left, model picker on the right
    col1, col2 = st.columns([2, 1])

    with col1:
        prompt = st.text_area('Your prompt here', 'Who is Elon [MASK]?')

    with col2:
        select_model = st.radio(
            'Select the model to use:',
            ('BERT cased', 'BERT uncased'), index=1)

        # Map the radio label to the Hugging Face checkpoint name
        if select_model == 'BERT cased':
            model = 'bert-base-cased'
        else:
            model = 'bert-base-uncased'
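
        # Other fill-mask checkpoints (e.g. 'distilbert-base-uncased') could be
        # added here; RoBERTa-style models expect '<mask>' rather than '[MASK]'.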

        with st.spinner('Loading model... (this may take a while)'):
            unmasker = get_model(model)
        st.success('Model loaded correctly!')

        # Show a transient status message while the pipeline runs on the prompt
        gen = st.info('Generating predictions...')
        answer = unmasker(prompt)
        gen.empty()

    # Render the score chart under the prompt box
    with col1:
        create_graph(answer)
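

# Minimal sketch for previewing this page on its own, outside the project's
# MultiPage wrapper (assumption: the file is launched with `streamlit run <file>.py`):
if __name__ == '__main__':
    app()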