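"""Streamlit pages for the menu explanation app: manual text entry of menu
items, menu image upload, and a model inference page that transcribes and
explains the detected items using a thread pool for concurrent LLM calls."""
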
import streamlit as st
from streamlit import session_state as sst


from utils import navigate_to
from inference.config import DEBUG_MODE

from inference.translate import extract_filter_img, transcribe_menu_model, classify_menu_text
from inference.preprocess_image import preprocess_text

import os
import time
import pandas as pd
from PIL import Image
from typing import List
import json
from concurrent.futures import ThreadPoolExecutor, as_completed
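
# Note: the pages below are switched by passing utils.navigate_to as an
# on_click callback to Streamlit buttons, with the target page name as args.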

# Size the worker pool at roughly 70% of the available virtual CPUs,
# keeping at least one worker (os.cpu_count() may return None on some systems)
cpu_count = os.cpu_count() or 1
pool = ThreadPoolExecutor(max_workers=max(1, int(cpu_count * 0.7)))

# Function that handles logic of explaining menu items from manual input
async def manual_input_page():
    
    """
    Function that takes text input from user in input box of streamlit, user can add multiple text boxes and submit finally.

    Parameters:
        None

    Returns:
        List[str]: List of strings, containing item names of a menu in english.
    """
    
    st.write("This is the Manual Input Page.")
    st.write("Once done, click on 'Explain My Menu' button to get explanations for each item ... ")

    inp_texts = []
    num_text_boxes = st.number_input("Number of text boxes", min_value=1, step=1)
    for i in range(num_text_boxes):
        text_box = st.text_input(f"Food item {i+1}")
        if text_box:
            inp_texts.append(text_box)

    if len(inp_texts) > 0:
        
        # Show user submit button only if they have entered some text and set text in session state
        sst["user_entered_items"] = inp_texts
        st.button("Explain My Menu",on_click=navigate_to,args=("Inference",))
    
    else:
        st.write("Please enter some items to proceed ...")
    
    
    st.button("Go back Home", on_click=navigate_to, args=("Home",))


# Function that handles logic of explaining menu items from image uploads
async def image_input_page():
    """
    Function that contains content of main page i.e., image uploader and submit button to navigate to next page.
    Upon submit , control goes to model inference 'page'.

    Parameters:
        None
    
    Returns:
        None
    """

    st.write("This is the Image Input Page.")

    # Streamlit function to upload an image from any device
    uploaded_file = st.file_uploader("Choose an image...",
                                     type=["jpg", "jpeg", "png"])
    
    # Remove the previous input image from session state if it exists
    sst.pop('input_image', None)

    # Preview and submit button, shown only once an image is uploaded
    if uploaded_file is not None:
        image = Image.open(uploaded_file)

        # Only show if user wants to see
        if st.checkbox('Show Uploaded Image'):
            st.image(image,
                    caption='Uploaded Image',
                    use_column_width=True)

        sst["input_image"] = image
        
        # Show user submit button only if they have uploaded an image
        st.button("Translate My Menu",
                  on_click = navigate_to,
                  args = ("Inference",))
        

        # Disclaimer shown to the user
        st.info("""This application is for educational purposes only. It uses AI, so its dietary
                    recommendations are not to be taken as medical advice; the author bears no responsibility
                    for incorrect dietary recommendations. Please proceed with caution.
                    """)
    
    # Let the user go back to the home page
    st.button("Go back Home", on_click=navigate_to, args=("Home",))


# Function that handles model inference
async def model_inference_page():
    
    """
    Function that pre-processes input text from state variables, does concurrent inference
    and toggles state between pages if needed.

    Parameters:
        None
    Returns:
        None

    """
    
    second_title = st.empty()
    second_title.title(" Using ML to explain your menu items ... ")
    
    # The user can either upload an image or enter text manually; check for both.
    # Initialise defaults so the count check below is safe even if neither source is present.
    filtered_text = []
    num_items_detected = 0

    if "input_image" in sst:
        image = sst["input_image"]

        msg1 = st.empty()
        msg1.write("Pre-processing and extracting text out of your image ....")
        # Extract text from the image and filter it down to menu items
        filtered_text = await extract_filter_img(image)
        num_items_detected = len(filtered_text)
    

    if "user_entered_items" in sst:
        user_text = sst["user_entered_items"]
        st.write("Pre-processing and filtering text from user input ....")

        filtered_text = [preprocess_text(ut) for ut in user_text]

        num_items_detected = len(filtered_text)
    

    # Regardless of the input source, check whether there is anything to process
    if num_items_detected == 0:
        st.write("We couldn't detect any menu items (Indian cuisine only, for now) in your input. Please go back and try a different image or entry.")

    elif num_items_detected > 0:
        st.write(f"Detected {num_items_detected} menu items from your input image ... ")
        
        msg2 = st.empty()
        msg2.write("All pre-processing done, transcribing your menu items now ....")
        st_trans_llm = time.perf_counter()
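
        # Fan the items out to the thread pool; dist_llm_inference streams a row
        # into the results table as each future completes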

        await dist_llm_inference(filtered_text)
        
        msg3 = st.empty()                                            
        msg3.write("Done transcribing ... ")
        en_trans_llm = time.perf_counter()

        msg2.empty(); msg3.empty()
        st.success("Image processed successfully! " )

        # Some basic stats for debug mode
        if DEBUG_MODE:
            llm_time_sec = en_trans_llm - st_trans_llm
            st.write("Time took to summarize by LLM {}".format(llm_time_sec))
            

    # Button to navigate back to the home page
    st.button("Go back Home", on_click=navigate_to, args=("Home",))


# Function that performs concurrent LLM inference over a list of items
async def dist_llm_inference(inp_texts: List[str]) -> None:

    """ 
    Function that performs concurrent LLM inference using threadpool. It displays 
    results of those threads that are done with execution, as a dynamic row to streamlit table, rather than 
    waiting for all threads to be done.

    Parameters:
        inp_texts: List[str], required -> List of strings, containing item names of a menu in english.

    Returns:
        None
    """
    
    # Seed the table with a header row; add_rows appends positionally, so the
    # (item, explanation) tuples added below line up under these labels.
    df = pd.DataFrame([('ITEM NAME', 'EXPLANATION')])

    sl_table = st.table(df)

    # Map each future back to the item it was submitted for, so completed
    # results can be labelled as they arrive.
    tp_futures = {pool.submit(transcribe_menu_model, mi): mi for mi in inp_texts}
    
    for tpftr in as_completed(tp_futures):

        item = tp_futures[tpftr]

        try:
            exp = tpftr.result()

            sl_table.add_rows([(item, str(exp))])

        except Exception as e:
            print("Could not add a new row dynamically, because of this error:", e)

    return