import streamlit as st
import json
from transformers import pipeline

def load_model(model_name):
    return pipeline("text-generation", model=model_name)
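# Streamlit reruns this script on every interaction, so load_model() above reloads
# the GPT-2 pipeline each time it is called. The variant below is a minimal sketch
# of caching that load under the hypothetical name load_model_cached; it assumes
# Streamlit >= 1.18 (where st.cache_resource is available) and is not wired into
# main(), only shown for illustration.
@st.cache_resource
def load_model_cached(model_name):
    # Identical to load_model, but the returned pipeline is cached across reruns
    return pipeline("text-generation", model=model_name)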
def main():
    st.title("Prebid Module Generator")
    st.write("Enter a Prebid module, such as 'appnexusBidAdapter', and get a generated Prebid installed-modules output continuing from that setting onward. Entering '[' will generate common Prebid modules from the beginning. The model's output is currently capped at 1,000 characters.")
    st.subheader("Intended Uses")
    st.write("This model is designed to help publishers understand and explore which modules publishers most commonly use in their Prebid setup. It can serve as a reference for insights into common Prebid modules, best practices, and the different approaches publishers take across various domains. Treat it as a helpful source of inspiration and understanding of common Prebid modules, not as a substitute for thorough testing and manual review of the final modules you deploy.")
    st.write("To learn more about the default model, visit the [Prebid_Module_GPT2 model page](https://huggingface.co/PeterBrendan/Prebid_Module_GPT2). You can also refer to the [official Prebid documentation on modules](https://docs.prebid.org/dev-docs/modules/) for more information.")
    st.write("*Note:* The model may take some time to generate its output. Please allow 30-60 seconds.")
    # Default Prebid modules
    default_modules = ["[", "appnexusBidAdapter", "ttdBidAdapter", "rubiconBidAdapter", "dfpAdServerVideo", "pubmaticBidAdapter", "gptPreAuction"]

    # Create a selectbox for default prompts
    default_module = st.selectbox("Choose a default Prebid module:", default_modules)

    # Create a text input field for a custom prompt
    custom_module = st.text_input("Enter a custom Prebid module:", "")
    # Use the custom module if one was entered; otherwise fall back to the selected
    # default (the selectbox always returns a value, so checking it first would make
    # the custom input unreachable)
    if custom_module:
        user_input = custom_module
    else:
        user_input = default_module

    # Check that the user input is not empty
    if user_input:
        # Select the model
        model_name = "PeterBrendan/Prebid_Module_GPT2"

        # Load the Hugging Face model
        generator = load_model(model_name)

        # Display a 'Generating Output' message while the model runs
        output_placeholder = st.empty()
        with output_placeholder:
            st.write("Generating Output...")

        # Generate text based on the user input
        generated_text = generator(user_input, max_length=1000, num_return_sequences=1)[0]["generated_text"]

        # Clear the 'Generating Output' message and display the generated text
        output_placeholder.empty()
        st.write("Generated Text:")

        # Pretty-print the output if it parses as JSON; otherwise show the raw text
        try:
            parsed_json = json.loads(generated_text)
            beautified_json = json.dumps(parsed_json, indent=4)
            st.code(beautified_json, language="json")
        except json.JSONDecodeError:
            st.write(generated_text)
# Run the app
if __name__ == "__main__":
    main()
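
# To run this app locally, a typical setup (assuming this file is saved as app.py
# and that the transformers text-generation pipeline uses the PyTorch backend) is:
#
#   pip install streamlit transformers torch
#   streamlit run app.py
#
# The first run downloads the PeterBrendan/Prebid_Module_GPT2 weights from the
# Hugging Face Hub, so allow some extra startup time.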