RAHULJUNEJA33 committed
Commit cc39f3a · verified
1 Parent(s): 2821442

Update app.py

Files changed (1)
  1. app.py +83 -23
app.py CHANGED
@@ -1,39 +1,97 @@
- from transformers import pipeline
  import streamlit as st
  import os

  # Load Hugging Face Token from Environment (Set this in Hugging Face Spaces Secrets)
- HF_TOKEN = os.getenv("HF_TOKEN")

- # Select model: Use a quantized, smaller model like DistilGPT-2
- MODEL_NAME = "distilgpt2"  # Change to any of the small models mentioned above
-
- # Load Model
  @st.cache_resource
  def load_model():
      try:
          return pipeline("text-generation", model=MODEL_NAME, token=HF_TOKEN)
      except Exception as e:
-         st.error(f"❌ Error loading model: {str(e)}")
-         return None

  generator = load_model()

- # Function to Generate Functional Requirement Document
  def generate_functional_requirements(topic):
      if generator is None:
-         return "Error: Model not loaded properly."

      prompt = f"Generate a comprehensive functional requirement document for {topic} in the banking sector."

-     # Generate content
-     output = generator(prompt, max_length=800, do_sample=True, temperature=0.8)
-     return output[0]['generated_text']

- # Streamlit UI
  def main():
-     st.title("📄 AI-Powered Functional Requirement Generator for Banking")

      banking_topics = [
          "Core Banking System",
          "Loan Management System",
@@ -46,18 +104,20 @@ def main():
          "Wealth & Portfolio Management"
      ]

      topic = st.selectbox("Select a Banking Functional Requirement Topic", banking_topics)

      if st.button("Generate Functional Requirement Document"):
-         with st.spinner("Generating... This may take a while."):
-             content = generate_functional_requirements(topic)
              if "Error" in content:
-                 st.error(content)
              else:
-                 filename = "functional_requirement.pdf"
-                 save_to_pdf(content, filename)
-                 st.success("✅ Functional Requirement Document Generated!")
-                 st.download_button(label="📥 Download PDF", data=open(filename, "rb"), file_name=filename, mime="application/pdf")

  if __name__ == "__main__":
-     main()

+ # ---------------------------------------------------------------------------
+ # Model Recommendations and Descriptions:
+ # ---------------------------------------------------------------------------
+
+ # 1. **DistilGPT-2**: A distilled, lighter version of GPT-2.
+ #    - **Size**: ~500 MB (smaller and faster than full GPT-2).
+ #    - **Performance**: Great for text generation tasks with reduced inference time.
+ #    - **Use case**: If you need a lightweight, efficient model with decent quality.
+ #    - **Hugging Face Model**: "distilgpt2"
+ #
+ # 2. **GPT-Neo 1.3B**: A smaller alternative to GPT-3 with reasonable performance.
+ #    - **Size**: ~5 GB.
+ #    - **Performance**: Powerful text generation model with good results on a wide variety of tasks.
+ #    - **Use case**: Ideal for slightly larger tasks where better quality than DistilGPT-2 is needed.
+ #    - **Hugging Face Model**: "EleutherAI/gpt-neo-1.3B"
+ #
+ # 3. **Mistral 7B**: Open-source 7B-parameter instruct model, strong quality for its size.
+ #    - **Size**: ~14 GB in fp16 (roughly 4-5 GB only when 4-bit quantized).
+ #    - **Performance**: Comparable to much larger models, with good speed and quality.
+ #    - **Use case**: Ideal when higher-quality generation justifies the extra memory.
+ #    - **Hugging Face Model**: "mistralai/Mistral-7B-Instruct-v0.1"
+ #
+ # 4. **TinyBERT or MiniLM**: Even smaller models, excellent for lighter tasks.
+ #    - **Size**: ~100 MB.
+ #    - **Performance**: Great for tasks like classification or sentence embeddings, but not suitable for long text generation.
+ #    - **Use case**: Perfect for applications requiring minimal memory and fast processing, but not for full-fledged generation.
+ #    - **Hugging Face Model**: "sentence-transformers/all-MiniLM-L6-v2"
+
+ # Choose your model from the above options:
+ MODEL_NAME = "distilgpt2"  # Change this to one of the other models based on your needs.
+
+ # ---------------------------------------------------------------------------
+ # Code Below to Load, Generate, and Save Functional Requirement Documents
+ # ---------------------------------------------------------------------------
+
  import streamlit as st
+ from transformers import pipeline
+ from reportlab.lib.pagesizes import letter
+ from reportlab.pdfgen import canvas
  import os

  # Load Hugging Face Token from Environment (Set this in Hugging Face Spaces Secrets)
+ HF_TOKEN = os.getenv("HF_TOKEN")  # Token from your Hugging Face account for accessing models.

+ # Load Model - Using Streamlit's caching to improve performance and load model only once
  @st.cache_resource
  def load_model():
      try:
+         # Initialize the Hugging Face pipeline with the selected model for text generation.
          return pipeline("text-generation", model=MODEL_NAME, token=HF_TOKEN)
      except Exception as e:
+         st.error(f"❌ Error loading model: {str(e)}")  # Error handling for failed model loading
+         return None  # Return None if model loading fails

+ # Initialize the model generator
  generator = load_model()

+ # Function to generate functional requirement document based on the selected topic
  def generate_functional_requirements(topic):
      if generator is None:
+         return "Error: Model not loaded properly."  # Return error if the model is not loaded

+     # Define the prompt based on the selected topic for generating functional requirements
      prompt = f"Generate a comprehensive functional requirement document for {topic} in the banking sector."

+     # Use the text generation model to generate content
+     output = generator(prompt, max_length=800, do_sample=True, temperature=0.8)  # Generate text with certain constraints
+     return output[0]['generated_text']  # Return the generated text
+
+ # Function to save generated content as a PDF
+ def save_to_pdf(content, filename):
+     c = canvas.Canvas(filename, pagesize=letter)  # Initialize canvas to create a PDF
+     c.setFont("Helvetica", 10)  # Set the font for the PDF
+
+     text = c.beginText(40, 750)  # Set the starting position for the text on the PDF
+     text.setLeading(14)  # Set line height for text
+
+     # Iterate through each line of the content and add it to the PDF
+     for line in content.split("\n"):
+         text.textLine(line)  # Add the line to the PDF
+         if text.getY() < 50:  # Check if the text has gone below the bottom margin
+             c.drawText(text)  # Draw the current text
+             c.showPage()  # Create a new page in the PDF
+             text = c.beginText(40, 750)  # Reset the text position for the new page
+             text.setLeading(14)  # Reset the line height
+
+     c.drawText(text)  # Draw any remaining text
+     c.save()  # Save the PDF file

+ # Streamlit UI - User Interface for interacting with the app
  def main():
+     st.title("📄 AI-Powered Functional Requirement Generator for Banking")  # Title for the app

+     # Define a list of banking topics that users can select from
      banking_topics = [
          "Core Banking System",
          "Loan Management System",

          "Wealth & Portfolio Management"
      ]

+     # Dropdown menu to select a topic
      topic = st.selectbox("Select a Banking Functional Requirement Topic", banking_topics)

+     # Button to trigger the document generation
      if st.button("Generate Functional Requirement Document"):
+         with st.spinner("Generating... This may take a while."):  # Show a loading spinner while generating
+             content = generate_functional_requirements(topic)  # Generate the content based on the selected topic
              if "Error" in content:
+                 st.error(content)  # Show an error message if the content generation fails
              else:
+                 filename = "functional_requirement.pdf"  # Set the filename for the generated PDF
+                 save_to_pdf(content, filename)  # Save the content as a PDF file
+                 st.success("✅ Functional Requirement Document Generated!")  # Show success message
+                 st.download_button(label="📥 Download PDF", data=open(filename, "rb"), file_name=filename, mime="application/pdf")  # Provide a download link for the PDF

  if __name__ == "__main__":
+     main()  # Run the main function to start the app
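
Note for reviewers: the rewritten app.py now imports reportlab (reportlab.lib.pagesizes, reportlab.pdfgen) in addition to streamlit and transformers, so the Space's requirements.txt presumably needs those packages as well; that file is not touched by this commit. The snippet below is a hypothetical standalone smoke test, not part of the commit: it exercises the same two steps the new code performs, a transformers text-generation pipeline call followed by reportlab's text-object pagination, using the same distilgpt2 default but a shorter max_length for a quick run. The output filename smoke_test.pdf and the example topic are illustrative only.

# Hypothetical standalone smoke test (not part of this commit): exercises the same
# pipeline call and reportlab page-break logic that the new app.py uses, minus Streamlit.
from transformers import pipeline
from reportlab.lib.pagesizes import letter
from reportlab.pdfgen import canvas

generator = pipeline("text-generation", model="distilgpt2")  # same default model as app.py
prompt = "Generate a comprehensive functional requirement document for Core Banking System in the banking sector."
content = generator(prompt, max_length=200, do_sample=True, temperature=0.8)[0]["generated_text"]

c = canvas.Canvas("smoke_test.pdf", pagesize=letter)  # illustrative output filename
c.setFont("Helvetica", 10)
text = c.beginText(40, 750)
text.setLeading(14)
for line in content.split("\n"):
    text.textLine(line)
    if text.getY() < 50:  # near the bottom margin: flush the page and start a new one
        c.drawText(text)
        c.showPage()
        text = c.beginText(40, 750)
        text.setLeading(14)
c.drawText(text)
c.save()

Also worth noting: distilgpt2 inherits GPT-2's 1,024-token context window, and max_length counts prompt tokens as well, so the app's max_length=800 bounds prompt plus generated text rather than the generated text alone.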