from flask import Flask, request, render_template, redirect, url_for
from transformers import AutoTokenizer, AutoModel
import torch
import os
os.environ["TOKENIZERS_PARALLELISM"] = "false"
app = Flask(__name__)
# Dictionary to store programs and their courses
programs = {}
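# Structure: { program_name: { course_name: [course outcome, ...] } }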
# Default model name
current_model_name = 'sentence-transformers/all-mpnet-base-v2'
# Function to load the tokenizer and model dynamically
def load_model_and_tokenizer(model_name):
    try:
        tokenizer = AutoTokenizer.from_pretrained(model_name)
        model = AutoModel.from_pretrained(model_name)
        return tokenizer, model, None
    except Exception as e:
        return None, None, str(e)
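# Usage sketch (the model name below is only an illustrative alternative):
#   tok, mdl, err = load_model_and_tokenizer('sentence-transformers/all-MiniLM-L6-v2')
# Returns (None, None, <error message>) when the model cannot be resolved or downloaded.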
# Load the initial model and tokenizer
tokenizer, model, error = load_model_and_tokenizer(current_model_name)
def mean_pooling(token_embeddings, mask):
    """Applies mean pooling to token embeddings, considering the attention mask."""
    mask = mask.unsqueeze(-1).expand(token_embeddings.size()).float()  # cast the integer attention mask to float
    sum_embeddings = torch.sum(token_embeddings * mask, dim=1)
    sum_mask = torch.clamp(mask.sum(dim=1), min=1e-9)  # Avoid division by zero
    return sum_embeddings / sum_mask
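# Shape sketch (assuming a batch of 2 sentences and the 768-dim hidden size of
# all-mpnet-base-v2): token_embeddings (2, seq_len, 768) and mask (2, seq_len)
# are pooled into sentence embeddings of shape (2, 768).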
def compute_plo_embeddings():
    """Computes embeddings for the predefined PLOs."""
    tokens = tokenizer(plos, padding=True, truncation=True, return_tensors='pt')
    mask = tokens['attention_mask']
    with torch.no_grad():
        outputs = model(**tokens)
    return mean_pooling(outputs.last_hidden_state, mask)
plos = [
    "Analyze a complex computing problem and apply principles of computing and other relevant disciplines to identify solutions.",
    "Design, implement, and evaluate a computing-based solution to meet a given set of computing requirements.",
    "Communicate effectively in a variety of professional contexts.",
    "Recognize professional responsibilities and make informed judgments in computing practice based on legal and ethical principles.",
    "Function effectively as a member or leader of a team engaged in activities appropriate to the program’s discipline.",
    "Support the delivery, use, and management of information systems within an information systems environment."
]
# Compute PLO embeddings (once at startup)
plo_embeddings = compute_plo_embeddings()
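# plo_embeddings is a tensor of shape (len(plos), hidden_size): one row per PLO.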
def get_similarity(input_sentence):
    """Calculates the similarity between an input sentence and the predefined PLOs."""
    tokens = tokenizer(input_sentence, padding=True, truncation=True, return_tensors='pt')
    mask = tokens['attention_mask']
    with torch.no_grad():
        outputs = model(**tokens)
    input_embedding = mean_pooling(outputs.last_hidden_state, mask)
    similarities = torch.nn.functional.cosine_similarity(input_embedding, plo_embeddings)
    return similarities
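# The result is a 1-D tensor of len(plos) cosine scores (roughly in [-1, 1]);
# e.g. similarities.topk(3).indices gives the indices of the three closest PLOs.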
@app.route('/')
def index():
    """Home page displaying current programs and model status."""
    return render_template('index.html', programs=programs, model_name=current_model_name)
@app.route('/set_model', methods=['POST'])
def set_model():
    """Allows users to dynamically change the model."""
    global tokenizer, model, plo_embeddings, current_model_name
    model_name = request.form['model_name']
    new_tokenizer, new_model, error = load_model_and_tokenizer(model_name)
    if error:
        # Keep the currently loaded model if the new one fails to load
        return render_template('index.html', programs=programs, model_name=current_model_name,
                               message=f"Error loading model: {error}")
    # Swap in the new model and recompute the PLO embeddings with it
    tokenizer, model = new_tokenizer, new_model
    current_model_name = model_name
    plo_embeddings = compute_plo_embeddings()
    return redirect(url_for('index'))
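# Example (hypothetical model name; the Flask dev server defaults to port 5000):
#   curl -X POST -d "model_name=sentence-transformers/all-MiniLM-L6-v2" http://127.0.0.1:5000/set_model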
@app.route('/addprogram', methods=['GET', 'POST'])
def add_program():
    """Adds a new program."""
    if request.method == 'POST':
        program_name = request.form['program_name']
        if program_name not in programs:
            programs[program_name] = {}  # Initialize an empty dictionary for courses
        return redirect(url_for('index'))
    return render_template('addprogram.html')
@app.route('/addcourse', methods=['GET', 'POST'])
def create_course():
    """Creates a new course under a specific program."""
    if request.method == 'POST':
        program_name = request.form['program']
        course_name = request.form['course_name']
        # splitlines() also handles the '\r\n' line endings sent by HTML textareas
        outcomes = [o.strip() for o in request.form['course_outcomes'].splitlines() if o.strip()]
        if program_name in programs:
            programs[program_name][course_name] = outcomes  # Add course to the selected program
        return redirect(url_for('index'))
    return render_template('addcourse.html', programs=programs)
@app.route('/match', methods=['POST'])
def match_outcomes():
    """Matches course outcomes with the predefined PLOs."""
    program_outcomes = request.form['program_outcomes']
    course_outcomes = request.form['course_outcomes'].splitlines()
    results = []
    for co in course_outcomes:
        co = co.strip()
        if co:  # Skip empty lines
            similarities = get_similarity(co)
            top_matches_indices = similarities.topk(3).indices.tolist()
            results.append({
                'course_outcome': co,
                'program_outcomes': program_outcomes,
                'best_matches': top_matches_indices
            })
    return render_template('result.html', results=results)
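# Note: 'best_matches' above holds indices into the plos list (the three closest
# PLOs), which result.html presumably maps back to the PLO text for display.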
if __name__ == '__main__':
    app.run(debug=True)
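# Usage sketch (assumes the model has finished loading; the outcome strings
# below are made-up examples):
#   with app.test_client() as client:
#       resp = client.post('/match', data={
#           'program_outcomes': 'BS Information Systems',
#           'course_outcomes': 'Design a relational database schema\nWrite SQL queries to answer business questions',
#       })
#       print(resp.status_code)  # 200 if result.html rendered successfully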