File size: 5,413 Bytes
691ae9d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
import os

import numpy as np
import pandas as pd
import torch
from sentence_transformers import SentenceTransformer, util
from transformers import AutoTokenizer, AutoModel

def load_data(file_obj):
    """Parse an Excel workbook into a DataFrame.

    `file_obj` may be a path or a file-like object (e.g. a Gradio upload);
    `pd.read_excel` accepts either directly.
    """
    return pd.read_excel(file_obj)

def generate_embeddings(df, model, Column):
    """Embed "title" + `Column` text for every row of `df`.

    Rows where either field is not a string (e.g. NaN cells from Excel) get
    `np.nan` instead of an embedding so downstream code can skip them.

    Args:
        df: DataFrame containing a "title" column and the column named by
            `Column`.
        model: encoder exposing `encode(text, convert_to_tensor=True)`
            (e.g. a SentenceTransformer).
        Column: name of the text column to combine with the title.

    Returns:
        The same DataFrame with a new 'Embeddings' column added in place
        (embedding tensor per valid row, NaN otherwise).
    """
    embeddings_list = []
    for _, row in df.iterrows():
        title, body = row["title"], row[Column]
        # isinstance (not type(...) == str) also accepts str subclasses.
        if isinstance(title, str) and isinstance(body, str):
            content = title + "\n" + body
            embeddings_list.append(model.encode(content, convert_to_tensor=True))
        else:
            # Missing or non-text cell: mark the row as unembeddable.
            embeddings_list.append(np.nan)
    df['Embeddings'] = embeddings_list
    return df


def process_categories(categories, model):
    """Build the category table used for matching.

    Args:
        categories: list of dicts, each with 'expert' and 'bio' keys.
        model: encoder exposing `encode(text, convert_to_tensor=True)`.

    Returns:
        DataFrame with 'Category' (the bio text), 'Expert' and 'Embeddings'
        (one embedding per bio) columns.
    """
    bios = [entry['bio'] for entry in categories]
    experts = [entry['expert'] for entry in categories]
    encoded = [model.encode(bio, convert_to_tensor=True) for bio in bios]
    return pd.DataFrame({
        'Category': bios,
        'Expert': experts,
        'Embeddings': encoded,
    })


def match_categories(df, category_df):
    """Assign each row the most similar category by cosine similarity.

    Args:
        df: DataFrame with an 'Embeddings' column (tensor per valid row,
            float NaN for rows skipped by `generate_embeddings`).
        category_df: output of `process_categories` ('Category', 'Expert',
            'Embeddings' columns).

    Returns:
        The same DataFrame with 'Category', 'Expert' and 'Score' columns
        added in place (NaN for rows without an embedding).
    """
    categories_list, experts_list, scores_list = [], [], []
    # Built lazily (once) instead of on every iteration; stays None when the
    # frame has no valid embeddings, matching the original behavior.
    category_matrix = None
    for ebd_content in df['Embeddings']:
        # Unembeddable rows were stored as float NaN.
        if isinstance(ebd_content, float):
            categories_list.append(np.nan)
            experts_list.append(np.nan)
            scores_list.append(np.nan)
            continue
        if category_matrix is None:
            category_matrix = torch.stack(list(category_df['Embeddings']), dim=0)
        cos_scores = util.cos_sim(ebd_content, category_matrix)[0]
        # argmax on the tensor replaces tolist().index(max(...)) — one pass,
        # no Python-level comparison of 0-d tensors.
        best = int(torch.argmax(cos_scores))
        categories_list.append(category_df.loc[best, 'Category'])
        experts_list.append(category_df.loc[best, 'Expert'])
        scores_list.append(float(cos_scores[best]))
    df["Category"] = categories_list
    df["Expert"] = experts_list
    df["Score"] = scores_list
    return df


def save_data(df, filename):
    """Write `df` to Excel next to the source file with a '_classified' tag.

    Uses `os.path.splitext` so only the extension boundary is touched;
    the previous `filename.replace(".", "_classified.")` rewrote EVERY dot
    in the path (e.g. '/tmp/v1.2/a.xlsx' became
    '/tmp/v1_classified.2/a_classified.xlsx').

    Returns:
        The path of the written file, e.g. 'report_classified.xlsx'.
    """
    root, ext = os.path.splitext(filename)
    new_filename = root + "_classified" + ext
    df.to_excel(new_filename, index=False)
    return new_filename


def classification(column, file_path):
    """Run the full pipeline: load, embed, match to an expert, save.

    Args:
        column: name of the text column embedded together with the title.
        file_path: path of the Excel workbook to classify.

    Returns:
        Tuple of (path of the written classified workbook, classified
        DataFrame).
    """
    # Expert directory: each bio doubles as the category description that
    # rows are matched against.
    categories = [
        {
            "expert": "mireille",
            "bio": "expert in security, interested in protection of confidentiality, privacy, integrity, authentication and authorization. Also interested in distributed trust, end-user trust models, secure element, key provisioning, Residential Gateway",
        },
        {
            "expert": "khawla",
            "bio": "expert in inter-connection of Standalone Non-Public Network (SNPN) and cyber-security related topics of such types of networks including distributed trust, distributed ledge, blockchain, authentication, private networks security, provisioning of credentials",
        },
        {
            "expert": "guillaume",
            "bio": "expert in distributed networks and communication, such as mesh network, ad-hoc networks multi-hop network, and the cyber-security of such topologies. Swarm of Drones and Unmanned Aerial Vehicles may deployed such network infrastructure. It is essential to look at how devices/UE authenticate to these networks, and assess the threats and provide counter measures",
        },
        {
            "expert": "vincent",
            "bio": "expert in USIM and related over-the-air services to manage the USIM e.g. Steering of Roaming (SoR), roaming services, network selection, UE configuration or configuration in the Secure Element or USIM",
        },
        {
            "expert": "pierre",
            "bio": "expert in eco-design, intereted in societal impact of technology, wants to push Key Value concepts to 3GPP and in particular defines Key Value Indicators (KVI) in the service requirements. Energy saving and energy efficiency are key aspects in eco-design, as well as carbon emissions and global use of the telecommunication technologies",
        },
        {
            "expert": "ly-thanh",
            "bio": "expert in service requirements of new services defines in new Study Items (SID) ad Work Items (WID). Has to detect low signals of new trends and technologies e.g. Artificial Intelligence (AI/ML), Metaverse new trust concepts, new network topologies, and new topics that may have an impact on the USIM services or over-the-air services. Thes impacts may by new threats or opportunities for the USIM/Secure Element/Card/Roaming services business.",
        },
        {
            "expert": "nicolas",
            "bio": "expert in satellite, and Non Terrestrial Network NTN, is interested in Private Networks SNPN, IoT, Inter Satellite communication, Geo Stationnary Satellite GEO, Low Orbite Satellite LEO, Medium Orbite Satellite MEO, Radio Access Network RAN",
        },
        {
            "expert": "dorin",
            "bio": "Public Safety Communication, Military Communication, Emeregency Calls, Emergency Services, Disaster Communication Access, PLMN Access During Disasters, Emergency Communication Enhancements, Ultra reliable low latency communication URLLC, Tactical Bubble, Private Network, Proximity Services PROSE, Radio Access Network RAN, Mission Critical Services MCS",
        },
    ]

    df = load_data(file_path)

    # One shared sentence encoder embeds both the rows and the category bios.
    encoder = SentenceTransformer("all-mpnet-base-v2")

    df = generate_embeddings(df, encoder, column)
    category_df = process_categories(categories, encoder)
    df = match_categories(df, category_df)

    return save_data(df, file_path), df