"""Utilities for syncing model weight files between a local directory and a DigitalOcean Space."""
from boto3 import session
import os

ACCESS_ID = os.getenv('ACCESS_ID', '')
SECRET_KEY = os.getenv('SECRET_KEY', '')

LOCAL_WEIGHTS_DIR = 'weights'
DO_SPACE = 'sing'
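
# ACCESS_ID and SECRET_KEY are read from the environment above; they must be
# set to valid DigitalOcean Spaces credentials before any of the upload or
# download helpers will work (the names below are placeholders, not real keys):
#   ACCESS_ID=<spaces-access-key>  SECRET_KEY=<spaces-secret-key>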


def upload_to_do(file_path):
    """Upload a local file to the DO Space, keyed by its basename."""
    boto_session = session.Session()
    client = boto_session.client('s3',
                                 region_name='nyc3',
                                 endpoint_url='https://nyc3.digitaloceanspaces.com',
                                 aws_access_key_id=ACCESS_ID,
                                 aws_secret_access_key=SECRET_KEY)
    filename_only = os.path.basename(file_path)
    # Upload the file to the Space. upload_file returns None on success and
    # raises on failure, so return the object key that was written instead.
    client.upload_file(file_path, DO_SPACE, filename_only)
    return filename_only
    
def download_from_do(file_key):
    """Download a single object from the DO Space into the local 'downloads' directory."""
    boto_session = session.Session()
    client = boto_session.client('s3',
                                 region_name='nyc3',
                                 endpoint_url='https://nyc3.digitaloceanspaces.com',
                                 aws_access_key_id=ACCESS_ID,
                                 aws_secret_access_key=SECRET_KEY)

    # Ensure the downloads directory exists
    downloads_dir = 'downloads'
    os.makedirs(downloads_dir, exist_ok=True)

    # Set the full local path for the download
    full_local_path = os.path.join(downloads_dir, file_key)

    # Download the file from the Space
    client.download_file(DO_SPACE, file_key, full_local_path)

    # Verify the download
    if os.path.exists(full_local_path):
        print(f"File downloaded successfully to {full_local_path}")
        return full_local_path
    else:
        print("Download failed.")
        return None

def get_local_models(prefix):
    """Get list of model files starting with prefix in the local weights directory."""
    if not os.path.isdir(LOCAL_WEIGHTS_DIR):
        return []
    return [f for f in os.listdir(LOCAL_WEIGHTS_DIR) if f.startswith(prefix) and f.endswith('.pth')]

def get_do_models(client, prefix):
    """Get list of model files starting with prefix in the DO Space."""
    paginator = client.get_paginator('list_objects')
    page_iterator = paginator.paginate(Bucket=DO_SPACE, Prefix=prefix)

    models = []
    for page in page_iterator:
        # Pages with no matching keys have no 'Contents' entry, so default to an empty list.
        models.extend([obj['Key'] for obj in page.get('Contents', []) if obj['Key'].endswith('.pth')])
    return models

def sync_missing_models(client, local_models, do_models):
    """Download model files that exist in the DO Space but not locally."""
    missing_models = set(do_models) - set(local_models)
    print('Missing models:', missing_models)
    # Make sure the local weights directory exists before downloading into it.
    os.makedirs(LOCAL_WEIGHTS_DIR, exist_ok=True)
    for model in missing_models:
        client.download_file(DO_SPACE, model, os.path.join(LOCAL_WEIGHTS_DIR, model))
        print(f"Downloaded {model} from DO Space to local weights directory.")


def list_models(email_prefix):
    """Sync models matching email_prefix from the DO Space and return the updated local list."""
    local_models = get_local_models(email_prefix)

    # Initialize DO S3 client
    boto_session = session.Session()
    client = boto_session.client('s3',
                                 region_name='nyc3',
                                 endpoint_url='https://nyc3.digitaloceanspaces.com',
                                 aws_access_key_id=ACCESS_ID,
                                 aws_secret_access_key=SECRET_KEY)

    do_models = get_do_models(client, email_prefix)
    sync_missing_models(client, local_models, do_models)

    # Return the updated list of local models after syncing
    updated_local_models = get_local_models(email_prefix)
    print(updated_local_models)
    return updated_local_models


def download_from_do_with_prefix(prefix):
    """Download every object under the given prefix into the local 'downloads' directory."""
    boto_session = session.Session()
    client = boto_session.client('s3',
                                 region_name='nyc3',
                                 endpoint_url='https://nyc3.digitaloceanspaces.com',
                                 aws_access_key_id=ACCESS_ID,
                                 aws_secret_access_key=SECRET_KEY)

    # Ensure the downloads directory exists
    downloads_dir = 'downloads'
    os.makedirs(downloads_dir, exist_ok=True)

    # List objects in the Space with the specified prefix
    # (note: list_objects returns at most 1,000 keys per call).
    response = client.list_objects(Bucket=DO_SPACE, Prefix=prefix)

    downloaded_files = []
    if 'Contents' in response:
        for obj in response['Contents']:
            file_key = obj['Key']

            # Set the full local path for the download
            full_local_path = os.path.join(downloads_dir, os.path.basename(file_key))

            # Download the file from the Space
            client.download_file(DO_SPACE, file_key, full_local_path)

            # Verify the download and add to the list if successful
            if os.path.exists(full_local_path):
                print(f"File downloaded successfully to {full_local_path}")
                downloaded_files.append(full_local_path)
            else:
                print(f"Download failed for {file_key}.")
    else:
        print("No files found with the specified prefix.")

    return downloaded_files if downloaded_files else None


def ensure_model_in_weights_dir(model_name):
    """Make sure model_name is present in the local weights directory, downloading it if needed."""
    model_path = os.path.join(LOCAL_WEIGHTS_DIR, model_name)

    # Check if the model already exists
    if os.path.exists(model_path):
        print(f"Model {model_name} already exists in {LOCAL_WEIGHTS_DIR}.")
        return True

    # If the model does not exist, attempt to download it
    print(f"Model {model_name} not found in {LOCAL_WEIGHTS_DIR}. Attempting to download...")

    # Initialize a session using DigitalOcean Spaces
    boto_session = session.Session()
    client = boto_session.client('s3',
                                 region_name='nyc3',
                                 endpoint_url='https://nyc3.digitaloceanspaces.com',
                                 aws_access_key_id=ACCESS_ID,
                                 aws_secret_access_key=SECRET_KEY)

    # Ensure the weights directory exists
    os.makedirs(LOCAL_WEIGHTS_DIR, exist_ok=True)

    # Attempt to download the model file
    try:
        client.download_file(DO_SPACE, model_name, model_path)
        print(f"Model {model_name} downloaded successfully to {model_path}.")
        return True
    except Exception as e:
        print(f"Failed to download {model_name}: {e}")
        return False
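

if __name__ == '__main__':
    # Minimal usage sketch, not part of the original module: the model name and
    # prefix below are hypothetical placeholders, and valid credentials must be
    # set via ACCESS_ID / SECRET_KEY before any call will actually succeed.
    example_model = 'example_user_model.pth'  # hypothetical object key
    if ensure_model_in_weights_dir(example_model):
        print(f"{example_model} is available in {LOCAL_WEIGHTS_DIR}/")

    # Other entry points, shown here only as illustrations:
    # upload_to_do('weights/example_user_model.pth')
    # download_from_do_with_prefix('example_user')
    # list_models('example_user')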