Spaces:
Sleeping
Sleeping
ablamahfadi
committed on
Commit
•
09afd55
1
Parent(s):
d235111
Upload folder using huggingface_hub
Browse files- README.md +2 -8
- Script-writing-assignment-startup-1 (2).docx +0 -0
- TomWalker-ScriptAI.py +162 -0
- app.py +80 -0
- deploy_model.py +39 -0
- flagged/log.csv +2 -0
- key.txt +72 -0
- needed.txt +23 -0
README.md
CHANGED
@@ -1,12 +1,6 @@
|
|
1 |
---
|
2 |
-
title:
|
3 |
-
|
4 |
-
colorFrom: indigo
|
5 |
-
colorTo: red
|
6 |
sdk: gradio
|
7 |
sdk_version: 3.46.0
|
8 |
-
app_file: app.py
|
9 |
-
pinned: false
|
10 |
---
|
11 |
-
|
12 |
-
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
|
|
1 |
---
|
2 |
+
title: test
|
3 |
+
app_file: app.py
|
|
|
|
|
4 |
sdk: gradio
|
5 |
sdk_version: 3.46.0
|
|
|
|
|
6 |
---
|
|
|
|
Script-writing-assignment-startup-1 (2).docx
ADDED
Binary file (18 kB). View file
|
|
TomWalker-ScriptAI.py
ADDED
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os

import boto3
import paramiko
import time

# ssh -i "my_kayname_01.pem" root@ec2-54-90-175-47.compute-1.amazonaws.com

# SECURITY FIX: AWS access/secret keys were previously hardcoded here and
# committed to the repository. Treat those keys as compromised and rotate
# them. Credentials are now read from the environment; boto3 will also fall
# back to its standard credential chain (~/.aws/credentials, instance role).
access_key = os.environ.get("AWS_ACCESS_KEY_ID", "")
secret_key = os.environ.get("AWS_SECRET_ACCESS_KEY", "")
region = os.environ.get("AWS_DEFAULT_REGION", "us-east-1")
image_id = "ami-088f3457fef51ee50"
instance_type = "r5.large"  # NOTE(review): r5.large is 2 vCPU / 16 GiB RAM — the old "32GB" comment looked wrong; confirm sizing
key_name = "my_kayname_01"  # Replace with your key pair name
security_group_ids = ["sg-0452fd39f8266308d"]
def create_instance():
    """Launch an EC2 instance and block until its status checks pass.

    Uses the module-level configuration (credentials, AMI, instance type,
    key pair, security groups).

    Returns:
        (instance_id, public_ip): the new instance's id and its public IP
        address (may be None if no public address was assigned).
    """
    # Create a Boto3 EC2 client.
    ec2 = boto3.client(
        "ec2",
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        region_name=region,
    )

    # Launch a single EC2 instance.
    response = ec2.run_instances(
        ImageId=image_id,
        InstanceType=instance_type,
        KeyName=key_name,
        MinCount=1,
        MaxCount=1,
        SecurityGroupIds=security_group_ids,  # Set the security group IDs here
    )
    instance_id = response["Instances"][0]["InstanceId"]
    print("Instance created:", instance_id)

    # The launch response may not carry the public IP yet; describe the
    # instance again to pick up the assigned address.
    response = ec2.describe_instances(InstanceIds=[instance_id])
    instance = response["Reservations"][0]["Instances"][0]
    public_ip = instance.get("PublicIpAddress")
    print(f"Instance created with IP: {public_ip}")

    # Poll until both the instance status and the system status report "ok".
    while True:
        response = ec2.describe_instance_status(InstanceIds=[instance_id])
        statuses = response.get("InstanceStatuses", [])
        if statuses:
            instance_status_value = statuses[0]["InstanceStatus"]["Status"]
            system_status_value = statuses[0].get("SystemStatus", {}).get("Status", "N/A")
            print(f"Instance Status: {instance_status_value}")
            print(f"System Status: {system_status_value}")
            if instance_status_value == "ok" and system_status_value == "ok":
                break
        else:
            print("Status checks not available for the instance.")
        # BUGFIX: the original only slept when statuses were present, so the
        # "not available" branch busy-looped and hammered the API. Sleep on
        # every non-terminal iteration instead.
        time.sleep(10)

    return instance_id, public_ip
def delete_instance(instance_id):
    """Terminate the EC2 instance identified by *instance_id* and print the
    raw termination response from the API."""
    # Module-level credentials are only read here, so no `global` statements
    # are required.
    client = boto3.client(
        "ec2",
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        region_name=region,
    )
    result = client.terminate_instances(InstanceIds=[instance_id])
    print("Instance termination response:", result)
def ssh_connect(hostname):
    """SSH into *hostname*, install and start a Jupyter Notebook server,
    and print the server's access token.

    Connects as "ec2-user" on port 22 using the key file
    "my_kayname_01.pem" from the working directory. Blocks until
    `jupyter server list` reports a running server with an http URL.
    Errors are printed, not raised.
    """
    # paramiko.util.log_to_file('paramiko.log')
    # paramiko.common.logging.basicConfig(level=paramiko.common.DEBUG)
    port = 22
    username = "ec2-user"
    # username = "ubuntu"
    # username = "root"
    key_filename = "my_kayname_01.pem"  # Path to your private key file

    ssh_client = paramiko.SSHClient()
    # Automatically add the server's host key (this is insecure and not
    # recommended for production).
    ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    try:
        # Connect to the EC2 instance.
        ssh_client.connect(hostname, port, username, key_filename=key_filename)

        # Sanity check: list the remote home directory.
        stdin, stdout, stderr = ssh_client.exec_command("ls -l")
        print(stdout.read().decode("utf-8"))

        # Install Jupyter Notebook using pip.
        stdin, stdout, stderr = ssh_client.exec_command("pip install jupyter")
        print(stdout.read().decode("utf-8"))

        # Start the Jupyter Notebook server (runs in the background; output
        # is deliberately not awaited here).
        ssh_client.exec_command(
            "jupyter notebook --ip=0.0.0.0 --port=8888 --no-browser"
        )
        print("Jupyter Notebook installed and server started.")

        # Poll until the server shows up with an http URL.
        while True:
            stdin, stdout, stderr = ssh_client.exec_command("jupyter server list")
            stdout_value = stdout.read().decode("utf-8")
            if stdout_value.find("http:") != -1:
                break
            time.sleep(3)

        print(stdout_value)

        # Extract the token from output shaped like
        # "http://0.0.0.0:8888/?token=<token> :: /home/ec2-user".
        # (The original searched "?token" and skipped 7 chars, silently
        # relying on the trailing "="; search the full marker instead.)
        p = stdout_value.find("?token=")
        if p != -1:
            temp = stdout_value[p + len("?token="):]
            end = temp.find(" :: ")
            token_value = temp[:end]
            print("token=", token_value)
    except Exception as e:
        print(f"Error: {e}")
    finally:
        # BUGFIX: the connection was previously closed only on the success
        # path, leaking the SSH session whenever an exception fired.
        ssh_client.close()
if __name__ == "__main__":
    # Provision a fresh EC2 instance, then bootstrap Jupyter on it over SSH.
    instance_id, public_ip = create_instance()
    ssh_connect(public_ip)
    # ssh_connect("44.210.108.226")
    # delete_instance(instance_id)
|
app.py
ADDED
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os

import boto3
import gradio as gr
import sagemaker
from sagemaker.huggingface import HuggingFaceModel, get_huggingface_llm_image_uri

# Deployed SageMaker predictor; populated by deploy_and_get_interface().
predictor = None
def deploy_and_get_interface(hf_model_url):
    """Deploy the Hugging Face model referenced by *hf_model_url* to a
    SageMaker endpoint and return a Gradio interface that queries it.

    Side effects: pip-installs AWS tooling, configures the default AWS
    region/output format, and rebinds the module-level `predictor` to the
    deployed endpoint's predictor.
    """
    global predictor

    os.system("pip install awscli boto3 sagemaker gradio")

    # SECURITY FIX: AWS access/secret keys were previously hardcoded here and
    # shelled out through `aws configure set`. Those keys are compromised and
    # must be rotated. Rely on the standard AWS credential chain (environment
    # variables, ~/.aws/credentials, or an instance/execution role) instead.
    os.system("aws configure set default.region us-east-1")
    os.system("aws configure set default.output json")

    # Resolve the execution role: inside SageMaker it comes from the runtime;
    # elsewhere fall back to the named IAM role.
    try:
        role = sagemaker.get_execution_role()
    except ValueError:
        iam = boto3.client('iam')
        role = iam.get_role(RoleName='Moh-work')['Role']['Arn']

    # NOTE(review): for a URL like https://huggingface.co/org/model this
    # yields "org", not "org/model"; it only picks the last path component
    # when the URL has a trailing slash. Confirm the expected URL shape.
    hf_model_id = hf_model_url.split('/')[-2]

    hub = {
        'HF_MODEL_ID': hf_model_id,
        'SM_NUM_GPUS': '1'
    }

    huggingface_model = HuggingFaceModel(
        image_uri=get_huggingface_llm_image_uri("huggingface", version="1.0.3"),
        env=hub,
        role=role
    )

    predictor = huggingface_model.deploy(
        initial_instance_count=1,
        instance_type="ml.g5.2xlarge",
        container_startup_health_check_timeout=300,
    )

    def get_prediction(input_text):
        # Query the freshly deployed endpoint.
        response = predictor.predict({"inputs": input_text})
        return response[0]['generated_text']

    iface = gr.Interface(fn=get_prediction,
                         inputs="text",
                         outputs="text",
                         title="HuggingFace Model Predictor",
                         description="Enter some text and get the model's prediction!")

    return iface
def get_prediction(input_text):
    """Send *input_text* to the deployed endpoint and return the model's
    generated text. Requires `predictor` to have been set by a prior
    deployment."""
    result = predictor.predict({"inputs": input_text})
    return result[0]['generated_text']
def trigger_deploy(hf_model_url):
    """Deploy the model at *hf_model_url*, then launch a publicly shared
    Gradio app that serves predictions from the new endpoint."""
    deploy_and_get_interface(hf_model_url)
    predictor_app = gr.Interface(fn=get_prediction,
                                 inputs="text",
                                 outputs="text",
                                 title="HuggingFace Model Predictor",
                                 description="Enter some text and get the model's prediction!")
    predictor_app.launch(share=True)
def setup_interface():
    """Launch the entry-point Gradio app: it asks for a Hugging Face model
    URL and kicks off deployment when the user submits one."""
    entry_app = gr.Interface(fn=trigger_deploy,
                             inputs="text",
                             outputs="text",
                             live=True,
                             title="Enter HuggingFace Model URL",
                             description="Please input the URL of the desired HuggingFace model and submit to deploy.")
    entry_app.launch(share=True)

setup_interface()
|
deploy_model.py
ADDED
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import json

import boto3
import sagemaker
from sagemaker.huggingface import HuggingFaceModel, get_huggingface_llm_image_uri

# Resolve the execution role: inside SageMaker it comes from the runtime;
# elsewhere fall back to looking up the named IAM role.
try:
    role = sagemaker.get_execution_role()
except ValueError:
    iam = boto3.client('iam')
    role = iam.get_role(RoleName='Moh-work')['Role']['Arn']

# Hub Model configuration. https://huggingface.co/models
hub = {
    'HF_MODEL_ID': 'WizardLM/WizardMath-7B-V1.0',
    'SM_NUM_GPUS': json.dumps(1)
}

# Build the Hugging Face model wrapper around the TGI serving image.
huggingface_model = HuggingFaceModel(
    image_uri=get_huggingface_llm_image_uri("huggingface", version="1.0.3"),
    env=hub,
    role=role,
)

# Deploy the model to a SageMaker inference endpoint.
predictor = huggingface_model.deploy(
    initial_instance_count=1,
    instance_type="ml.g5.2xlarge",
    container_startup_health_check_timeout=300,
)

# Smoke-test the endpoint with a single request.
predictor.predict({
    "inputs": "create an E-Commernce platform in Next.js 13",
})

# Tear the endpoint down again so it does not keep billing.
predictor.delete_endpoint()
flagged/log.csv
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
hf_model_url,output,flag,username,timestamp
|
2 |
+
,,,,2023-10-03 21:45:03.128289
|
key.txt
ADDED
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"Id": "key-consolepolicy-3",
|
3 |
+
"Version": "2012-10-17",
|
4 |
+
"Statement": [
|
5 |
+
{
|
6 |
+
"Sid": "Enable IAM User Permissions",
|
7 |
+
"Effect": "Allow",
|
8 |
+
"Principal": {
|
9 |
+
"AWS": "arn:aws:iam::567031380647:root"
|
10 |
+
},
|
11 |
+
"Action": "kms:*",
|
12 |
+
"Resource": "*"
|
13 |
+
},
|
14 |
+
{
|
15 |
+
"Sid": "Allow access for Key Administrators",
|
16 |
+
"Effect": "Allow",
|
17 |
+
"Principal": {
|
18 |
+
"AWS": "arn:aws:iam::567031380647:user/Mohammed"
|
19 |
+
},
|
20 |
+
"Action": [
|
21 |
+
"kms:Create*",
|
22 |
+
"kms:Describe*",
|
23 |
+
"kms:Enable*",
|
24 |
+
"kms:List*",
|
25 |
+
"kms:Put*",
|
26 |
+
"kms:Update*",
|
27 |
+
"kms:Revoke*",
|
28 |
+
"kms:Disable*",
|
29 |
+
"kms:Get*",
|
30 |
+
"kms:Delete*",
|
31 |
+
"kms:TagResource",
|
32 |
+
"kms:UntagResource",
|
33 |
+
"kms:ScheduleKeyDeletion",
|
34 |
+
"kms:CancelKeyDeletion"
|
35 |
+
],
|
36 |
+
"Resource": "*"
|
37 |
+
},
|
38 |
+
{
|
39 |
+
"Sid": "Allow use of the key",
|
40 |
+
"Effect": "Allow",
|
41 |
+
"Principal": {
|
42 |
+
"AWS": "arn:aws:iam::567031380647:user/Mohammed"
|
43 |
+
},
|
44 |
+
"Action": [
|
45 |
+
"kms:Encrypt",
|
46 |
+
"kms:Decrypt",
|
47 |
+
"kms:ReEncrypt*",
|
48 |
+
"kms:GenerateDataKey*",
|
49 |
+
"kms:DescribeKey"
|
50 |
+
],
|
51 |
+
"Resource": "*"
|
52 |
+
},
|
53 |
+
{
|
54 |
+
"Sid": "Allow attachment of persistent resources",
|
55 |
+
"Effect": "Allow",
|
56 |
+
"Principal": {
|
57 |
+
"AWS": "arn:aws:iam::567031380647:user/Mohammed"
|
58 |
+
},
|
59 |
+
"Action": [
|
60 |
+
"kms:CreateGrant",
|
61 |
+
"kms:ListGrants",
|
62 |
+
"kms:RevokeGrant"
|
63 |
+
],
|
64 |
+
"Resource": "*",
|
65 |
+
"Condition": {
|
66 |
+
"Bool": {
|
67 |
+
"kms:GrantIsForAWSResource": "true"
|
68 |
+
}
|
69 |
+
}
|
70 |
+
}
|
71 |
+
]
|
72 |
+
}
|
needed.txt
ADDED
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
the AWS account:
|
2 |
+
|
3 |
+
https://567031380647.signin.aws.amazon.com/console
|
4 |
+
|
5 |
+
|
6 |
+
User name - Mohammed
|
7 |
+
|
8 |
+
|
9 |
+
|
10 |
+
Console password - |tVY5gY[
|
11 |
+
|
12 |
+
|
13 |
+
\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
|
14 |
+
|
15 |
+
chatgpt analysis:
|
16 |
+
|
17 |
+
https://chat.openai.com/share/c0831a5a-387e-4eda-86cc-7eb84b5fe5e9
|
18 |
+
|
19 |
+
|
20 |
+
AWS Access Key ID [****************c07f]: AKIAYIBNUHKT6GS5HZ6V
|
21 |
+
AWS Secret Access Key [None]: nbB+yABZeOuyEVGaD1wS1GinB9eEawugg7WkF5hn
|
22 |
+
|
23 |
+
RoleName='Moh-work'
|