abdull4h committed
Commit 728c8e6 · verified
1 Parent(s): d47ff86

Update app.py

Files changed (1)
app.py +42 -17
app.py CHANGED
@@ -1,8 +1,17 @@
 import gradio as gr
 import os
 from huggingface_hub import login, hf_hub_download
-from huggingface_hub import spaces  # <-- new import for ZeroGPU
+
+# Import spaces as a separate package (not from huggingface_hub)
+try:
+    import spaces
+    SPACES_AVAILABLE = True
+    print("Successfully imported spaces package")
+except ImportError:
+    SPACES_AVAILABLE = False
+    print("Spaces package not available, will run without GPU acceleration")
+
+# Try to authenticate explicitly
 hf_token = os.environ.get("HF_TOKEN")
 if hf_token:
     print(f"Found HF_TOKEN in environment (length: {len(hf_token)})")
@@ -14,12 +23,8 @@ if hf_token:
 else:
     print("No HF_TOKEN found in environment variables")
 
-@spaces.GPU
-def check_model_access():
-    """
-    Because we've decorated this function with @spaces.GPU,
-    a ZeroGPU session is allocated right before the function runs.
-    """
+# Define model access function
+def check_model_access_impl():
     model_id = "CohereForAI/c4ai-command-r7b-arabic-02-2025"
     try:
         print(f"Attempting to access {model_id}...")
@@ -42,26 +47,46 @@ def check_model_access():
         print(f"Error accessing model: {error_msg}")
         return f"❌ Error accessing {model_id}: {error_msg}"
 
+# Use spaces.GPU decorator if available, otherwise use function directly
+if SPACES_AVAILABLE:
+    try:
+        @spaces.GPU
+        def check_model_access():
+            return check_model_access_impl()
+    except Exception as e:
+        print(f"Error with spaces.GPU: {e}")
+        # Fallback to non-GPU version
+        def check_model_access():
+            return check_model_access_impl()
+else:
+    # No spaces package, use direct implementation
+    def check_model_access():
+        return check_model_access_impl()
+
+# Create Gradio interface
 with gr.Blocks() as demo:
     gr.Markdown("# CohereForAI/c4ai-command-r7b-arabic-02-2025 Access Test")
-
+
+    # Show status information
     auth_status = "✅ Token found" if hf_token else "❌ No token found"
-    gr.Markdown(f"**Authentication Status:** {auth_status}")
-
+    spaces_status = "✅ Available" if SPACES_AVAILABLE else "❌ Not available"
+    gr.Markdown(f"**Authentication Status:** {auth_status} | **Spaces GPU Support:** {spaces_status}")
+
     check_btn = gr.Button("Test Model Access")
     result = gr.Textbox(label="Result", lines=10)
-
     check_btn.click(fn=check_model_access, inputs=None, outputs=result)
-
+
     gr.Markdown("""
     ## Next Steps
+
     If the test above is successful, it confirms you can access the model.
     To set up a fully functional demo:
-
-    1. Configure your Space to use ZeroGPU hardware in Settings → Hardware
-    2. Update your code to use the model for text generation with @spaces.GPU
-
+
+    1. Make sure you have the `spaces` package installed in requirements.txt
+    2. Configure your Space to use ZeroGPU hardware in Settings → Hardware
+    3. Update your code to use the model for text generation with @spaces.GPU
+
     This test simply checks if your authentication is working correctly with the model repository.
     """)
 
-demo.launch()
+demo.launch()
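
The "Next Steps" list in the diff ends at a checklist. As a rough sketch only (not part of this commit), step 3 might look something like the following once `spaces` is listed in requirements.txt and the Space runs on ZeroGPU hardware; the model-loading and generation details below are assumptions for illustration.

# requirements.txt is assumed to include: gradio, spaces, transformers, torch, accelerate
import spaces
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "CohereForAI/c4ai-command-r7b-arabic-02-2025"

# Load once at startup; the GPU is only needed inside the decorated call.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, torch_dtype=torch.float16)

@spaces.GPU  # a ZeroGPU device is allocated only while this function runs
def generate(prompt: str) -> str:
    # Move the model and inputs to the GPU, generate, and decode.
    inputs = tokenizer(prompt, return_tensors="pt").to("cuda")
    model.to("cuda")
    output = model.generate(**inputs, max_new_tokens=256)
    return tokenizer.decode(output[0], skip_special_tokens=True)

A function like this could then be wired to a prompt textbox and button in the same gr.Blocks layout used above, alongside (or in place of) check_model_access.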