lunarflu HF staff committed on
Commit
c71d91b
1 Parent(s): 0507b21

Update app.py

Files changed (1)
  1. app.py +56 -54
app.py CHANGED
@@ -100,6 +100,62 @@ def update_google_sheet():
        print(f"------------------------------------------------------------------------")
    except Exception as e:
        print(f"update_google_sheet Error: {e}")
+
+#@tasks.loop(minutes=1) tasks.loop leads to heartbeat blocked issues (merging calculations too much with normal discord bot functions)
+def update_hub_stats():
+    try:
+        global global_df
+        print("Updating hub stats...")
+        print(f"------------------------------------------------------------------------")
+
+        for index, row in global_df.iterrows():
+            # fill blank values with n/a for now? then replace if they try to verify
+
+            # data type of value?
+            # may need this to be fairly long -> complete cycles successfully before starting new job
+
+            user = row['hf_user_name']
+            if pd.notna(user):
+                print(f"user: {user}")
+                url = f"https://huggingface.co/api/users/{user}/overview"
+                #print(f"url: {url}")
+                response = requests.get(url)
+                #print(f"response: {response}")
+                if response.status_code == 200:
+                    data = response.json()
+                    #print(f"data: {data}")
+                    likes = data["numLikes"]
+                    models = data["numModels"]
+                    datasets = data["numDatasets"]
+                    spaces = data["numSpaces"]
+                    discussions = data["numDiscussions"]
+                    papers = data["numPapers"]
+                    upvotes = data["numUpvotes"]
+
+                    # recalculate level as well (no longer only depends on discord activity)
+                    # + total_exp as well (with multipliers)
+
+                    try:
+                        global_df.loc[index, 'likes'] = likes
+                        global_df.loc[index, 'models'] = models
+                        global_df.loc[index, 'datasets'] = datasets
+                        global_df.loc[index, 'spaces'] = spaces
+                        global_df.loc[index, 'discussions'] = discussions
+                        global_df.loc[index, 'papers'] = papers
+                        global_df.loc[index, 'upvotes'] = upvotes
+                    except Exception as e:
+                        print(f"{e} error updating the dataframe")
+
+                else:
+                    print(f"Failed to retrieve data for user {user}. Status code: {response.status_code}")
+
+
+    except Exception as e:
+        print(f"Failed to parse data for user {user}. {e}")
+    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+    print(f"------------------------------------------------------------------------")
+    print(f"Hub stats successfully updated at {timestamp}! \n{global_df}")
+    print(f"------------------------------------------------------------------------")

 executor = ThreadPoolExecutor(max_workers=2)
 scheduler = BackgroundScheduler(executors={'default': executor})
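The inline comment explains the move: running this refresh inside a discord.py tasks.loop was blocking the bot's heartbeat, so the function now sits next to the APScheduler executor/scheduler setup instead. The registration call itself is not part of this hunk; the snippet below is only a minimal sketch of how such a job could be wired up, assuming an interval trigger (the 5-minute interval, the 'hub_stats' job id, and the stub body are illustrative, not taken from app.py):

from apscheduler.executors.pool import ThreadPoolExecutor
from apscheduler.schedulers.background import BackgroundScheduler

def update_hub_stats():
    # Stand-in for the function added in this commit; the real one
    # refreshes global_df from the Hugging Face Hub API.
    print("Updating hub stats...")

# Mirrors the executor/scheduler lines in the hunk above (APScheduler's own
# thread pool executor class is assumed here).
executor = ThreadPoolExecutor(max_workers=2)
scheduler = BackgroundScheduler(executors={'default': executor})

# Assumed registration: run the refresh on the scheduler's thread pool so the
# Discord event loop (and its heartbeat) is never blocked.
scheduler.add_job(update_hub_stats, trigger='interval', minutes=5, id='hub_stats')
scheduler.start()

Because the scheduler runs jobs on its own thread pool, a slow pass over global_df no longer competes with the bot's gateway heartbeat, which is the issue the removed tasks.loop decorator was causing.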
@@ -614,61 +670,7 @@ async def remove_huggingfolks():
 #---------------------------------------------------------------------------------------------
 # UPDATE dataframe WITH HFAPI INFO on a timer

-#@tasks.loop(minutes=1) tasks.loop leads to heartbeat blocked issues (merging calculations too much with normal discord bot functions)
-def update_hub_stats():
-    try:
-        global global_df
-        print("Updating hub stats...")
-        print(f"------------------------------------------------------------------------")
-
-        for index, row in global_df.iterrows():
-            # fill blank values with n/a for now? then replace if they try to verify

-            # data type of value?
-            # may need this to be fairly long -> complete cycles successfully before starting new job
-
-            user = row['hf_user_name']
-            if pd.notna(user):
-                print(f"user: {user}")
-                url = f"https://huggingface.co/api/users/{user}/overview"
-                #print(f"url: {url}")
-                response = requests.get(url)
-                #print(f"response: {response}")
-                if response.status_code == 200:
-                    data = response.json()
-                    #print(f"data: {data}")
-                    likes = data["numLikes"]
-                    models = data["numModels"]
-                    datasets = data["numDatasets"]
-                    spaces = data["numSpaces"]
-                    discussions = data["numDiscussions"]
-                    papers = data["numPapers"]
-                    upvotes = data["numUpvotes"]
-
-                    # recalculate level as well (no longer only depends on discord activity)
-                    # + total_exp as well (with multipliers)
-
-                    try:
-                        global_df.loc[index, 'likes'] = likes
-                        global_df.loc[index, 'models'] = models
-                        global_df.loc[index, 'datasets'] = datasets
-                        global_df.loc[index, 'spaces'] = spaces
-                        global_df.loc[index, 'discussions'] = discussions
-                        global_df.loc[index, 'papers'] = papers
-                        global_df.loc[index, 'upvotes'] = upvotes
-                    except Exception as e:
-                        print(f"{e} error updating the dataframe")
-
-                else:
-                    print(f"Failed to retrieve data for user {user}. Status code: {response.status_code}")
-
-
-    except Exception as e:
-        print(f"Failed to parse data for user {user}. {e}")
-    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-    print(f"------------------------------------------------------------------------")
-    print(f"Hub stats successfully updated at {timestamp}! \n{global_df}")
-    print(f"------------------------------------------------------------------------")


 #---------------------------------------------------------------------------------------------
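For reference, the per-user counters come from the Hub's public user overview endpoint that update_hub_stats queries for each hf_user_name in global_df. Below is a small standalone sketch of that call, assuming only the num* fields the function above reads; fetch_hub_overview is a hypothetical helper, not something defined in app.py:

import requests

def fetch_hub_overview(user: str) -> dict:
    # Same endpoint update_hub_stats hits for each row of global_df.
    url = f"https://huggingface.co/api/users/{user}/overview"
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    data = response.json()
    # Keep only the counters that map onto the dataframe columns.
    keys = ["numLikes", "numModels", "numDatasets", "numSpaces",
            "numDiscussions", "numPapers", "numUpvotes"]
    return {key: data.get(key, 0) for key in keys}

if __name__ == "__main__":
    # Example: the committer's own profile.
    print(fetch_hub_overview("lunarflu"))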