abhi99555 committed
Commit eaa32b6 · 1 Parent(s): a66a853

Create proper_main.py

Files changed (1)
  1. proper_main.py +141 -0
proper_main.py ADDED
@@ -0,0 +1,141 @@
+ # Standard-library modules are imported first so the fallback installer below
+ # can still run when a third-party package is missing.
+ import os
+ import shutil
+ import subprocess as sp
+
+ try:
+     import requests
+     from bs4 import BeautifulSoup        # imported for later use; not referenced in this module
+     from nbconvert import PythonExporter
+
+ except Exception as e:
+     print("Some modules are missing:", e)
+     print("Do you want to install them via this Python program?")
+     option = input("Y or N: ")
+     if option.lower() != "y":
+         # Anything other than an explicit "y" (including invalid input) aborts.
+         exit()
+     print("Make sure your internet connection is active; otherwise, it may throw an error. Press 'N' to exit.")
+     curr_dir = os.getcwd()
+     os.system("pip install -r " + curr_dir + "/requirements.txt")
+
+
+ def web_scrape(user_url, st):
+     # Derive the username from a profile URL of the form "https://github.com/<username>";
+     # the slice assumes the 19-character "https://github.com/" prefix.
+     username = user_url[19:]
+     if username.endswith("/"):
+         username = username[:-1]
+     print(username)
+     base_url = f"https://api.github.com/users/{username}/repos"
+
+     response = requests.get(base_url)
+     if response.status_code != 200:
+         return ("Please provide a valid link.", 1)
+     st.text("Extracting the Repos")
+     repos = []
+     repositories = response.json()
+     for repo in repositories:
+         repo_name = repo["name"]
+         repos.append("https://github.com/" + username + "/" + repo_name)
+     return repos, 0
+
+
+ def data_cloning(repos, st):
+     # Start from a clean /tmp/repos directory on every run.
+     if os.path.isdir("/tmp/repos"):
+         shutil.rmtree("/tmp/repos")
+     os.mkdir("/tmp/repos")
+
+     st.text("Cloning the Repos")
+     os.chdir("/tmp/repos")
+     for i in repos:
+         # Clone quietly; failures for individual repos are ignored.
+         sp.run(["git", "clone", i], stdout=sp.DEVNULL, stderr=sp.DEVNULL)
+
+     return os.getcwd()
+
+
+ def data_cleaning(directory, st):
+     exporter = PythonExporter()
+     st.text("Cleaning the Repos")
+
+     if len(os.listdir(os.getcwd())) == 0:
+         st.text("Not a Valid Repo")
+         return
+
+     # Walk bottom-up so emptied sub-directories can be removed afterwards.
+     for root, dirs, files in os.walk(directory, topdown=False):
+         for filename in files:
+             file_path = os.path.join(root, filename)
+
+             # if filename.endswith(".ipynb"):
+             #     output, _ = exporter.from_filename(file_path)
+             #     with open(os.path.join(root, filename[:-6] + ".py"), "w") as script_file:
+             #         script_file.write(output)
+             #     os.remove(file_path)
+
+             # Keep only Python sources and notebooks.
+             if not (filename.endswith(".py") or filename.endswith(".ipynb")):
+                 os.remove(file_path)
+
+         for dir_name in dirs:
+             dir_path = os.path.join(root, dir_name)
+             if not os.listdir(dir_path):
+                 os.rmdir(dir_path)
+
+
+ def analyse(st):
+     project_and_grades = {}
+     report_analysis = {}
+     st.text("Analysing...")
+     if len(os.listdir(os.getcwd())) == 0:
+         st.text("Not a Valid Repo")
+         return
+
+     # Run radon's cyclomatic-complexity report on each cloned repo directory.
+     for file in os.listdir(os.getcwd()):
+         print(file)
+
+         cmd = ["radon", "cc", "--total-average", "--include-ipynb", file]
+         res = sp.check_output(cmd)
+         # Parse the trailing "Average complexity: X (N.N)" line of radon's output.
+         index = res.decode().find("Average")
+         if index <= 0:
+             grade = "A"
+             score = 0.5
+         else:
+             grade = res.decode()[index:]
+             score = grade[23:-1]   # numeric part inside the parentheses
+             score = score[:3]
+             grade = grade[20]      # single-letter grade after "Average complexity: "
+
+         project_and_grades["Repo " + file] = " Score " + str(score)
+         report_analysis["Repo " + file] = [float(score)]
+
+     # Clean up the temporary clones once every repo has been scored.
+     shutil.rmtree('/tmp/repos')
+
+     return project_and_grades, report_analysis
+
+
+ def self_analysis(report_analysis):
+     # Pick the repo with the highest complexity score.
+     score = max(report_analysis.values())
+     for keyss in report_analysis:
+         if report_analysis[keyss] == score:
+             repo = keyss
+     return repo, score
+
+
+ """def main():
+     web_scrape()
+     curr_path = data_cloning()
+     data_cleaning(curr_path)
+     report = analyse()
+     print(report)
+
+ if __name__ == "__main__":
+     main()
+ """
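
For reference, a minimal sketch of how these functions could be driven from a Streamlit app, following the commented-out main() above. The streamlit import, the module name proper_main, and the example profile URL are assumptions for illustration and are not part of this commit:

import streamlit as st        # assumed UI layer; each helper expects an object with a .text() method
import proper_main as pm      # assumes this file is importable as proper_main

user_url = "https://github.com/abhi99555"        # hypothetical GitHub profile URL

repos, status = pm.web_scrape(user_url, st)      # list the user's public repos
if status == 0:                                  # 0 means the profile URL was valid
    repo_dir = pm.data_cloning(repos, st)        # clone everything under /tmp/repos
    pm.data_cleaning(repo_dir, st)               # keep only .py / .ipynb files
    grades, report = pm.analyse(st)              # radon complexity score per repo
    best_repo, best_score = pm.self_analysis(report)
    st.write(grades)
    st.write("Highest-scoring repo:", best_repo, best_score)
else:
    st.error(repos)                              # web_scrape returned an error message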