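"""DUCE-CLI: log into Udemy, scrape free-coupon courses from the configured
sites, and enroll the logged-in account in them.

Cookies from a successful login are cached in udemy_cookies.pkl so later runs
can skip the email/password prompt.
"""
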
import threading
import time
import traceback
import pickle
import os
import cloudscraper  # Required for Udemy session handling
from base import VERSION, LoginException, Scraper, Udemy, scraper_dict
from colors import bw, by, fb, fg, fr
from requests.utils import cookiejar_from_dict, dict_from_cookiejar

# DUCE-CLI

COOKIE_FILE = "udemy_cookies.pkl"  # File to store cookies


def save_cookies(session):
    """Save session cookies as a dictionary"""
    with open(COOKIE_FILE, "wb") as f:
        pickle.dump(dict_from_cookiejar(session.cookies), f)  # Convert cookies to a dictionary


def load_cookies():
    """Load cookies from a file if it exists and return as a dictionary"""
    if os.path.exists(COOKIE_FILE):
        with open(COOKIE_FILE, "rb") as f:
            cookies = pickle.load(f)
            if isinstance(cookies, dict):  # Ensure cookies are stored as a dictionary
                return cookies
    return None


def create_scraping_thread(site: str):
    """Creates a separate thread to scrape each site"""
    code_name = scraper_dict[site]
    try:
        t = threading.Thread(target=getattr(scraper, code_name), daemon=True)
        t.start()

        # The scraper exposes a "<code_name>_length" counter: it stays 0 while the
        # site is still being scraped and is set to -1 if the scraper failed.
        while getattr(scraper, f"{code_name}_length") == 0:
            time.sleep(0.1)  # Poll for completion; the sleep keeps CPU usage low

        if getattr(scraper, f"{code_name}_length") == -1:
            raise Exception(f"Error in: {site}")

        print(f"Scraping {site} completed successfully.")

    except Exception:
        error = traceback.format_exc()
        print(f"Error in {site}: {error}")


############## MAIN #############
udemy = Udemy("cli")
udemy.load_settings()
login_title, main_title = udemy.check_for_update()

if "Update" in login_title:
    print(login_title)

login_successful = False
session = cloudscraper.create_scraper()  # Use cloudscraper instead of requests.Session()

# Attempt to use saved cookies first
cookies = load_cookies()
if cookies:
    print("Trying to log in using saved cookies...")
    try:
        session.cookies = cookiejar_from_dict(cookies)  # Convert dict back to cookie jar
        udemy.client = session  # Attach session to Udemy object
        udemy.cookie_dict = cookies  # Set the cookie dictionary
        udemy.get_session_info()  # Check if session is valid
        print(f"Logged in as {udemy.display_name} using cookies βœ…")
        login_successful = True
    except LoginException:
        print("Cookies expired or invalid. Switching to manual login.")
        os.remove(COOKIE_FILE)  # Delete invalid cookies

# If cookies are not valid, use email/password login
if not login_successful:
    while not login_successful:
        try:
            if udemy.settings.get("email") and udemy.settings.get("password"):
                email, password = udemy.settings["email"], udemy.settings["password"]
                print(f"Trying to log in using saved credentials: {email}")
            else:
                email = input("Email: ")
                password = input("Password: ")

            udemy.manual_login(email, password)
            udemy.get_session_info()  # Ensure login was successful

            # Save successful login credentials
            udemy.settings["email"], udemy.settings["password"] = email, password
            udemy.save_settings()

            # Save cookies after successful login
            save_cookies(udemy.client)

            print(f"Logged in as {udemy.display_name} βœ…")
            login_successful = True

        except LoginException as e:
            print(f"Login Failed: {e}")
            if "Browser" in login_title:
                print("Can't login using cookies")
                os.remove(COOKIE_FILE)  # Delete invalid cookies
            elif "Email" in login_title:
                udemy.settings["email"], udemy.settings["password"] = "", ""
                udemy.save_settings()

print(fg + f"Logged in as {udemy.display_name}")

# Check if the user has valid settings
if udemy.is_user_dumb():
    print(bw + fr + "Invalid settings! Exiting.")
    exit()

scraper = Scraper(udemy.sites)

try:
    # Scrape courses
    print("Starting to scrape free Udemy courses...")
    udemy.scraped_data = scraper.get_scraped_courses(create_scraping_thread)
    time.sleep(0.5)
    print("\nScraping completed. Enrolling in courses...\n")

    # Start enrolling
    udemy.start_enrolling()

    print(f"\nSuccessfully Enrolled: {udemy.successfully_enrolled_c}")
    print(f"Amount Saved: {round(udemy.amount_saved_c, 2)} {udemy.currency.upper()}")
    print(f"Already Enrolled: {udemy.already_enrolled_c}")
    print(f"Excluded Courses: {udemy.excluded_c}")
    print(f"Expired Courses: {udemy.expired_c}")

except Exception:
    print(f"Error:\n{traceback.format_exc()}\n")

input("Press Enter to exit...")