nothere990 committed on
Commit
2f1d976
·
1 Parent(s): 228d63b
Files changed (3) hide show
  1. Dockerfile +11 -8
  2. app.py +51 -0
  3. requirements.txt +2 -0
Dockerfile CHANGED
@@ -1,4 +1,4 @@
1
- FROM python:3.9-slim-buster
2
 
3
  # Install system dependencies
4
  RUN apt -qq update && \
@@ -32,18 +32,17 @@ RUN apt -qq update && \
32
  libavutil-dev \
33
  libswscale-dev \
34
  libswresample-dev \
35
- neofetch && \
36
- apt-get clean && \
37
  rm -rf /var/lib/apt/lists/
38
 
39
- # Install Chrome using your preferred method
40
  RUN mkdir -p /tmp/ && \
41
  cd /tmp/ && \
42
  wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb && \
43
  dpkg -i ./google-chrome-stable_current_amd64.deb || apt-get install -fqqy && \
44
  rm ./google-chrome-stable_current_amd64.deb
45
 
46
- # Install Chromedriver using your specified method
47
  RUN mkdir -p /tmp/ && \
48
  cd /tmp/ && \
49
  wget -O chromedriver.zip https://chromedriver.storage.googleapis.com/$(curl -sS https://chromedriver.storage.googleapis.com/LATEST_RELEASE)/chromedriver_linux64.zip && \
@@ -59,7 +58,8 @@ ENV HOME=/home/user \
59
  PATH=/home/user/.local/bin:$PATH \
60
  CHROME_DRIVER=/usr/bin/chromedriver \
61
  CHROME_BIN=/usr/bin/google-chrome-stable \
62
- PYTHONUNBUFFERED=1
 
63
 
64
  # Set working directory
65
  WORKDIR $HOME/app
@@ -67,10 +67,13 @@ WORKDIR $HOME/app
67
  # Switch to non-root user
68
  USER user
69
 
 
 
 
70
  # Copy requirements first to leverage Docker cache
71
  COPY --chown=user requirements.txt .
72
- RUN pip install --no-cache-dir --upgrade pip \
73
- && pip install --no-cache-dir -r requirements.txt
74
 
75
  # Copy application code
76
  COPY --chown=user . .
 
1
+ FROM python:3.9.5-slim-buster
2
 
3
  # Install system dependencies
4
  RUN apt -qq update && \
 
32
  libavutil-dev \
33
  libswscale-dev \
34
  libswresample-dev \
35
+ && apt-get clean && \
 
36
  rm -rf /var/lib/apt/lists/
37
 
38
+ # Install Chrome
39
  RUN mkdir -p /tmp/ && \
40
  cd /tmp/ && \
41
  wget https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb && \
42
  dpkg -i ./google-chrome-stable_current_amd64.deb || apt-get install -fqqy && \
43
  rm ./google-chrome-stable_current_amd64.deb
44
 
45
+ # Install Chromedriver
46
  RUN mkdir -p /tmp/ && \
47
  cd /tmp/ && \
48
  wget -O chromedriver.zip https://chromedriver.storage.googleapis.com/$(curl -sS https://chromedriver.storage.googleapis.com/LATEST_RELEASE)/chromedriver_linux64.zip && \
 
58
  PATH=/home/user/.local/bin:$PATH \
59
  CHROME_DRIVER=/usr/bin/chromedriver \
60
  CHROME_BIN=/usr/bin/google-chrome-stable \
61
+ PYTHONUNBUFFERED=1 \
62
+ PIP_CACHE_DIR=/home/user/.cache/pip # Add pip cache directory
63
 
64
  # Set working directory
65
  WORKDIR $HOME/app
 
67
  # Switch to non-root user
68
  USER user
69
 
70
+ # Create cache directory and set permissions
71
+ RUN mkdir -p $HOME/.cache/pip && chmod -R 777 $HOME/.cache
72
+
73
  # Copy requirements first to leverage Docker cache
74
  COPY --chown=user requirements.txt .
75
+ RUN pip install --upgrade pip \
76
+ && pip install -r requirements.txt # Removed --no-cache-dir
77
 
78
  # Copy application code
79
  COPY --chown=user . .
app.py CHANGED
@@ -1,3 +1,5 @@
 
 
1
  from fastapi import FastAPI, HTTPException, Depends
2
  from pydantic import BaseModel
3
  from itertools import islice
@@ -44,6 +46,40 @@ app.add_middleware(
44
  # ----- Helper Functions -----
45
  async def get_api_version():
46
  return API_VERSION
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
 
48
  # ----- API Endpoints -----
49
  @app.get("/", response_model=SuccessResponse)
@@ -151,3 +187,18 @@ async def protected_route(secret_key: Optional[str] = None):
151
  "secret_data": "πŸ” You've unlocked premium content!",
152
  "access_level": "VIP"
153
  })
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import requests
2
+ from bs4 import BeautifulSoup
3
  from fastapi import FastAPI, HTTPException, Depends
4
  from pydantic import BaseModel
5
  from itertools import islice
 
46
  # ----- Helper Functions -----
47
  async def get_api_version():
48
  return API_VERSION
49
+
50
+
51
+ async def HentaiAnime():
52
+ try:
53
+ page = random.randint(1, 1153)
54
+ response = requests.get(f'https://sfmcompile.club/page/{page}')
55
+ response.raise_for_status()
56
+ soup = BeautifulSoup(response.text, 'html.parser')
57
+ hasil = []
58
+ articles = soup.select('#primary > div > div > ul > li > article')
59
+ for article in articles:
60
+ title = article.select_one('header > h2').text
61
+ link = article.select_one('header > h2 > a')['href']
62
+ category = article.select_one('header > div.entry-before-title > span > span').text.replace('in ', '')
63
+ share_count = article.select_one('header > div.entry-after-title > p > span.entry-shares').text
64
+ views_count = article.select_one('header > div.entry-after-title > p > span.entry-views').text
65
+ type_ = article.select_one('source')['type'] if article.select_one('source') else 'image/jpeg'
66
+ video_1 = article.select_one('source')['src'] if article.select_one('source') else article.select_one('img')['data-src']
67
+ video_2 = article.select_one('video > a')['href'] if article.select_one('video > a') else ''
68
+ hasil.append({
69
+ "title": title,
70
+ "link": link,
71
+ "category": category,
72
+ "share_count": share_count,
73
+ "views_count": views_count,
74
+ "type": type_,
75
+ "video_1": video_1,
76
+ "video_2": video_2
77
+ })
78
+ if not hasil:
79
+ return {'developer': '@neomatrix90', 'error': 'no result found'}
80
+ return hasil
81
+ except Exception:
82
+ return None
83
 
84
  # ----- API Endpoints -----
85
  @app.get("/", response_model=SuccessResponse)
 
187
  "secret_data": "πŸ” You've unlocked premium content!",
188
  "access_level": "VIP"
189
  })
190
+
191
+
192
+ @app.get("/prono/hentai", response_model=SuccessResponse)
193
+ async def hentai_():
194
+ try:
195
+ response = await HentaiAnime()
196
+ return SuccessResponse(
197
+ status="True",
198
+ data={"results": response}
199
+ )
200
+ except:
201
+ return SuccessResponse(
202
+ status="False",
203
+ data={"error": "Error fucking"}
204
+ )
requirements.txt CHANGED
@@ -1,3 +1,5 @@
 
 
1
  fastapi
2
  uvicorn[standard]
3
  httpx>=0.28.1
 
1
+ bs4
2
+ requests
3
  fastapi
4
  uvicorn[standard]
5
  httpx>=0.28.1