Commit cb3234d by ragilbuaj
Parent(s): 9c07a98

delete cache dir

Files changed (2)
  1. Dockerfile +10 -21
  2. app.py +9 -4
Dockerfile CHANGED
@@ -1,25 +1,14 @@
- # Use the existing base image
- FROM python:3.9
+ # Use the official Python 3.10.9 image
+ FROM python:3.10.9

- # Set the working directory
- WORKDIR /app
-
- # Copy requirements file and install dependencies
- COPY requirements.txt .
- RUN pip install --no-cache-dir -r requirements.txt
-
- # Set the HF_HOME environment variable to a writable directory
- ENV HF_HOME=/app/.cache
- ENV TRANSFORMERS_CACHE=/app/.cache
-
- # Create the huggingface directory and set the correct permissions
- RUN mkdir -p /app/.cache && chmod -R 777 /app/.cache
-
- # Copy the application files
+ # Copy the current directory contents into the container at .
  COPY . .

- # Expose the port (if needed)
- EXPOSE 8000
+ # Set the working directory to /
+ WORKDIR /

- # Command to run the application
- CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8000", "--log-level", "debug"]
+ # Install requirements.txt
+ RUN pip install --no-cache-dir --upgrade -r /requirements.txt
+
+ # Start the FastAPI app on port 8000 (Spaces defaults to 7860, so app_port should be set accordingly)
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8000"]
app.py CHANGED
@@ -1,13 +1,10 @@
  from fastapi import FastAPI
+ from fastapi.middleware.cors import CORSMiddleware
  from pydantic import BaseModel
  from transformers import AutoTokenizer, AutoModelForSequenceClassification
  import torch
  import os

- # Set the cache directory to a writable location
- os.environ['TRANSFORMERS_CACHE'] = '/app/.cache'
- os.environ['HF_HOME'] = '/app/.cache'
-
  # Initialize the model and tokenizer
  model_name = "w11wo/indonesian-roberta-base-sentiment-classifier"
  tokenizer = AutoTokenizer.from_pretrained(model_name)
@@ -16,6 +13,14 @@ model = AutoModelForSequenceClassification.from_pretrained(model_name)
  # Initialize FastAPI
  app = FastAPI()

+ app.add_middleware(
+     CORSMiddleware,
+     allow_origins=["*"],  # Can be restricted to a list of allowed origins
+     allow_credentials=True,
+     allow_methods=["*"],
+     allow_headers=["*"],
+ )
+
  # Model request body
  class TextInput(BaseModel):
      text: str
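
For context, here is a minimal sketch of the sentiment inference that app.py wraps around this model, using only the tokenizer and model loaded above and standard transformers/torch calls; the endpoint code itself is outside this diff, and the example sentence and printed label are illustrative.

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_name = "w11wo/indonesian-roberta-base-sentiment-classifier"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

def classify(text: str) -> str:
    # Tokenize the input and run a forward pass without tracking gradients
    inputs = tokenizer(text, return_tensors="pt", truncation=True)
    with torch.no_grad():
        logits = model(**inputs).logits
    # Map the highest-scoring class index to its human-readable label
    predicted_id = int(logits.argmax(dim=-1).item())
    return model.config.id2label[predicted_id]

print(classify("Produk ini sangat bagus!"))  # e.g. "positive"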