Commit 94f2df7
Parent(s): a6dac98
Switch to akshayvkt/detect-ai-text to fix model_type error

- Dockerfile +1 -1
- README.md +1 -1
- app.py +2 -5
- requirements.txt +2 -1
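
For context on the fix: a "model_type" error from transformers usually means the library could not map the previous checkpoint's config.json to a model class. Loading the replacement checkpoint through pipeline() lets transformers resolve the tokenizer and model class itself. A minimal sketch of the new loading path (only the model id comes from this commit; the sample text and output shape are illustrative):

    from transformers import pipeline

    # pipeline() reads the repo's config.json and picks the matching tokenizer
    # and model class, so no manual AutoModel/AutoTokenizer wiring is needed.
    detector = pipeline("text-classification", model="akshayvkt/detect-ai-text")

    print(detector("An example sentence to classify."))
    # Roughly: [{'label': '...', 'score': 0.97}] -- labels depend on the model's config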
    	
    Dockerfile
    CHANGED

@@ -20,7 +20,7 @@ RUN pip install --no-cache-dir -r requirements.txt
 RUN pip install --no-cache-dir uvicorn
 
 # Clear cache and pre-download model
-RUN rm -rf /app/.cache/huggingface/* && python -c "from transformers import …
+RUN rm -rf /app/.cache/huggingface/* && python -c "from transformers import pipeline; pipeline('text-classification', model='akshayvkt/detect-ai-text')"
 
 # Copy the application code
 COPY --chown=myuser:myuser . .
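
The pre-download line above bakes the model into the image at build time so the Space does not have to fetch it on startup. A quick way to check that the baked-in cache is actually being reused (a sketch, assuming the runtime uses the same Hugging Face cache directory as the build step):

    import os

    # Ask transformers to fail instead of downloading; TRANSFORMERS_OFFLINE must
    # be set before the library is imported.
    os.environ["TRANSFORMERS_OFFLINE"] = "1"

    from transformers import pipeline

    # If this raises, the cache populated at build time is not visible at runtime
    # (for example because HF_HOME points somewhere else), and the Space would
    # re-download the model on startup instead.
    detector = pipeline("text-classification", model="akshayvkt/detect-ai-text")
    print(detector("smoke test")[0])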
    	
    README.md
    CHANGED

@@ -6,4 +6,4 @@ colorTo: green
 sdk: docker
 app_port: 7860
 ---
-A FastAPI app using …
+A FastAPI app using akshayvkt/detect-ai-text to classify text as AI-generated or human-written.
    	
    app.py
    CHANGED

@@ -1,14 +1,11 @@
 from fastapi import FastAPI
-from transformers import …
+from transformers import pipeline
 from pydantic import BaseModel
 import torch
 
 app = FastAPI()
-model_name = "SuperAnnotate/ai-detector"
 torch.manual_seed(42)  # For reproducibility
-
-model = AutoModelForSequenceClassification.from_pretrained(model_name)
-detector = pipeline("text-classification", model=model, tokenizer=tokenizer)
+detector = pipeline("text-classification", model="akshayvkt/detect-ai-text")
 
 class TextInput(BaseModel):
     text: str
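
The diff only touches model loading; the route that actually calls the detector is unchanged and not shown. As a rough sketch of how such an endpoint would typically use the new pipeline (the route name and response shape below are illustrative, not taken from this repo):

    from fastapi import FastAPI
    from pydantic import BaseModel
    from transformers import pipeline

    app = FastAPI()
    detector = pipeline("text-classification", model="akshayvkt/detect-ai-text")

    class TextInput(BaseModel):
        text: str

    # Hypothetical route; the actual endpoint in app.py is outside this diff.
    @app.post("/predict")
    def predict(payload: TextInput):
        result = detector(payload.text)[0]  # e.g. {"label": "...", "score": 0.97}
        return {"label": result["label"], "score": float(result["score"])}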
    	
    requirements.txt
    CHANGED

@@ -2,4 +2,5 @@ transformers==4.44.2
 torch==2.4.1
 fastapi==0.115.2
 uvicorn==0.32.0
-pydantic==2.9.2
+pydantic==2.9.2
+numpy==2.0.2