	Update app.py
app.py CHANGED
@@ -54,14 +54,17 @@ tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
 def prompting(text, source_lang, target_lang):
     l = LANGUAGES[target_lang]
     prompt=f"Translate the following {source_lang} sentence into {target_lang} and explain it in detail:\n{text} <{l}>"
+    return prompt
 
 @spaces.GPU(duration=120)
 def translate_text(text, source_lang, target_lang):
     if not text.strip():
         return "请输入要翻译的文本"
     try:
+        prompt = prompting(text, source_lang, target_lang)
+        print(prompt)
         input_tokens = (
-            tokenizer(
+            tokenizer(prompt, return_tensors="pt")
             .input_ids[0]
             .cpu()
             .numpy()
@@ -74,7 +77,7 @@ def translate_text(text, source_lang, target_lang):
             num_return_sequences=1,
         )
         full_output = tokenizer.decode(translated_chunk[0], skip_special_tokens=True)
-        full_output = full_output.replace(
+        full_output = full_output.replace(prompt.strip(),"")
         yield full_output
     except Exception as e:
         yield f"翻译出错: {str(e)}"
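Put together, the commit factors prompt construction into `prompting()` (which previously built the prompt string but never returned it), feeds that prompt to the tokenizer, and strips the echoed prompt from the decoded output. The sketch below shows that flow end to end as an illustration only: the `MODEL_NAME` placeholder, the `LANGUAGES` value, and the `model.generate(...)` call are assumptions, since those lines are not part of this diff, and the real app additionally moves `input_ids` to NumPy and wraps `translate_text` in `@spaces.GPU(duration=120)`.

```python
# Illustrative sketch only: MODEL_NAME, LANGUAGES, and the model.generate() call below are
# assumptions; the Space's real values and its generation code are not shown in this diff.
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_NAME = "sshleifer/tiny-gpt2"  # placeholder model; the Space sets MODEL_NAME earlier in app.py
LANGUAGES = {"Chinese": "zh"}       # assumed shape of the mapping that supplies the <{l}> tag

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)

def prompting(text, source_lang, target_lang):
    # Prompt builder from the diff; the commit adds the missing `return prompt`.
    l = LANGUAGES[target_lang]
    prompt = f"Translate the following {source_lang} sentence into {target_lang} and explain it in detail:\n{text} <{l}>"
    return prompt

def translate_text(text, source_lang, target_lang):
    if not text.strip():
        return "请输入要翻译的文本"  # "Please enter the text to translate"
    try:
        prompt = prompting(text, source_lang, target_lang)  # new in this commit
        inputs = tokenizer(prompt, return_tensors="pt")     # the prompt now goes to the tokenizer
        # Assumed generation step; the actual code between the two hunks is not visible here.
        translated_chunk = model.generate(**inputs, max_new_tokens=64, num_return_sequences=1)
        full_output = tokenizer.decode(translated_chunk[0], skip_special_tokens=True)
        # Decoder-only models echo the prompt, so the commit removes it from the decoded text.
        full_output = full_output.replace(prompt.strip(), "")
        yield full_output
    except Exception as e:
        yield f"翻译出错: {str(e)}"  # "Translation error: ..."
```

Used as a generator, `next(translate_text("Hello world", "English", "Chinese"))` yields the decoded text with the echoed prompt removed, ready for the Space's UI to display.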
