Frenchizer committed · Commit 1d39b8a · verified · 1 Parent(s): 4737205

Create app.py

Files changed (1)
  app.py  +86 -0
app.py ADDED
@@ -0,0 +1,86 @@
import gradio as gr
from transformers import pipeline
import spacy
import language_tool_python
import json
import requests

# Initialize models and tools
nlp = spacy.load("en_core_web_sm")
language_tool = language_tool_python.LanguageTool('en-US')
spell_checker = pipeline("text2text-generation", model="oliverguhr/spelling-correction-english-base")

def preprocess_and_forward(text: str) -> str:
    processed_text, preprocessing_results = preprocess_text(text)

    try:
        # Forward preprocessed text to context detection (space_9)
        context_response = requests.post(
            "https://api.gradio.app/v2/Frenchizer/space_9/predict",
            json={"data": [processed_text]}
        ).json()

        if "error" in context_response:
            return json.dumps({
                "error": "Context detection failed",
                "preprocessing_results": preprocessing_results
            })

        context = context_response["data"][0]

        # Return preprocessing and detected context
        result = {
            "preprocessing": preprocessing_results,
            "context": context
        }
        return json.dumps(result)

    except Exception as e:
        return json.dumps({
            "error": str(e),
            "preprocessing_results": preprocessing_results
        })

def preprocess_text(text: str):
    result = {
        "corrections": [],
        "entities": [],
        "tags": [],
        "spell_suggestions": []
    }

    # Grammar and spell checking with LanguageTool
    matches = language_tool.check(text)
    for match in matches:
        if match.replacements:
            result["corrections"].append({
                "original": match.context[match.offsetInContext:match.offsetInContext + match.errorLength],
                "suggestion": match.replacements[0]
            })

    # Transformer-based spell check
    spell_checked = spell_checker(text, max_length=512)[0]['generated_text']
    if spell_checked != text:
        result["spell_suggestions"].append({
            "original": text,
            "corrected": spell_checked
        })

    # NER with spaCy
    doc = nlp(text)
    result["entities"] = [{"text": ent.text, "label": ent.label_} for ent in doc.ents]

    # Extract potential tags
    result["tags"] = [token.text for token in doc if token.text.startswith(('#', '@'))]

    return text, result

# Gradio interface
with gr.Blocks() as demo:
    input_text = gr.Textbox(label="Input Text")
    output_json = gr.JSON(label="Processing Results")
    preprocess_button = gr.Button("Process")
    preprocess_button.click(fn=preprocess_and_forward, inputs=[input_text], outputs=[output_json])

if __name__ == "__main__":
    demo.launch()
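
The preprocessing step can be exercised locally without launching the Gradio UI by importing preprocess_text from this file. The sketch below is illustrative only: the sample sentence is an assumption, and it requires that the spaCy model, LanguageTool, and the spelling-correction pipeline have downloaded and loaded successfully. preprocess_and_forward is not covered here because it additionally POSTs to the Frenchizer/space_9 endpoint and therefore needs network access.

    # Quick local check (sketch; sample text is an assumption)
    from app import preprocess_text

    sample = "Barack Obama visted Paris last week. #travel"
    original_text, report = preprocess_text(sample)

    print(report["corrections"])        # LanguageTool replacement suggestions, if any
    print(report["spell_suggestions"])  # transformer-corrected rewrite, when it differs from the input
    print(report["entities"])           # spaCy named entities with their labels
    print(report["tags"])               # tokens starting with '#' or '@'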