Docty committed on
Commit
e4876a6
·
verified ·
1 Parent(s): 77327d0

Upload 6 files

Browse files
.gitattributes CHANGED
@@ -34,3 +34,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  src/data/documents/Recon[[:space:]]Robot[[:space:]]Thrusters.pdf filter=lfs diff=lfs merge=lfs -text
 
 
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
  src/data/documents/Recon[[:space:]]Robot[[:space:]]Thrusters.pdf filter=lfs diff=lfs merge=lfs -text
37
+ data/documents/Recon[[:space:]]Robot[[:space:]]Thrusters.pdf filter=lfs diff=lfs merge=lfs -text
config/model_config.yaml ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ model:
2
+ language_name: qwen2.5:7b
3
+ embedding: bge-m3
4
+ predictive_model: model/predictive_model_RandomForest.pkl
5
+ xia_model: model/xia_config.pkl
6
+
7
+ chunk:
8
+ size: 512
9
+ overlap: 64
10
+
11
+ url:
12
+ server: http://api:8080/
13
+
14
+ documents:
15
+ breakpoints: ./data/documents/
16
+
17
+ storage: ./storage/
data/documents/Recon Robot Thrusters.pdf ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:33c6d9cf7e676684d9de4920057aa5a4337e902555c1ed5bcbecb4aea44c4008
3
+ size 349080
model/.gitkeep ADDED
File without changes
model/predictive_model_RandomForest.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:199ac096cc7f21f2a9abe6493f57c1a32c580fa29e6da8563a48b44e718e75a3
3
+ size 74705553
model/xia_config.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0f55ebd665920196c2c31201eeaa0e3a8bb04a6c46eb4a836d0a69ffb6997241
3
+ size 4097281
server.py ADDED
@@ -0,0 +1,72 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import FastAPI
2
+ from pydantic import BaseModel
3
+ from typing import List
4
+ from src.predictive.custom_model import ModelAPI
5
+ from src.slm.core import configure_llm
6
+ from src.slm.operation import workflow
7
+ from src.slm.query import xia_query, summary_query
8
+ from src.slm.retrieval import Retrieval
9
+
10
+ modelAPI = ModelAPI()
11
+ configure_llm()
12
+ agent = workflow()
13
+ retrieval = Retrieval()
14
+
15
+ app = FastAPI(title="Recon Robot Health Agent", description="Recon Robot Health Agent API")
16
+
17
+ class SensorInput(BaseModel):
18
+ instances: List[float]
19
+
20
+ class QueryInput(BaseModel):
21
+ query: str
22
+
23
+ def alert_message(status):
24
+ return f"The status of the thruster is {status}"
25
+
26
+ async def identify_root_cause(agent, thruster_data):
27
+ sensor_data = [{**thruster_data}]
28
+ factor_query = xia_query(sensor_data[0])
29
+ sum_query = summary_query()
30
+ return await agent.run(user_msg=f"{factor_query} {sum_query}")
31
+
32
+ async def search_document(retrieval, text):
33
+ return await retrieval.query_context(text)
34
+
35
+
36
+ @app.post("/predict")
37
+ def predict(data: SensorInput):
38
+ features = {
39
+ 'voltage': data.instances[0],
40
+ 'current': data.instances[1],
41
+ 'power': data.instances[2],
42
+ 'temperature': data.instances[3],
43
+ 'driver_temperature': data.instances[4],
44
+ 'speed_rpm': data.instances[5],
45
+ 'thruster_id_encoded': data.instances[6]
46
+ }
47
+ prediction = modelAPI.predict(features)
48
+ status = alert_message(prediction)
49
+
50
+ return {"prediction": status}
51
+
52
+
53
+ @app.post("/rca")
54
+ async def rca(data: SensorInput):
55
+ features = {
56
+ 'voltage': data.instances[0],
57
+ 'current': data.instances[1],
58
+ 'power': data.instances[2],
59
+ 'temperature': data.instances[3],
60
+ 'driver_temperature': data.instances[4],
61
+ 'speed_rpm': data.instances[5],
62
+ 'thruster_id_encoded': data.instances[6]
63
+ }
64
+ long_response = await identify_root_cause(agent, features)
65
+
66
+ return {"rca": str(long_response)}
67
+
68
+
69
+ @app.post('/semantic')
70
+ async def semantic(data: QueryInput):
71
+ response = await search_document(retrieval, data.query)
72
+ return {'search': str(response)}