CodingBuddy committed
Commit fefeab0 · verified · 1 Parent(s): 82165e8

Upload folder using huggingface_hub

Files changed (3):
  1. Dockerfile +15 -12
  2. app.py +68 -0
  3. requirements.txt +7 -3
Dockerfile CHANGED
@@ -1,20 +1,23 @@
- FROM python:3.13.5-slim
+ # Use a minimal base image with Python 3.9 installed
+ FROM python:3.9

+ # Set the working directory inside the container to /app
  WORKDIR /app

- RUN apt-get update && apt-get install -y \
-     build-essential \
-     curl \
-     git \
-     && rm -rf /var/lib/apt/lists/*
-
- COPY requirements.txt ./
- COPY src/ ./src/
+ # Copy all files from the current directory on the host to the container's /app directory
+ COPY . .

+ # Install Python dependencies listed in requirements.txt
  RUN pip3 install -r requirements.txt

- EXPOSE 8501
-
- HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
-
- ENTRYPOINT ["streamlit", "run", "src/streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
+ RUN useradd -m -u 1000 user
+ USER user
+ ENV HOME=/home/user \
+     PATH=/home/user/.local/bin:$PATH
+
+ WORKDIR $HOME/app
+
+ COPY --chown=user . $HOME/app
+
+ # Define the command to run the Streamlit app on port "8501" and make it accessible externally
+ CMD ["streamlit", "run", "app.py", "--server.port=8501", "--server.address=0.0.0.0", "--server.enableXsrfProtection=false"]
app.py ADDED
@@ -0,0 +1,68 @@
+
+ import streamlit as st
+ import pandas as pd
+ from huggingface_hub import hf_hub_download
+ import joblib
+ import os
+
+ from config import HF_REPO_ID
+
+ # HF_REPO_ID is imported from config.py, which must sit next to app.py
+ # or be reachable via PYTHONPATH.
+
+ # Fallback if config.py is unavailable:
+ # HF_REPO_ID = "CodingBuddy/Predictive-maintenance"
+
+ # Download and load the model
+ try:
+     model_path = hf_hub_download(repo_id=HF_REPO_ID, filename="Predictive_maintenance_project_best_model.joblib", repo_type="model")
+     model = joblib.load(model_path)
+     st.success("Model loaded successfully!")
+ except Exception as e:
+     st.error(f"Error loading model: {e}")
+     st.stop()
+
+ # Streamlit UI for Predictive Maintenance
+ st.title("Engine Predictive Maintenance App")
+ st.write("""
+ This application predicts whether an engine requires maintenance based on its sensor readings.
+ Please enter the engine sensor data below to get a prediction.
+ """)
+
+ # User input
+ st.header("Engine Sensor Data Input")
+
+ Engine_RPM = st.number_input("Engine RPM (Revolutions per Minute)", min_value=0.0, max_value=10000.0, value=700.0, step=1.0)
+ Lub_oil_pressure = st.number_input("Lubricating Oil Pressure (bar/kPa)", min_value=0.0, max_value=50.0, value=2.493592, step=0.000001, format="%.6f")
+ Fuel_pressure = st.number_input("Fuel Pressure (bar/kPa)", min_value=0.0, max_value=50.0, value=11.790927, step=0.000001, format="%.6f")
+ Coolant_pressure = st.number_input("Coolant Pressure (bar/kPa)", min_value=0.0, max_value=50.0, value=3.178981, step=0.000001, format="%.6f")
+ lub_oil_temp = st.number_input("Lubricating Oil Temperature (°C)", min_value=0.0, max_value=200.0, value=84.144163, step=0.000001, format="%.6f")
+ Coolant_temp = st.number_input("Coolant Temperature (°C)", min_value=0.0, max_value=200.0, value=81.632187, step=0.000001, format="%.6f")
+
+ # Assemble the inputs into a single-row DataFrame with the feature names the model expects
+ input_data = pd.DataFrame({
+     'Engine_rpm': [Engine_RPM],
+     'Lub_oil_pressure': [Lub_oil_pressure],
+     'Fuel_pressure': [Fuel_pressure],
+     'Coolant_pressure': [Coolant_pressure],
+     'lub_oil_temp': [lub_oil_temp],
+     'Coolant_temp': [Coolant_temp]
+ })
+
+
+ if st.button("Predict Engine Condition"):
+     try:
+         # Ensure the model is loaded before predicting
+         if 'model' in locals() and model is not None:
+             prediction = model.predict(input_data)[0]
+             # Assuming 0 = Normal, 1 = Requires Maintenance (Faulty)
+             result = "Requires Maintenance (Faulty)" if prediction == 1 else "Operating Normally"
+             st.subheader("Prediction Result:")
+             if prediction == 1:
+                 st.error(f"The model predicts: **{result}**")
+             else:
+                 st.success(f"The model predicts: **{result}**")
+         else:
+             st.warning("Model is not loaded. Please check the model loading process.")
+     except Exception as e:
+         st.error(f"An error occurred during prediction: {e}")
requirements.txt CHANGED
@@ -1,3 +1,7 @@
- altair
- pandas
- streamlit
+ pandas==2.2.2
+ huggingface_hub==0.32.6
+ streamlit==1.43.2
+ joblib==1.5.1
+ scikit-learn==1.6.0
+ xgboost==2.1.4
+ mlflow==3.0.1
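One reason the scikit-learn, xgboost, and joblib versions are pinned is that joblib.load in app.py deserializes a model trained against a specific library stack; mismatched versions tend to surface as unpickling warnings or prediction-time errors. The following is a hedged sketch, not part of the commit, of a sanity check that could be run inside the container; the expected versions are simply the pins above.

# check_versions.py -- hypothetical helper comparing installed packages to the pins above.
from importlib.metadata import version

EXPECTED = {"scikit-learn": "1.6.0", "xgboost": "2.1.4", "joblib": "1.5.1"}

for package, pinned in EXPECTED.items():
    installed = version(package)
    status = "OK" if installed == pinned else "MISMATCH"
    print(f"{package}: installed {installed}, pinned {pinned} -> {status}")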