# app.py — FastAPI churn-prediction service (117 lines, 95 loc)
import pandas as pd
import joblib
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel, ConfigDict
from typing import Dict, Any
import openai
import os
app = FastAPI(title="ML Churn Prediction API", version="1.0.0")
# Load the trained model and feature scaler at import time so a single
# cold start pays the deserialization cost, not every request.
try:
    model = joblib.load('logistic_model.pkl')
    scaler = joblib.load('scaler.pkl')
except FileNotFoundError as e:
    # Degrade gracefully: keep the API importable so /health can report
    # the missing artifacts instead of crashing the whole process.
    print(f"Model file not found: {e}")
    print("Please ensure logistic_model.pkl and scaler.pkl exist in the project directory")
    model = None
    scaler = None

# Set OpenAI API key from the environment.
# NOTE(review): the "YOUR_API_KEY" fallback is a placeholder that will
# silently produce auth failures downstream — prefer failing fast or
# leaving it unset; kept for backward compatibility.
openai.api_key = os.environ.get("OPENAI_API_KEY", "YOUR_API_KEY")
# Define input data model
# Define input data model
class CustomerData(BaseModel):
    """Request schema for /predict: one customer's feature vector.

    Field list is a template — align these with the exact features the
    trained model/scaler were fitted on.
    """

    age: float
    tenure: float
    monthly_charges: float
    total_charges: float
    contract_type: str
    payment_method: str

    # Example payload surfaced in the OpenAPI docs ("Try it out").
    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "age": 45.0,
                "tenure": 24.0,
                "monthly_charges": 79.85,
                "total_charges": 1800.0,
                "contract_type": "Month-to-month",
                "payment_method": "Electronic check",
            }
        }
    )
# Response model
class PredictionResponse(BaseModel):
churn_probability: float
explanation: str
input_data: Dict[str, Any]
@app.post('/predict', response_model=PredictionResponse)
def predict(data: CustomerData):
    """Score one customer for churn and attach an LLM explanation.

    Raises:
        HTTPException 503: model artifacts were not loaded at startup.
        HTTPException 400: scaling/prediction failed on this payload.
    """
    # Check if models are loaded (artifacts may have been missing at startup).
    if model is None or scaler is None:
        raise HTTPException(
            status_code=503,
            detail="Model not available. Please ensure model files exist."
        )
    try:
        # Convert Pydantic model to dict for DataFrame
        input_dict = data.model_dump()
        df = pd.DataFrame([input_dict])
        # NOTE(review): df includes raw string columns (contract_type,
        # payment_method); this assumes `scaler` is a pipeline that
        # encodes them — a bare StandardScaler would raise here. Confirm
        # against the training code.
        scaled = scaler.transform(df)
        # predict_proba returns a NumPy array; cast the scalar to a plain
        # Python float so the response serializes as a standard JSON number.
        prob = float(model.predict_proba(scaled)[:, 1][0])
        # Call OpenAI for summarisation (best-effort; falls back internally).
        summary = summarise_prediction(prob, input_dict)
        return PredictionResponse(
            churn_probability=prob,
            explanation=summary,
            input_data=input_dict
        )
    except Exception as e:
        # Any scaling/prediction failure surfaces as a client-visible 400.
        raise HTTPException(
            status_code=400, detail=f"Prediction error: {str(e)}")
@app.get('/health')
def health_check():
    """Report service liveness and whether the ML artifacts are loaded."""
    status = {
        "status": "healthy",
        "model_loaded": model is not None,
        "scaler_loaded": scaler is not None,
        "api_version": "1.0.0",
    }
    return status
def summarise_prediction(probability, input_data):
    """Ask the LLM for a stakeholder-friendly summary of one prediction.

    Best-effort: any failure (network, auth, quota, ...) is swallowed and
    a fallback string is returned so /predict never fails on explanation.
    """
    prompt = f"""
    You are an AI assistant. The model predicted a churn probability of {probability:.2f} for this customer with the following features:
    {input_data}
    Write a short, clear summary explaining this result to a business stakeholder in simple words.
    """
    try:
        completion = openai.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": prompt}],
            max_tokens=100,
        )
    except Exception as e:
        return f"Explanation unavailable: {str(e)}"
    return completion.choices[0].message.content