import streamlit as st
import torch
import joblib
import numpy as np
from PIL import Image
import shap
import pandas as pd
import matplotlib.pyplot as plt

# Load the model and scalers
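# st.cache_resource keeps the TorchScript model and fitted scalers in memory
# across Streamlit reruns, so they are only deserialized once per session.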
@st.cache_resource
def load_model_and_scalers():
    loaded_model = torch.jit.load('model.pt')
    loaded_scaler1 = joblib.load('scaler1.pkl')
    loaded_scaler2 = joblib.load('scaler2.pkl')
    return loaded_model, loaded_scaler1, loaded_scaler2

loaded_model, loaded_scaler1, loaded_scaler2 = load_model_and_scalers()

# Load the training data (used as the SHAP background set) and feature names
@st.cache_data
def load_training_data():
    data = pd.read_excel("peak5.xlsx")
    X = data.iloc[:, 0:7].to_numpy()
    return X, data.columns[:7].tolist()

X_train, feature_names = load_training_data()

# Create a wrapper function for SHAP
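# shap.KernelExplainer expects a plain function that maps a 2-D numpy array of
# (scaler1-scaled) inputs to model outputs; this wrapper runs the TorchScript
# model and inverts scaler2 so attributions are expressed in the original
# output units (mm/s).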
def f(X):
    with torch.no_grad():
        X_tensor = torch.tensor(X, dtype=torch.float32)
        output = loaded_model(X_tensor).numpy()
    return loaded_scaler2.inverse_transform(output)

def add_logo(logo_path, size=(200, 150)):
    logo = Image.open(logo_path)
    logo = logo.resize(size)
    st.image(logo, use_column_width=False)

st.title('Explainable AI (XAI) for Predicting Peak Particle Velocity in Pile Driving on Bangkok Subsoil')
add_logo("logoAI.png")

# Create input fields
st.header('Enter Input Values:')
pile_width = st.number_input('Pile width (mm)', value=300.0, min_value=260.0, max_value=800.0)
pile_length = st.number_input('Pile length (m)', value=18.0, min_value=15.0, max_value=20.0)
weight = st.number_input('Weight (ton)', value=4.2, min_value=3.0, max_value=6.0)
drop_height = st.number_input('Drop height (m)', value=0.5)
distance = st.number_input('Distance (m)', min_value=3.0, value=9.0)
location = st.selectbox('Location', ['On ground', 'On foundation', 'On building'], index=0)
trigger = st.selectbox('Trigger', ['Longitudinal', 'Transverse', 'Vertical'], index=0)

# Convert location and trigger to numerical values
location_value = ['On ground', 'On foundation', 'On building'].index(location) + 1
trigger_value = ['Longitudinal', 'Transverse', 'Vertical'].index(trigger) + 1

# Button to make prediction
if st.button('Make Prediction'):
    # Prepare input data
    input_values = np.array([pile_width, pile_length, weight, drop_height, distance, location_value, trigger_value])
    inputx = np.reshape(input_values, (1, 7))
    
    # Transform input data
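    # Scale the raw input row with scaler1 so it matches the feature scaling
    # the model expects.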
    X_test1 = loaded_scaler1.transform(inputx).astype(np.float32)
    X_test1 = torch.from_numpy(X_test1)
    
    # Make prediction
    with torch.no_grad():
        test_outputs = loaded_model(X_test1)
        test_outputs2 = loaded_scaler2.inverse_transform(test_outputs.cpu().numpy())
    
    # Display results
    st.subheader('Prediction Results:')
    st.write(f"Peak Particle Velocity: {test_outputs2[0][0]:.2f} mm/s")
    
    # Add SHAP explanation
    st.subheader('Explanation of Prediction:')
    
    # Create SHAP explainer
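    # A 100-row sample of the scaled training data serves as the background
    # dataset, which keeps KernelExplainer's runtime manageable.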
    explainer = shap.KernelExplainer(f, shap.sample(loaded_scaler1.transform(X_train), 100))
    shap_values = explainer.shap_values(X_test1.numpy())
    
    # Create SHAP waterfall plot
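    # shap_values contains a single row (one prediction); extract that row and
    # the explainer's expected value, then package them into a shap.Explanation
    # so the waterfall plot can label each bar with its feature name and value.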
    shap_values_single = shap_values[0].flatten()
    expected_value = explainer.expected_value[0]
    
    # Convert feature values to strings
    feature_values = [f"{x:.1f}" for x in inputx[0]]
    
    explanation = shap.Explanation(
        values=shap_values_single,
        base_values=expected_value,
        data=feature_values,
        feature_names=feature_names
    )
    
    fig, ax = plt.subplots()
    shap.plots.waterfall(explanation, show=False)
    st.pyplot(fig)
    
 

st.sidebar.header('About')
st.sidebar.info('This app uses a pre-trained PyTorch model to predict peak particle velocity based on user input. It is specifically designed for Bangkok subsoil conditions.\n\nPaper: https://arxiv.org/abs/2409.05918')