import os

import streamlit as st

ROOT_FIG_DIR = f'{os.getcwd()}/figures'


def get_product_dev_page_layout():
    """Render the model-card page shown to the model developer stakeholder."""
    model_details = {
        "Model Description": "[EfficientNet](https://arxiv.org/abs/1905.11946) is used for transfer learning.",
        "Model Type": "Convolutional Neural Nets",
    }
    dev_details = {
        "Training Framework": "Tensorflow Keras",
        "Backbone Architecture": "EfficientNetB4",
        "Number of classes": 4,
        "Number of training epochs": 10,
        "Dropout rate": 0.2,
        "Batch size": 8,
        "Learning rate": 0.001,
        "Early stopping epochs": 10,
        "Reduce learning rate patience": 3,
        "Source code": "https://github.com/kaplansinan/OCTRetImageGen_CLcVAE",
    }
    production_details = {
        "Model size": "26MB",
        "Model Input": "(N,180,180,3)",
        "Model Output": "(N,4)",
        "Framework": "ONNXRuntime",
    }
    hardware_details = {
        "Operating System": "Ubuntu 20.04",
        "GPU Card": "NVIDIA GeForce 3060 6GB",
    }

    row2_1, row2_2, row2_3 = st.tabs(["General Info", "Development Info", "Production Info"])

    with row2_1:
        st.subheader('Architectural Details')
        list_test = """"""
        st.markdown(list_test, unsafe_allow_html=True)
        # st.json(model_details)
        st.caption('Architecture Visualization')
        st.image(f'{ROOT_FIG_DIR}/model_architecture_vis.png')

        with st.expander('License: CC BY 4.0 (click for details)'):
            st.write("""
The files associated with this dataset are licensed under a Creative Commons Attribution 4.0
International license. What does this mean? You can share, copy and modify this dataset so long as
you give appropriate credit, provide a link to the CC BY license, and indicate if changes were made,
but you may not do so in a way that suggests the rights holder has endorsed you or your use of the
dataset. Note that further permission may be required for any content within the dataset that is
identified as belonging to a third party. More details about the licenses can be found
[here](https://creativecommons.org/about/cclicenses/).
            """)

    with row2_2:
        st.subheader('Model Development Details')
        st.write("The training pipeline is implemented in Python; the Tensorflow framework is used for training.")

        new_title = '<p style="font-weight:bold">Training Hardware Info:</p>'
        st.markdown(new_title, unsafe_allow_html=True)
        st.json(hardware_details)

        new_title = '<p style="font-weight:bold">Training Hyperparameters:</p>'
        st.markdown(new_title, unsafe_allow_html=True)
        st.json(dev_details)

    with row2_3:
        st.subheader('How to use the model')
        st.write("The model is served through a gRPC client; in the production environment it can be "
                 "called with the sample Python code snippet below:")
        with st.expander("Inference Call"):
            code_txt = '''
import base64
import json

import requests

# Convert an image file to a base64 string.
def img_to_base64(filename):
    with open(filename, "rb") as image_file:
        base64str = base64.b64encode(image_file.read()).decode("utf-8")
    return base64str

# Run a prediction against the serving server.
def predict(image_file: str, url: str):
    base64_img = img_to_base64(image_file)
    payload = json.dumps({"base64str": base64_img})
    result = requests.post(url, data=payload)
    return result.json()

# Image file
image_path = "your_image_file_with_path.png"
# Server url (an example -> "http://127.0.0.1:8080/predict")
serving_server_url = '/predict'
# Inference call to the serving server
preds = predict(image_path, url=serving_server_url)
            '''
            st.code(code_txt, language='python')

        st.subheader('Served Model details')
        col1, col2, col3, col4, col5, col6 = st.columns(6)
        col1.metric("Model Size", "26MB")
        col2.metric("Model Input Size", "180x180x3")
        col3.metric("Model Output", "1x4")
        col4.metric("CPU Inference (fps)", "20 fps")
        col5.metric("GPU Inference (fps)", "45 fps")
        col6.metric("Model Framework", "ONNX")
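

# ---------------------------------------------------------------------------
# Illustrative sketch only (not rendered in the model card): a minimal
# Tensorflow Keras setup matching the hyperparameters listed in `dev_details`
# above (EfficientNetB4 backbone, 180x180x3 input, 4 classes, dropout 0.2,
# learning rate 0.001, batch size 8, 10 epochs, early stopping, reduce-LR-on-
# plateau). The datasets, loss, metric and patience interpretation are
# assumptions; the actual training pipeline lives in the linked repository.
# ---------------------------------------------------------------------------
def _example_training_setup(train_ds=None, val_ds=None):
    import tensorflow as tf

    # EfficientNetB4 backbone, resized to the model card's 180x180x3 input.
    backbone = tf.keras.applications.EfficientNetB4(
        include_top=False, weights="imagenet", input_shape=(180, 180, 3))
    model = tf.keras.Sequential([
        backbone,
        tf.keras.layers.GlobalAveragePooling2D(),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(4, activation="softmax"),  # 4 output classes
    ])
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
        loss="categorical_crossentropy",  # assumed loss for the 4-class head
        metrics=["accuracy"],
    )
    callbacks = [
        tf.keras.callbacks.EarlyStopping(patience=10, restore_best_weights=True),
        tf.keras.callbacks.ReduceLROnPlateau(patience=3),
    ]
    # train_ds / val_ds are assumed to be tf.data pipelines built with batch_size=8.
    if train_ds is not None and val_ds is not None:
        model.fit(train_ds, validation_data=val_ds, epochs=10, callbacks=callbacks)
    return model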


def get_product_manager_page_layout():
    """Render the model-card page shown to the product manager stakeholder."""
    model_details = {
        "Model Description": "[EfficientNet](https://arxiv.org/abs/1905.11946) is used for transfer learning.",
        "Model Type": "Convolutional Neural Nets",
    }
    dev_details = {
        "Training Framework": "Tensorflow Keras",
        "Backbone Architecture": "EfficientNetB4",
        "Number of classes": 4,
        "Number of training epochs": 10,
        "Dropout rate": 0.2,
        "Batch size": 8,
        "Learning rate": 0.001,
        "Early stopping epochs": 10,
        "Reduce learning rate patience": 3,
        "Source code": "https://github.com/kaplansinan/MLOps",
    }
    production_details = {
        "Model size": "26MB",
        "Model Input": "(N,180,180,3)",
        "Model Output": "(N,4)",
        "Framework": "ONNXRuntime",
    }
    hardware_details = {
        "Operating System": "Ubuntu 20.04",
        "GPU Card": "NVIDIA GeForce 3060 6GB",
    }

    row2_1, row2_3 = st.tabs(["General Info", "Production Info"])

    with row2_1:
        st.subheader('Architectural Details')
        list_test = """"""
        st.markdown(list_test, unsafe_allow_html=True)
        # st.json(model_details)
        st.caption('Architecture Visualization')
        st.image(f'{ROOT_FIG_DIR}/model_architecture_vis.png')

        with st.expander('License: CC BY 4.0 (click for details)'):
            st.write("""
The files associated with this dataset are licensed under a Creative Commons Attribution 4.0
International license. What does this mean? You can share, copy and modify this dataset so long as
you give appropriate credit, provide a link to the CC BY license, and indicate if changes were made,
but you may not do so in a way that suggests the rights holder has endorsed you or your use of the
dataset. Note that further permission may be required for any content within the dataset that is
identified as belonging to a third party. More details about the licenses can be found
[here](https://creativecommons.org/about/cclicenses/).
            """)

    with row2_3:
        st.subheader('How to use the model')
        st.write("The model is served through a gRPC client; in the production environment it can be "
                 "called with the sample Python code snippet below:")
        with st.expander("Inference Call"):
            code_txt = '''
import base64
import json

import requests

# Convert an image file to a base64 string.
def img_to_base64(filename):
    with open(filename, "rb") as image_file:
        base64str = base64.b64encode(image_file.read()).decode("utf-8")
    return base64str

# Run a prediction against the serving server.
def predict(image_file: str, url: str):
    base64_img = img_to_base64(image_file)
    payload = json.dumps({"base64str": base64_img})
    result = requests.post(url, data=payload)
    return result.json()

# Image file
image_path = "your_image_file_with_path.png"
# Server url (an example -> "http://127.0.0.1:8080/predict")
serving_server_url = '/predict'
# Inference call to the serving server
preds = predict(image_path, url=serving_server_url)
            '''
            st.code(code_txt, language='python')

        st.subheader('Served Model details')
        col1, col2, col3, col4, col5, col6 = st.columns(6)
        col1.metric("Model Size", "26MB")
        col2.metric("Model Input Size", "180x180x3")
        col3.metric("Model Output", "1x4")
        col4.metric("CPU Inference (fps)", "20 fps")
        col5.metric("GPU Inference (fps)", "45 fps")
        col6.metric("Model Framework", "ONNX")
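

# ---------------------------------------------------------------------------
# Illustrative sketch only (not rendered in the model card): loading the
# exported model locally with ONNXRuntime, matching the "Production Info"
# details (26MB ONNX model, input (N,180,180,3), output (N,4)). The model
# file name and the dummy input are assumptions.
# ---------------------------------------------------------------------------
def _example_onnx_inference(model_path="model.onnx"):
    import numpy as np
    import onnxruntime as ort

    session = ort.InferenceSession(model_path)
    input_name = session.get_inputs()[0].name

    # One 180x180 RGB image as a float32 batch; replace with real, preprocessed data.
    dummy_batch = np.random.rand(1, 180, 180, 3).astype(np.float32)
    outputs = session.run(None, {input_name: dummy_batch})
    return outputs[0]  # shape (1, 4): one score per class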


def get_product_practitioner_page_layout():
    """Render the model-card page shown to the practitioner stakeholder."""
    model_details = {
        "Model Description": "[EfficientNet](https://arxiv.org/abs/1905.11946) is used for transfer learning.",
        "Model Type": "Convolutional Neural Nets",
    }
    dev_details = {
        "Training Framework": "Tensorflow Keras",
        "Backbone Architecture": "EfficientNetB4",
        "Number of classes": 4,
        "Number of training epochs": 10,
        "Dropout rate": 0.2,
        "Batch size": 8,
        "Learning rate": 0.001,
        "Early stopping epochs": 10,
        "Reduce learning rate patience": 3,
        "Source code": "https://github.com/kaplansinan/MLOps",
    }
    production_details = {
        "Model size": "26MB",
        "Model Input": "(N,180,180,3)",
        "Model Output": "(N,4)",
        "Framework": "ONNXRuntime",
    }
    hardware_details = {
        "Operating System": "Ubuntu 20.04",
        "GPU Card": "NVIDIA GeForce 3060 6GB",
    }

    row2_1, row2_3 = st.tabs(["General Info", "Production Info"])

    with row2_1:
        st.subheader('Architectural Details')
        list_test = """"""
        st.markdown(list_test, unsafe_allow_html=True)
        # st.json(model_details)
        st.caption('Architecture Visualization')
        st.image(f'{ROOT_FIG_DIR}/model_architecture_vis.png')

        with st.expander('License: CC BY 4.0 (click for details)'):
            st.write("""
The files associated with this dataset are licensed under a Creative Commons Attribution 4.0
International license. What does this mean? You can share, copy and modify this dataset so long as
you give appropriate credit, provide a link to the CC BY license, and indicate if changes were made,
but you may not do so in a way that suggests the rights holder has endorsed you or your use of the
dataset. Note that further permission may be required for any content within the dataset that is
identified as belonging to a third party. More details about the licenses can be found
[here](https://creativecommons.org/about/cclicenses/).
            """)

    with row2_3:
        st.subheader('How to use the model')
        st.write("The model is served through a gRPC client; in the production environment it can be "
                 "called with the sample Python code snippet below:")
        with st.expander("Inference Call"):
            code_txt = '''
import base64
import json

import requests

# Convert an image file to a base64 string.
def img_to_base64(filename):
    with open(filename, "rb") as image_file:
        base64str = base64.b64encode(image_file.read()).decode("utf-8")
    return base64str

# Run a prediction against the serving server.
def predict(image_file: str, url: str):
    base64_img = img_to_base64(image_file)
    payload = json.dumps({"base64str": base64_img})
    result = requests.post(url, data=payload)
    return result.json()

# Image file
image_path = "your_image_file_with_path.png"
# Server url (an example -> "http://127.0.0.1:8080/predict")
serving_server_url = '/predict'
# Inference call to the serving server
preds = predict(image_path, url=serving_server_url)
            '''
            st.code(code_txt, language='python')

        st.subheader('Served Model details')
        col1, col2, col3, col4, col5, col6 = st.columns(6)
        col1.metric("Model Size", "26MB")
        col2.metric("Model Input Size", "180x180x3")
        col3.metric("Model Output", "1x4")
        col4.metric("CPU Inference (fps)", "20 fps")
        col5.metric("GPU Inference (fps)", "45 fps")
        col6.metric("Model Framework", "ONNX")

    # list_test = """<ul>
    #     <li>Model Size: 26MB</li>
    #     <li>Model Input: (180x180x3)</li>
    #     <li>Model Output: (1x4)</li>
    #     <li>Model Framework: ONNXRuntime</li>
    # </ul>"""
    # st.markdown(list_test, unsafe_allow_html=True)