import keras.models
import numpy as np
import tensorflow as tf


def serve_unet_model(model_path="../app/UNet_25_Crack.tflite"):
    """Load a crack-segmentation TFLite model and prepare it for inference.

    Parameters
    ----------
    model_path : str
        Path to the ``.tflite`` model file. Defaults to the bundled
        UNet crack model so existing callers keep working.

    Returns
    -------
    tuple
        ``(interpreter, height, width, input_details, output_details)``
        where ``height``/``width`` are taken from the model's first input
        tensor shape (indices 1 and 2 — presumably NHWC layout; confirm
        against the exported model).
    """
    tflite_interpreter = tf.lite.Interpreter(model_path=model_path)

    input_details = tflite_interpreter.get_input_details()
    output_details = tflite_interpreter.get_output_details()

    # Tensors must be allocated before the interpreter can be invoked.
    tflite_interpreter.allocate_tensors()

    height = input_details[0]['shape'][1]
    width = input_details[0]['shape'][2]

    return tflite_interpreter, height, width, input_details, output_details
def serve_rcnn_model(graph_path="../app/frozen_inference_graph.pb"):
    """Load a frozen TF1-style object-detection graph into a new tf.Graph.

    Parameters
    ----------
    graph_path : str
        Path to the frozen ``.pb`` GraphDef file. Defaults to the bundled
        inference graph so existing callers keep working.

    Returns
    -------
    tf.Graph
        A graph with the frozen GraphDef imported under an empty name
        scope, ready to be used with a ``tf.compat.v1.Session``.
    """
    detection_graph = tf.Graph()
    with detection_graph.as_default():
        od_graph_def = tf.compat.v1.GraphDef()
        # GFile handles both local and remote (e.g. GCS) paths; read the
        # serialized protobuf and import it into this graph.
        with tf.compat.v1.gfile.GFile(graph_path, 'rb') as fid:
            od_graph_def.ParseFromString(fid.read())
        tf.import_graph_def(od_graph_def, name='')
    return detection_graph
def serve_bridge_model(model_path="../app/crack_model.h5"):
    """Load the Keras bridge-crack model from an HDF5 checkpoint.

    Parameters
    ----------
    model_path : str
        Path to the ``.h5`` Keras model file. Defaults to the bundled
        crack model so existing callers keep working.

    Returns
    -------
    keras.Model
        The deserialized model, ready for ``predict``.
    """
    return keras.models.load_model(model_path)