import onnxruntime as ort
import os

# Replace these paths with the actual locations of your model files
model_paths = {
    "Rotator": "./checkpoints/model_gray_mobilenetv2_rotcls.onnx",
    "Detector": "./checkpoints/faceboxesv2-640x640.onnx",
    "PoseVar": "./checkpoints/fsanet-var.onnx",
    "PoseConv": "./checkpoints/fsanet-conv.onnx",
    "Landmarker1": "./checkpoints/face_landmarker_pts5_net1.onnx",
    "Landmarker2": "./checkpoints/face_landmarker_pts5_net2.onnx",
    "Recognizer": "./checkpoints/face_recognizer.onnx",
}

print("--- Checking Python ONNX model input shapes ---")

for name, path in model_paths.items():
    if not os.path.exists(path):
        print(f"[Warning] {name} model not found: {path}")
        continue
    try:
        session = ort.InferenceSession(path)
        # Print each input's name and shape
        input_meta = session.get_inputs()
        for i, input_node in enumerate(input_meta):
            print(f"Model {name} input {i} ({input_node.name}): {input_node.shape}")
        # You can also uncomment the lines below to inspect the outputs
        # output_meta = session.get_outputs()
        # for i, output_node in enumerate(output_meta):
        #     print(f"Model {name} output {i} ({output_node.name}): {output_node.shape}")
    except Exception as e:
        print(f"[Error] Failed to load {name} ({path}): {e}")

print("--- Check complete ---")