searchusermenu
  • 发布文章
  • 消息中心
点赞
收藏
评论
分享
原创

模型格式转换hf->onnx,savemodel->onnx

2023-11-02 10:01:46
5
0
import subprocess
import os
import yaml

def parse_yaml(user_yaml_input):
    """Deserialize a YAML string into native Python objects.

    ``yaml.safe_load`` is used so untrusted input cannot trigger
    arbitrary object construction.
    """
    return yaml.safe_load(user_yaml_input)

def main():
    """Demo entry point: run a hard-coded SavedModel -> ONNX conversion.

    The commented-out section below shows the intended production path,
    where every parameter arrives through environment variables.
    """
    # user_yaml_input = os.getenv("USER_YAML_INPUT")
    # source_format = os.getenv("SOURCE_FORMAT")
    # target_format = os.getenv("TARGET_FORMAT")
    # source_path = os.getenv("SOURCE_PATH")
    # target_path = os.getenv("TARGET_PATH")
    #
    # if source_format == "savemodel" and target_format == "onnx":
    #     tf2onnx(user_yaml_input, source_path, target_path)
    # if source_format == "huggingface" and target_format == "onnx":
    #     hf2onnx(user_yaml_input, source_path, target_path)

    source_format = "savemodel"
    target_format = "onnx"

    # Inline YAML snippets stand in for user-supplied configuration.
    tf2onnx_yaml = """
    opset: 15
    is_look: True
    tag: serve
    signature_def: serving_default
    concrete_function: 1
    """
    hf2onnx_yaml = """
    opset: 16
    atol: 1e-4
    """

    tf2onnx_source_path = "./bert/"
    tf2onnx_target_path = "./onnx/"
    hf2onnx_source_path = "./"
    hf2onnx_target_path = "./onnx/model.onnx"

    if source_format == "savemodel" and target_format == "onnx":
        tf2onnx(tf2onnx_yaml, tf2onnx_source_path, tf2onnx_target_path)
    if source_format == "huggingface" and target_format == "onnx":
        hf2onnx(hf2onnx_yaml, hf2onnx_source_path, hf2onnx_target_path)

def tf2onnx(user_yaml_input, source_path, target_path):
    """Convert a TensorFlow SavedModel to ONNX via ``tf2onnx.convert``.

    Args:
        user_yaml_input: YAML string with optional keys ``opset`` (int),
            ``tag`` (str), ``is_look`` (bool: dump SavedModel details
            first), ``signature_def`` and ``concrete_function``.
        source_path: directory containing the SavedModel.
        target_path: output directory; ``model.onnx`` is appended.

    Returns:
        The converter subprocess's exit code.
    """
    parsed_param = parse_yaml(user_yaml_input)
    print(parsed_param)

    # os.path.join avoids a doubled slash when target_path ends in "/".
    target_path = os.path.join(target_path, "model.onnx")

    opset = int(parsed_param.get("opset", 15))
    tag = parsed_param.get("tag", "serve")
    # Default to a real boolean; the original defaulted to the *string*
    # "False" and compared with ``== True``, which only worked by accident.
    is_look = bool(parsed_param.get("is_look", False))
    signature_def = parsed_param.get("signature_def", "")
    concrete_function = parsed_param.get("concrete_function", "")

    print("source_path:", source_path)
    print("target_path:", target_path)
    print("opset:", opset)
    print("tag:", tag)
    print("is_look:", is_look)
    print("signature_def:", signature_def)
    print("concrete_function:", concrete_function)

    if is_look:
        print("more information about this savemodel:")
        # NOTE(review): hard-coded dist-packages path — assumes the
        # container's Python 3.10 layout; confirm before reuse.
        cli = "/usr/local/lib/python3.10/dist-packages/tensorflow/python/tools/saved_model_cli.py"
        subprocess.run(["python", cli, "show", "--dir", source_path, "--all"])

    # List-form argv (shell=False) prevents shell injection through the
    # user-controlled paths; subprocess.run also waits for completion.
    command = [
        "python", "-m", "tf2onnx.convert",
        "--saved-model", source_path,
        "--output", target_path,
        "--opset", str(opset),
        "--tag", tag,
    ]
    if signature_def != "":
        command += ["--signature_def", str(signature_def)]
    if concrete_function != "":
        command += ["--concrete_function", str(concrete_function)]

    return subprocess.run(command).returncode

def hf2onnx(user_yaml_input, source_path, target_path):
    """Export a Hugging Face model to ONNX via ``transformers.onnx``.

    Args:
        user_yaml_input: YAML string with optional keys ``opset`` (int)
            and ``atol`` (validation tolerance for the exported graph).
        source_path: model directory (or hub id) to export.
        target_path: output location for the ONNX artifact.

    Returns:
        The exporter subprocess's exit code.
    """
    parsed_param = parse_yaml(user_yaml_input)
    print(parsed_param)

    opset = int(parsed_param.get("opset", 15))
    atol = parsed_param.get("atol", "1e-4")

    # BUG FIX: the original referenced undefined names ``model_path`` and
    # ``output_path`` (NameError at runtime); the function's parameters
    # are ``source_path``/``target_path``.
    print("source_path:", source_path)
    print("target_path:", target_path)
    print("opset:", opset)
    print("atol:", atol)

    # List-form argv (shell=False) prevents shell injection through the
    # user-controlled paths; subprocess.run also waits for completion.
    command = [
        "python", "-m", "transformers.onnx",
        "--model", source_path,
        target_path,
        "--opset", str(opset),
        "--atol", str(atol),
    ]
    return subprocess.run(command).returncode


# Run the demo conversion only when executed as a script.
if __name__ == "__main__":
    main()
0条评论
0 / 1000
l****n
28文章数
5粉丝数
l****n
28 文章 | 5 粉丝
原创

模型格式转换hf->onnx,savemodel->onnx

2023-11-02 10:01:46
5
0
import subprocess
import os
import yaml

def parse_yaml(user_yaml_input):
    """Deserialize a YAML string into native Python objects.

    ``yaml.safe_load`` is used so untrusted input cannot trigger
    arbitrary object construction.
    """
    return yaml.safe_load(user_yaml_input)

def main():
    """Demo entry point: run a hard-coded SavedModel -> ONNX conversion.

    The commented-out section below shows the intended production path,
    where every parameter arrives through environment variables.
    """
    # user_yaml_input = os.getenv("USER_YAML_INPUT")
    # source_format = os.getenv("SOURCE_FORMAT")
    # target_format = os.getenv("TARGET_FORMAT")
    # source_path = os.getenv("SOURCE_PATH")
    # target_path = os.getenv("TARGET_PATH")
    #
    # if source_format == "savemodel" and target_format == "onnx":
    #     tf2onnx(user_yaml_input, source_path, target_path)
    # if source_format == "huggingface" and target_format == "onnx":
    #     hf2onnx(user_yaml_input, source_path, target_path)

    source_format = "savemodel"
    target_format = "onnx"

    # Inline YAML snippets stand in for user-supplied configuration.
    tf2onnx_yaml = """
    opset: 15
    is_look: True
    tag: serve
    signature_def: serving_default
    concrete_function: 1
    """
    hf2onnx_yaml = """
    opset: 16
    atol: 1e-4
    """

    tf2onnx_source_path = "./bert/"
    tf2onnx_target_path = "./onnx/"
    hf2onnx_source_path = "./"
    hf2onnx_target_path = "./onnx/model.onnx"

    if source_format == "savemodel" and target_format == "onnx":
        tf2onnx(tf2onnx_yaml, tf2onnx_source_path, tf2onnx_target_path)
    if source_format == "huggingface" and target_format == "onnx":
        hf2onnx(hf2onnx_yaml, hf2onnx_source_path, hf2onnx_target_path)

def tf2onnx(user_yaml_input, source_path, target_path):
    """Convert a TensorFlow SavedModel to ONNX via ``tf2onnx.convert``.

    Args:
        user_yaml_input: YAML string with optional keys ``opset`` (int),
            ``tag`` (str), ``is_look`` (bool: dump SavedModel details
            first), ``signature_def`` and ``concrete_function``.
        source_path: directory containing the SavedModel.
        target_path: output directory; ``model.onnx`` is appended.

    Returns:
        The converter subprocess's exit code.
    """
    parsed_param = parse_yaml(user_yaml_input)
    print(parsed_param)

    # os.path.join avoids a doubled slash when target_path ends in "/".
    target_path = os.path.join(target_path, "model.onnx")

    opset = int(parsed_param.get("opset", 15))
    tag = parsed_param.get("tag", "serve")
    # Default to a real boolean; the original defaulted to the *string*
    # "False" and compared with ``== True``, which only worked by accident.
    is_look = bool(parsed_param.get("is_look", False))
    signature_def = parsed_param.get("signature_def", "")
    concrete_function = parsed_param.get("concrete_function", "")

    print("source_path:", source_path)
    print("target_path:", target_path)
    print("opset:", opset)
    print("tag:", tag)
    print("is_look:", is_look)
    print("signature_def:", signature_def)
    print("concrete_function:", concrete_function)

    if is_look:
        print("more information about this savemodel:")
        # NOTE(review): hard-coded dist-packages path — assumes the
        # container's Python 3.10 layout; confirm before reuse.
        cli = "/usr/local/lib/python3.10/dist-packages/tensorflow/python/tools/saved_model_cli.py"
        subprocess.run(["python", cli, "show", "--dir", source_path, "--all"])

    # List-form argv (shell=False) prevents shell injection through the
    # user-controlled paths; subprocess.run also waits for completion.
    command = [
        "python", "-m", "tf2onnx.convert",
        "--saved-model", source_path,
        "--output", target_path,
        "--opset", str(opset),
        "--tag", tag,
    ]
    if signature_def != "":
        command += ["--signature_def", str(signature_def)]
    if concrete_function != "":
        command += ["--concrete_function", str(concrete_function)]

    return subprocess.run(command).returncode

def hf2onnx(user_yaml_input, source_path, target_path):
    """Export a Hugging Face model to ONNX via ``transformers.onnx``.

    Args:
        user_yaml_input: YAML string with optional keys ``opset`` (int)
            and ``atol`` (validation tolerance for the exported graph).
        source_path: model directory (or hub id) to export.
        target_path: output location for the ONNX artifact.

    Returns:
        The exporter subprocess's exit code.
    """
    parsed_param = parse_yaml(user_yaml_input)
    print(parsed_param)

    opset = int(parsed_param.get("opset", 15))
    atol = parsed_param.get("atol", "1e-4")

    # BUG FIX: the original referenced undefined names ``model_path`` and
    # ``output_path`` (NameError at runtime); the function's parameters
    # are ``source_path``/``target_path``.
    print("source_path:", source_path)
    print("target_path:", target_path)
    print("opset:", opset)
    print("atol:", atol)

    # List-form argv (shell=False) prevents shell injection through the
    # user-controlled paths; subprocess.run also waits for completion.
    command = [
        "python", "-m", "transformers.onnx",
        "--model", source_path,
        target_path,
        "--opset", str(opset),
        "--atol", str(atol),
    ]
    return subprocess.run(command).returncode


# Run the demo conversion only when executed as a script.
if __name__ == "__main__":
    main()
文章来自个人专栏
AI-llama大模型,go语言开发
28 文章 | 2 订阅
0条评论
0 / 1000
请输入你的评论
0
0