searchusermenu
  • 发布文章
  • 消息中心
点赞
收藏
评论
分享
原创

huggingface->onnx

2023-11-02 07:59:40
1
0
# hf->onnx
python -m transformers.onnx --model=./ onnx/

import subprocess

# Example invocations for the SavedModel -> ONNX path (kept for reference):
#input_string = "convert_before:savemodel convert_after:onnx model_path:./bert output_path:./onnx/ opset:15"
#input_string = "convert_before:savemodel convert_after:onnx model_path:./bert/ output_path:./onnx/ opset:16 tag:serve signature_def:serving_default concrete_function:1"
input_string = "convert_before:huggingface convert_after:onnx model_path:./ output_path:./onnx/ opset:16 atol:1e-4"


# Parse the space-separated "key:value" pairs into a dict.
# .split() (no argument) tolerates runs of whitespace, and
# split(":", 1) keeps values that themselves contain ':' intact
# (e.g. Windows paths like "C:/models" or URLs).
variables = {}
for pair in input_string.split():
    key, value = pair.split(":", 1)
    variables[key.strip()] = value.strip()

# Both keys are mandatory; a KeyError here means the input string is malformed.
convert_before = variables["convert_before"]
convert_after = variables["convert_after"]


if convert_before == "savemodel" and convert_after == "onnx":
    # ---- TensorFlow SavedModel -> ONNX via tf2onnx ----
    model_path = variables.get("model_path")
    # tf2onnx's --output expects a file path, so append the model filename.
    output_path = variables.get("output_path") + "/model.onnx"
    opset = int(variables.get("opset", 15))
    tag = variables.get("tag", "serve")                         # SavedModel tag-set
    signature_def = variables.get("signature_def", "")          # optional
    concrete_function = variables.get("concrete_function", "")  # optional

    print("convert_before:", convert_before)
    print("convert_after:", convert_after)
    print("model_path:", model_path)
    print("output_path:", output_path)
    print("opset:", opset)
    print("tag:", tag)
    print("signature_def:", signature_def)
    print("concrete_function:", concrete_function)
    print("more information about this savemodel:")

    # Show the SavedModel's signatures before converting. Running the tool
    # with `-m` avoids the previously hard-coded site-packages path to
    # saved_model_cli.py, which only worked for one Python/TF layout.
    show_cmd = [
        "python", "-m", "tensorflow.python.tools.saved_model_cli",
        "show", "--dir", model_path, "--all",
    ]
    return_code = subprocess.run(show_cmd).returncode

    # Build the tf2onnx invocation as an argument list and run it with
    # shell=False: user-supplied paths are passed as discrete arguments
    # instead of being interpolated into a shell string (no injection,
    # no quoting problems with spaces in paths).
    convert_cmd = [
        "python", "-m", "tf2onnx.convert",
        "--saved-model", model_path,
        "--output", output_path,
        "--opset", str(opset),
        "--tag", tag,
    ]
    if signature_def:
        convert_cmd += ["--signature_def", signature_def]
    if concrete_function:
        convert_cmd += ["--concrete_function", concrete_function]

    return_code = subprocess.run(convert_cmd).returncode


if convert_before == "huggingface" and convert_after == "onnx":
    # ---- Hugging Face model -> ONNX via transformers.onnx ----
    model_path = variables.get("model_path")
    output_path = variables.get("output_path")
    opset = int(variables.get("opset", 15))
    atol = variables.get("atol", "1e-4")  # validation tolerance for the export check

    print("convert_before:", convert_before)
    print("convert_after:", convert_after)
    print("model_path:", model_path)
    print("output_path:", output_path)
    print("opset:", opset)
    print("atol:", atol)

    # Equivalent CLI: python -m transformers.onnx --model=<path> <output_dir>
    # Passed as an argument list with shell=False so user-supplied paths
    # cannot break out of the command (no shell-string interpolation).
    convert_cmd = [
        "python", "-m", "transformers.onnx",
        "--model", model_path,
        output_path,          # positional: directory where model.onnx is written
        "--opset", str(opset),
        "--atol", atol,
    ]
    return_code = subprocess.run(convert_cmd).returncode
0条评论
0 / 1000
l****n
28文章数
5粉丝数
l****n
28 文章 | 5 粉丝
原创

huggingface->onnx

2023-11-02 07:59:40
1
0
# hf->onnx
python -m transformers.onnx --model=./ onnx/

import subprocess

# Example invocations for the SavedModel -> ONNX path (kept for reference):
#input_string = "convert_before:savemodel convert_after:onnx model_path:./bert output_path:./onnx/ opset:15"
#input_string = "convert_before:savemodel convert_after:onnx model_path:./bert/ output_path:./onnx/ opset:16 tag:serve signature_def:serving_default concrete_function:1"
input_string = "convert_before:huggingface convert_after:onnx model_path:./ output_path:./onnx/ opset:16 atol:1e-4"


# Parse the space-separated "key:value" pairs into a dict.
# .split() (no argument) tolerates runs of whitespace, and
# split(":", 1) keeps values that themselves contain ':' intact
# (e.g. Windows paths like "C:/models" or URLs).
variables = {}
for pair in input_string.split():
    key, value = pair.split(":", 1)
    variables[key.strip()] = value.strip()

# Both keys are mandatory; a KeyError here means the input string is malformed.
convert_before = variables["convert_before"]
convert_after = variables["convert_after"]


if convert_before == "savemodel" and convert_after == "onnx":
    # ---- TensorFlow SavedModel -> ONNX via tf2onnx ----
    model_path = variables.get("model_path")
    # tf2onnx's --output expects a file path, so append the model filename.
    output_path = variables.get("output_path") + "/model.onnx"
    opset = int(variables.get("opset", 15))
    tag = variables.get("tag", "serve")                         # SavedModel tag-set
    signature_def = variables.get("signature_def", "")          # optional
    concrete_function = variables.get("concrete_function", "")  # optional

    print("convert_before:", convert_before)
    print("convert_after:", convert_after)
    print("model_path:", model_path)
    print("output_path:", output_path)
    print("opset:", opset)
    print("tag:", tag)
    print("signature_def:", signature_def)
    print("concrete_function:", concrete_function)
    print("more information about this savemodel:")

    # Show the SavedModel's signatures before converting. Running the tool
    # with `-m` avoids the previously hard-coded site-packages path to
    # saved_model_cli.py, which only worked for one Python/TF layout.
    show_cmd = [
        "python", "-m", "tensorflow.python.tools.saved_model_cli",
        "show", "--dir", model_path, "--all",
    ]
    return_code = subprocess.run(show_cmd).returncode

    # Build the tf2onnx invocation as an argument list and run it with
    # shell=False: user-supplied paths are passed as discrete arguments
    # instead of being interpolated into a shell string (no injection,
    # no quoting problems with spaces in paths).
    convert_cmd = [
        "python", "-m", "tf2onnx.convert",
        "--saved-model", model_path,
        "--output", output_path,
        "--opset", str(opset),
        "--tag", tag,
    ]
    if signature_def:
        convert_cmd += ["--signature_def", signature_def]
    if concrete_function:
        convert_cmd += ["--concrete_function", concrete_function]

    return_code = subprocess.run(convert_cmd).returncode


if convert_before == "huggingface" and convert_after == "onnx":
    # ---- Hugging Face model -> ONNX via transformers.onnx ----
    model_path = variables.get("model_path")
    output_path = variables.get("output_path")
    opset = int(variables.get("opset", 15))
    atol = variables.get("atol", "1e-4")  # validation tolerance for the export check

    print("convert_before:", convert_before)
    print("convert_after:", convert_after)
    print("model_path:", model_path)
    print("output_path:", output_path)
    print("opset:", opset)
    print("atol:", atol)

    # Equivalent CLI: python -m transformers.onnx --model=<path> <output_dir>
    # Passed as an argument list with shell=False so user-supplied paths
    # cannot break out of the command (no shell-string interpolation).
    convert_cmd = [
        "python", "-m", "transformers.onnx",
        "--model", model_path,
        output_path,          # positional: directory where model.onnx is written
        "--opset", str(opset),
        "--atol", atol,
    ]
    return_code = subprocess.run(convert_cmd).returncode
文章来自个人专栏
AI-llama大模型,go语言开发
28 文章 | 2 订阅
0条评论
0 / 1000
请输入你的评论
0
0