import tensorrt as trt
# Build a logger
trt_logger = trt.Logger(trt.Logger.WARNING)
# Create the network
# 1. Caffe parser
data_type = trt.float32
with trt.Builder(trt_logger) as builder, \
        builder.create_network() as network, \
        trt.CaffeParser() as parser:
    model_tensors = parser.parse(
        deploy="sample.prototxt",
        model="sample_weight.caffemodel",
        network=network,
        dtype=data_type)
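    # A minimal follow-up sketch (not in the original snippet): the Caffe parser does
    # not mark network outputs automatically, so look up the output tensor by its blob
    # name and mark it. "prob" is a hypothetical blob name from the prototxt.
    network.mark_output(model_tensors.find("prob"))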
# 2. Using the ONNX parser to create the network
EXPLICIT_BATCH = 1 << int(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH)
with trt.Builder(trt_logger) as builder, \
        builder.create_network(EXPLICIT_BATCH) as network, \
        trt.OnnxParser(network, trt_logger) as parser:
    # model_path: path to the .onnx model file
    with open(model_path, 'rb') as model:
        if not parser.parse(model.read()):
            for error in range(parser.num_errors):
                print(parser.get_error(error))
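    # Optional sanity check, a sketch assuming parsing succeeded: list the input and
    # output tensors that the ONNX parser registered on the network.
    print("inputs :", [network.get_input(i).name for i in range(network.num_inputs)])
    print("outputs:", [network.get_output(i).name for i in range(network.num_outputs)])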
# Create the engine (assumes builder and network from one of the parsing
# steps above are still in scope, i.e. this runs inside that with-block):
config = builder.create_builder_config()
config.max_workspace_size = 1 << 20  # up to 1 MiB of builder scratch memory
with builder.build_engine(network, config) as engine:
    # Serialize the engine:
    serialized_engine = engine.serialize()
    with open("sample.engine", "wb") as f:
        f.write(serialized_engine)
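# A minimal follow-up sketch (not in the original snippet): reload the serialized
# engine with trt.Runtime and create an execution context for inference.
with open("sample.engine", "rb") as f, trt.Runtime(trt_logger) as runtime:
    engine = runtime.deserialize_cuda_engine(f.read())
    context = engine.create_execution_context()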