#!/bin/bash

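# Abort early if the NETRANS_PATH environment variable is not set.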
if [ -z "$NETRANS_PATH" ]; then
    echo "Need to set environment variable NETRANS_PATH"
    exit 1
fi

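# The pnnacc tool lives under $NETRANS_PATH; this script drives its
# "inference" subcommand.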
TENSORZONX=$NETRANS_PATH/pnnacc
TENSORZONX="$TENSORZONX inference"

DATASET=./dataset.txt

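# Run inference for the network in directory $1 using quantization type $2.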
function inference_network()
{
    NAME=$(basename "$1")
    pushd "$1"
    QUANTIZED=$2
    inf_path='./inf'

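    # Map the user-facing quantization keyword to the dtype and
    # quantization_type names expected by pnnacc.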
    if [ "${QUANTIZED}" = 'float' ]; then
        TYPE=float32
        quantization_type="float32"
    elif [ "${QUANTIZED}" = 'uint8' ]; then
        quantization_type="asymmetric_affine"
        TYPE=quantized
    elif [ "${QUANTIZED}" = 'int8' ]; then
        quantization_type="dynamic_fixed_point-8"
        TYPE=quantized
    elif [ "${QUANTIZED}" = 'int16' ]; then
        quantization_type="dynamic_fixed_point-16"
        TYPE=quantized
    else
        echo "=========== wrong quantization_type! ( float / uint8 / int8 / int16 ) ==========="
        exit 1
    fi

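    # Assemble the inference command; it is echoed and then executed with
    # eval, writing results to ${inf_path}.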
cmd="$TENSORZONX \
|
|
--dtype ${TYPE} \
|
|
--batch-size 1 \
|
|
--model-quantize ${NAME}_${quantization_type}.quantize \
|
|
--model ${NAME}.json \
|
|
--model-data ${NAME}.data \
|
|
--output-dir ${inf_path} \
|
|
--with-input-meta ${NAME}_inputmeta.yml \
|
|
--device CPU"
|
|
|
|
echo $cmd
|
|
eval $cmd
|
|
echo "=========== End inference $NAME model ==========="
|
|
|
|
popd
|
|
}
|
|
|
|
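# The script expects two arguments: the network directory and the
# quantization type ( float / uint8 / int8 / int16 ).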
if [ "$#" -lt 2 ]; then
|
|
echo "Input a network name and quantized type ( float / uint8 / int8 / int16 )"
|
|
exit -1
|
|
fi
|
|
|
|
if [ ! -e "${1%/}" ]; then
    echo "Directory ${1%/} does not exist!"
    exit 2
fi

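# Strip any trailing slash from the arguments before dispatching.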
inference_network "${1%/}" "${2%/}"