prepare_lite_cpp.sh

#!/bin/bash
source ./test_tipc/common_func.sh
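
# Usage: bash test_tipc/prepare_lite_cpp.sh <config_file> <paddlelite_library_source>
#   <config_file>: lite TIPC config file, parsed line by line below
#   <paddlelite_library_source>: "download" or "compile"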
FILENAME=$1
dataline=$(cat ${FILENAME})

# parse params
IFS=$'\n'
lines=(${dataline})
paddlelite_library_source=$2

inference_cmd=$(func_parser_value "${lines[1]}")
DEVICE=$(func_parser_value "${lines[2]}")
det_lite_model_list=$(func_parser_value "${lines[3]}")
rec_lite_model_list=$(func_parser_value "${lines[4]}")
cls_lite_model_list=$(func_parser_value "${lines[5]}")
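
# Pick the models to convert based on the inference command:
# det only, rec (+cls), or the full det+rec+cls system.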
if [[ $inference_cmd =~ "det" ]]; then
    lite_model_list=(${det_lite_model_list[*]})
elif [[ $inference_cmd =~ "rec" ]]; then
    lite_model_list=(${rec_lite_model_list[*]} ${cls_lite_model_list[*]})
elif [[ $inference_cmd =~ "system" ]]; then
    lite_model_list=(${det_lite_model_list[*]} ${rec_lite_model_list[*]} ${cls_lite_model_list[*]})
else
    echo "inference_cmd is wrong, please check."
    exit 1
fi
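
# Per-device settings: valid_targets is passed to paddle_lite_opt, end_index
# is the length of the directory name inside the prediction-library tarball,
# and compile_with_opencl toggles the OpenCL backend for source builds.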
if [ "${DEVICE}" = "ARM_CPU" ]; then
    valid_targets="arm"
    paddlelite_library_url="https://github.com/PaddlePaddle/Paddle-Lite/releases/download/v2.10-rc/inference_lite_lib.android.armv8.gcc.c++_shared.with_extra.with_cv.tar.gz"
    end_index="66"
    compile_with_opencl="OFF"
elif [ "${DEVICE}" = "ARM_GPU_OPENCL" ]; then
    valid_targets="opencl"
    paddlelite_library_url="https://github.com/PaddlePaddle/Paddle-Lite/releases/download/v2.10-rc/inference_lite_lib.armv8.clang.with_exception.with_extra.with_cv.opencl.tar.gz"
    end_index="71"
    compile_with_opencl="ON"
else
    echo "DEVICE only supports ARM_CPU and ARM_GPU_OPENCL."
    exit 2
fi

# prepare paddlelite model
pip install paddlelite==2.10-rc
current_dir=${PWD}
IFS="|"
model_path=./inference_models
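
# For each model: download the inference tarball, extract it, and convert it
# with paddle_lite_opt into a Paddle-Lite .nb model (<model_dir>_opt.nb).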
for model in ${lite_model_list[*]}; do
    if [[ $model =~ "PP-OCRv2" ]]; then
        inference_model_url=https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/${model}.tar
    elif [[ $model =~ "v2_0" ]]; then
        inference_model_url=https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/${model}.tar
    elif [[ $model =~ "PP-OCRv3" ]]; then
        inference_model_url=https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/${model}.tar
    else
        echo "Unsupported model: ${model}, please check."
        exit 3
    fi
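    # Fetch and unpack the model, then convert it for the selected target.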
    inference_model=${inference_model_url##*/}
    wget -nc -P ${model_path} ${inference_model_url}
    cd ${model_path} && tar -xf ${inference_model} && cd ../
    model_dir=${model_path}/${inference_model%.*}
    model_file=${model_dir}/inference.pdmodel
    param_file=${model_dir}/inference.pdiparams
    paddle_lite_opt --model_dir=${model_dir} --model_file=${model_file} --param_file=${param_file} --valid_targets=${valid_targets} --optimize_out=${model_dir}_opt
done

# prepare test data
data_url=https://paddleocr.bj.bcebos.com/dygraph_v2.0/test/icdar2015_lite.tar
data_file=${data_url##*/}
wget -nc -P ./test_data ${data_url}
cd ./test_data && tar -xf ${data_file} && rm ${data_file} && cd ../

# prepare paddlelite predict library
if [[ ${paddlelite_library_source} = "download" ]]; then
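    # The first end_index characters of the tarball name (i.e. the name minus
    # ".tar.gz") are the directory the archive extracts to.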
    paddlelite_library_zipfile=$(echo $paddlelite_library_url | awk -F "/" '{print $NF}')
    paddlelite_library_file=${paddlelite_library_zipfile:0:${end_index}}
    wget ${paddlelite_library_url} && tar -xf ${paddlelite_library_zipfile}
elif [[ ${paddlelite_library_source} = "compile" ]]; then
    git clone -b release/v2.10 https://github.com/PaddlePaddle/Paddle-Lite.git
    cd Paddle-Lite
    ./lite/tools/build_android.sh --arch=armv8 --with_cv=ON --with_extra=ON --toolchain=clang --with_opencl=${compile_with_opencl}
    cd ../
    cp -r Paddle-Lite/build.lite.android.armv8.clang/inference_lite_lib.android.armv8/ .
    paddlelite_library_file=inference_lite_lib.android.armv8
else
    echo "paddlelite_library_source only supports 'download' and 'compile'."
    exit 3
fi

# organize the required files
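# (optimized .nb models, test data, dict and config files, the prediction
# library, and the test scripts all go under demo/cxx/ocr/test_lite)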
mkdir -p ${paddlelite_library_file}/demo/cxx/ocr/test_lite
cp -r ${model_path}/*_opt.nb test_data ${paddlelite_library_file}/demo/cxx/ocr/test_lite
cp ppocr/utils/ppocr_keys_v1.txt deploy/lite/config.txt ${paddlelite_library_file}/demo/cxx/ocr/test_lite
cp -r ./deploy/lite/* ${paddlelite_library_file}/demo/cxx/ocr/
cp ${paddlelite_library_file}/cxx/lib/libpaddle_light_api_shared.so ${paddlelite_library_file}/demo/cxx/ocr/test_lite
cp ${FILENAME} test_tipc/test_lite_arm_cpp.sh test_tipc/common_func.sh ${paddlelite_library_file}/demo/cxx/ocr/test_lite
cd ${paddlelite_library_file}/demo/cxx/ocr/
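
# AutoLog provides the benchmark logging helpers the demo builds against.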
git clone https://github.com/cuicheng01/AutoLog.git

# compile and do some postprocess
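# (make runs twice, with a short pause between passes, to tolerate a flaky
# first build)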
make -j
sleep 1
make -j
cp ocr_db_crnn test_lite && cp test_lite/libpaddle_light_api_shared.so test_lite/libc++_shared.so
tar -cf test_lite.tar ./test_lite && cp test_lite.tar ${current_dir} && cd ${current_dir}
rm -rf ${paddlelite_library_file}* && rm -rf ${model_path}