From f36f9324decf6ee6dc66433dc0c6177d9474949a Mon Sep 17 00:00:00 2001 From: yunyaoXYY <109218879+yunyaoXYY@users.noreply.github.com> Date: Thu, 23 Mar 2023 13:11:19 +0800 Subject: [PATCH] [Docs] Pick PPOCR fastdeploy docs from PaddleOCR (#1534) * Pick PPOCR fastdeploy docs from PaddleOCR * improve ppocr * improve readme * remove old PP-OCRv2 and PP-OCRv3 folfers * rename kunlun to kunlunxin * improve readme * improve readme * improve readme --------- Co-authored-by: Jason Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com> --- examples/vision/ocr/PP-OCR/README.md | 88 + .../{PP-OCRv2 => PP-OCR}/android/.gitignore | 0 .../README_CN.md => PP-OCR/android/README.md} | 16 +- .../android/app/build.gradle | 0 .../android/app/proguard-rules.pro | 0 .../fastdeploy/ExampleInstrumentedTest.java | 0 .../android/app/src/main/AndroidManifest.xml | 0 .../src/main/assets/labels/ppocr_keys_v1.txt | 0 .../app/examples/ocr/OcrMainActivity.java | 0 .../app/examples/ocr/OcrSettingsActivity.java | 0 .../baidu/paddle/fastdeploy/app/ui/Utils.java | 0 .../app/ui/layout/ActionBarLayout.java | 0 .../ui/view/AppCompatPreferenceActivity.java | 0 .../app/ui/view/CameraSurfaceView.java | 0 .../app/ui/view/ResultListView.java | 0 .../ui/view/adapter/BaseResultAdapter.java | 0 .../app/ui/view/model/BaseResultModel.java | 0 .../res/drawable-v24/action_button_layer.xml | 0 .../src/main/res/drawable-v24/album_btn.xml | 0 .../drawable-v24/ic_launcher_foreground.xml | 0 .../res/drawable-v24/realtime_start_btn.xml | 0 .../res/drawable-v24/realtime_stop_btn.xml | 0 .../result_page_border_section_bk.xml | 0 .../res/drawable-v24/round_corner_btn.xml | 0 .../seekbar_progress_realtime.xml | 0 .../drawable-v24/seekbar_progress_result.xml | 0 .../main/res/drawable-v24/seekbar_thumb.xml | 0 .../res/drawable-v24/seekbar_thumb_shape.xml | 0 .../main/res/drawable-v24/switch_side_btn.xml | 0 .../res/drawable-v24/take_picture_btn.xml | 0 .../app/src/main/res/drawable-xhdpi/album.png | Bin 
.../main/res/drawable-xhdpi/album_pressed.png | Bin .../src/main/res/drawable-xhdpi/back_btn.png | Bin .../src/main/res/drawable-xhdpi/more_menu.png | Bin .../res/drawable-xhdpi/realtime_start.png | Bin .../drawable-xhdpi/realtime_start_pressed.png | Bin .../main/res/drawable-xhdpi/realtime_stop.png | Bin .../drawable-xhdpi/realtime_stop_pressed.png | Bin .../src/main/res/drawable-xhdpi/scan_icon.png | Bin .../res/drawable-xhdpi/seekbar_handle.png | Bin .../seekbar_progress_dotted.png | Bin .../seekbar_thumb_invisible.png | Bin .../main/res/drawable-xhdpi/switch_side.png | Bin .../drawable-xhdpi/switch_side_pressed.png | Bin .../main/res/drawable-xhdpi/take_picture.png | Bin .../drawable-xhdpi/take_picture_pressed.png | Bin .../drawable-xxhdpi-v4/btn_switch_default.png | Bin .../drawable-xxhdpi-v4/btn_switch_pressed.png | Bin .../src/main/res/drawable/btn_settings.xml | 0 .../res/drawable/btn_settings_default.xml | 0 .../res/drawable/btn_settings_pressed.xml | 0 .../app/src/main/res/drawable/btn_shutter.xml | 0 .../main/res/drawable/btn_shutter_default.xml | 0 .../main/res/drawable/btn_shutter_pressed.xml | 0 .../app/src/main/res/drawable/btn_switch.xml | 0 .../res/drawable/ic_launcher_background.xml | 0 .../res/layout-land/ocr_activity_main.xml | 0 .../src/main/res/layout/ocr_activity_main.xml | 0 .../src/main/res/layout/ocr_camera_page.xml | 0 .../src/main/res/layout/ocr_result_page.xml | 0 .../main/res/layout/ocr_result_page_item.xml | 0 .../res/mipmap-anydpi-v26/ic_launcher.xml | 0 .../mipmap-anydpi-v26/ic_launcher_round.xml | 0 .../src/main/res/mipmap-hdpi/ic_launcher.png | Bin .../res/mipmap-hdpi/ic_launcher_round.png | Bin .../src/main/res/mipmap-mdpi/ic_launcher.png | Bin .../res/mipmap-mdpi/ic_launcher_round.png | Bin .../src/main/res/mipmap-xhdpi/ic_launcher.png | Bin .../res/mipmap-xhdpi/ic_launcher_round.png | Bin .../main/res/mipmap-xxhdpi/ic_launcher.png | Bin .../res/mipmap-xxhdpi/ic_launcher_round.png | Bin 
.../main/res/mipmap-xxxhdpi/ic_launcher.png | Bin .../res/mipmap-xxxhdpi/ic_launcher_round.png | Bin .../app/src/main/res/values/arrays.xml | 0 .../app/src/main/res/values/colors.xml | 0 .../app/src/main/res/values/dimens.xml | 0 .../app/src/main/res/values/strings.xml | 0 .../app/src/main/res/values/styles.xml | 0 .../app/src/main/res/values/values.xml | 0 .../app/src/main/res/xml/ocr_settings.xml | 0 .../{PP-OCRv2 => PP-OCR}/android/build.gradle | 0 .../android/gradle.properties | 0 .../android/gradle/wrapper/gradle-wrapper.jar | Bin .../gradle/wrapper/gradle-wrapper.properties | 0 .../ocr/{PP-OCRv2 => PP-OCR}/android/gradlew | 0 .../{PP-OCRv2 => PP-OCR}/android/gradlew.bat | 0 .../android/local.properties | 0 .../android/settings.gradle | 0 .../{README_CN.md => PP-OCR/ascend/README.md} | 19 +- .../c => PP-OCR/ascend/cpp}/CMakeLists.txt | 5 +- .../vision/ocr/PP-OCR/ascend/cpp/README.md | 63 + .../ascend/cpp/infer.cc} | 60 +- .../vision/ocr/PP-OCR/ascend/python/README.md | 55 + .../ascend/python/infer.py} | 37 +- examples/vision/ocr/PP-OCR/cpu-gpu/README.md | 26 + .../cpu-gpu}/c/CMakeLists.txt | 0 .../cpu-gpu/c/README.md} | 74 +- .../{PP-OCRv3 => PP-OCR/cpu-gpu}/c/infer.c | 46 +- .../ocr/PP-OCR/cpu-gpu/cpp/CMakeLists.txt | 30 + .../vision/ocr/PP-OCR/cpu-gpu/cpp/README.md | 163 + .../{PP-OCRv3 => PP-OCR/cpu-gpu}/cpp/infer.cc | 111 +- .../ocr/PP-OCR/cpu-gpu/cpp/infer_cls.cc | 79 + .../ocr/PP-OCR/cpu-gpu/cpp/infer_det.cc | 82 + .../ocr/PP-OCR/cpu-gpu/cpp/infer_rec.cc | 83 + .../cpu-gpu}/csharp/CMakeLists.txt | 0 .../cpu-gpu/csharp/README.md} | 66 +- .../cpu-gpu}/csharp/infer.cs | 0 .../ocr/PP-OCR/cpu-gpu/python/README.md | 153 + .../cpu-gpu}/python/infer.py | 101 +- .../ocr/PP-OCR/cpu-gpu/python/infer_cls.py | 77 + .../ocr/PP-OCR/cpu-gpu/python/infer_det.py | 82 + .../ocr/PP-OCR/cpu-gpu/python/infer_rec.py | 79 + .../vision/ocr/PP-OCR/kunlunxin/README.md | 32 + .../kunlunxin}/cpp/CMakeLists.txt | 4 - .../vision/ocr/PP-OCR/kunlunxin/cpp/README.md | 58 + 
.../kunlunxin/cpp/infer.cc} | 74 +- .../ocr/PP-OCR/kunlunxin/python/README.md | 54 + .../kunlunxin/python/infer.py} | 53 +- examples/vision/ocr/PP-OCR/rockchip/README.md | 23 + .../rockchip}/cpp/CMakeLists.txt | 4 - .../vision/ocr/PP-OCR/rockchip/cpp/README.md | 128 + .../rockchip/cpp/infer.cc} | 0 .../ocr/PP-OCR/rockchip/python/README.md | 112 + .../rockchip/python/infer.py} | 0 .../rknpu2_tools/config/ppocrv3_cls.yaml | 15 + .../rknpu2_tools/config/ppocrv3_det.yaml | 15 + .../rknpu2_tools/config/ppocrv3_rec.yaml | 15 + .../PP-OCR/rockchip/rknpu2_tools/export.py | 80 + examples/vision/ocr/PP-OCR/serving/README.md | 24 + .../serving/fastdeploy_serving/README.md} | 60 +- .../serving/fastdeploy_serving}/client.py | 0 .../models/cls_postprocess/1/model.py | 0 .../models/cls_postprocess/config.pbtxt | 0 .../models/cls_pp/config.pbtxt | 0 .../models/cls_runtime/config.pbtxt | 0 .../models/det_postprocess/1/model.py | 2 +- .../models/det_postprocess/config.pbtxt | 0 .../models/det_preprocess/1/model.py | 0 .../models/det_preprocess/config.pbtxt | 0 .../models/det_runtime/config.pbtxt | 0 .../models/pp_ocr/config.pbtxt | 0 .../models/rec_postprocess/1/model.py | 0 .../models/rec_postprocess/config.pbtxt | 0 .../models/rec_pp/config.pbtxt | 0 .../models/rec_runtime/config.pbtxt | 0 .../serving/fastdeploy_serving}/ppocr.png | Bin .../PP-OCR/serving/simple_serving/README.md | 54 + .../serving/simple_serving}/client.py | 0 .../serving/simple_serving}/server.py | 0 examples/vision/ocr/PP-OCR/sophgo/README.md | 102 + .../sophgo/cpp/CMakeLists.txt | 0 .../vision/ocr/PP-OCR/sophgo/cpp/README.md | 66 + .../{PP-OCRv3 => PP-OCR}/sophgo/cpp/infer.cc | 10 +- .../sophgo/python/README.md | 30 +- .../sophgo/python/infer.py | 0 .../web/README_CN.md => PP-OCR/web/README.md} | 17 +- .../vision/ocr/PP-OCRv2/android/README.md | 203 - .../vision/ocr/PP-OCRv2/android/README_CN.md | 203 - .../ocr/PP-OCRv2/android/app/build.gradle | 125 - .../app/examples/ocr/OcrMainActivity.java | 500 -- 
.../ocr/PP-OCRv2/android/local.properties | 8 - examples/vision/ocr/PP-OCRv2/c/README.md | 251 - examples/vision/ocr/PP-OCRv2/c/README_CN.md | 251 - examples/vision/ocr/PP-OCRv2/c/infer.c | 250 - examples/vision/ocr/PP-OCRv2/cpp/README.md | 157 - examples/vision/ocr/PP-OCRv2/cpp/README_CN.md | 162 - examples/vision/ocr/PP-OCRv2/cpp/infer.cc | 134 - examples/vision/ocr/PP-OCRv2/csharp/README.md | 153 - .../vision/ocr/PP-OCRv2/csharp/README_CN.md | 153 - examples/vision/ocr/PP-OCRv2/csharp/infer.cs | 79 - examples/vision/ocr/PP-OCRv2/python/README.md | 131 - .../vision/ocr/PP-OCRv2/python/README_CN.md | 133 - examples/vision/ocr/PP-OCRv2/python/infer.py | 217 - .../vision/ocr/PP-OCRv2/serving/README.md | 13 - .../vision/ocr/PP-OCRv2/serving/README_CN.md | 13 - .../vision/ocr/PP-OCRv3/android/.gitignore | 20 - .../vision/ocr/PP-OCRv3/android/README.md | 222 - .../PP-OCRv3/android/app/proguard-rules.pro | 21 - .../fastdeploy/ExampleInstrumentedTest.java | 26 - .../android/app/src/main/AndroidManifest.xml | 30 - .../src/main/assets/labels/ppocr_keys_v1.txt | 6623 ----------------- .../app/examples/ocr/OcrSettingsActivity.java | 198 - .../baidu/paddle/fastdeploy/app/ui/Utils.java | 313 - .../app/ui/layout/ActionBarLayout.java | 33 - .../ui/view/AppCompatPreferenceActivity.java | 111 - .../app/ui/view/CameraSurfaceView.java | 353 - .../app/ui/view/ResultListView.java | 43 - .../ui/view/adapter/BaseResultAdapter.java | 48 - .../app/ui/view/model/BaseResultModel.java | 41 - .../res/drawable-v24/action_button_layer.xml | 14 - .../src/main/res/drawable-v24/album_btn.xml | 7 - .../drawable-v24/ic_launcher_foreground.xml | 34 - .../res/drawable-v24/realtime_start_btn.xml | 7 - .../res/drawable-v24/realtime_stop_btn.xml | 7 - .../result_page_border_section_bk.xml | 12 - .../res/drawable-v24/round_corner_btn.xml | 10 - .../seekbar_progress_realtime.xml | 18 - .../drawable-v24/seekbar_progress_result.xml | 18 - .../main/res/drawable-v24/seekbar_thumb.xml | 9 - 
.../res/drawable-v24/seekbar_thumb_shape.xml | 26 - .../main/res/drawable-v24/switch_side_btn.xml | 7 - .../res/drawable-v24/take_picture_btn.xml | 7 - .../app/src/main/res/drawable-xhdpi/album.png | Bin 10284 -> 0 bytes .../main/res/drawable-xhdpi/album_pressed.png | Bin 9982 -> 0 bytes .../src/main/res/drawable-xhdpi/back_btn.png | Bin 455 -> 0 bytes .../src/main/res/drawable-xhdpi/more_menu.png | Bin 414 -> 0 bytes .../res/drawable-xhdpi/realtime_start.png | Bin 6166 -> 0 bytes .../drawable-xhdpi/realtime_start_pressed.png | Bin 6113 -> 0 bytes .../main/res/drawable-xhdpi/realtime_stop.png | Bin 7992 -> 0 bytes .../drawable-xhdpi/realtime_stop_pressed.png | Bin 8016 -> 0 bytes .../src/main/res/drawable-xhdpi/scan_icon.png | Bin 1970 -> 0 bytes .../res/drawable-xhdpi/seekbar_handle.png | Bin 23722 -> 0 bytes .../seekbar_progress_dotted.png | Bin 944 -> 0 bytes .../seekbar_thumb_invisible.png | Bin 2864 -> 0 bytes .../main/res/drawable-xhdpi/switch_side.png | Bin 4873 -> 0 bytes .../drawable-xhdpi/switch_side_pressed.png | Bin 4883 -> 0 bytes .../main/res/drawable-xhdpi/take_picture.png | Bin 7514 -> 0 bytes .../drawable-xhdpi/take_picture_pressed.png | Bin 7549 -> 0 bytes .../drawable-xxhdpi-v4/btn_switch_default.png | Bin 1523 -> 0 bytes .../drawable-xxhdpi-v4/btn_switch_pressed.png | Bin 10074 -> 0 bytes .../src/main/res/drawable/btn_settings.xml | 6 - .../res/drawable/btn_settings_default.xml | 13 - .../res/drawable/btn_settings_pressed.xml | 13 - .../app/src/main/res/drawable/btn_shutter.xml | 5 - .../main/res/drawable/btn_shutter_default.xml | 17 - .../main/res/drawable/btn_shutter_pressed.xml | 17 - .../app/src/main/res/drawable/btn_switch.xml | 5 - .../res/drawable/ic_launcher_background.xml | 170 - .../res/layout-land/ocr_activity_main.xml | 14 - .../src/main/res/layout/ocr_activity_main.xml | 14 - .../src/main/res/layout/ocr_camera_page.xml | 160 - .../src/main/res/layout/ocr_result_page.xml | 160 - .../main/res/layout/ocr_result_page_item.xml | 26 - 
.../res/mipmap-anydpi-v26/ic_launcher.xml | 5 - .../mipmap-anydpi-v26/ic_launcher_round.xml | 5 - .../src/main/res/mipmap-hdpi/ic_launcher.png | Bin 2963 -> 0 bytes .../res/mipmap-hdpi/ic_launcher_round.png | Bin 4905 -> 0 bytes .../src/main/res/mipmap-mdpi/ic_launcher.png | Bin 2060 -> 0 bytes .../res/mipmap-mdpi/ic_launcher_round.png | Bin 2783 -> 0 bytes .../src/main/res/mipmap-xhdpi/ic_launcher.png | Bin 4490 -> 0 bytes .../res/mipmap-xhdpi/ic_launcher_round.png | Bin 6895 -> 0 bytes .../main/res/mipmap-xxhdpi/ic_launcher.png | Bin 6387 -> 0 bytes .../res/mipmap-xxhdpi/ic_launcher_round.png | Bin 10413 -> 0 bytes .../main/res/mipmap-xxxhdpi/ic_launcher.png | Bin 9128 -> 0 bytes .../res/mipmap-xxxhdpi/ic_launcher_round.png | Bin 15132 -> 0 bytes .../app/src/main/res/values/arrays.xml | 39 - .../app/src/main/res/values/colors.xml | 22 - .../app/src/main/res/values/dimens.xml | 17 - .../app/src/main/res/values/strings.xml | 51 - .../app/src/main/res/values/styles.xml | 70 - .../app/src/main/res/values/values.xml | 17 - .../app/src/main/res/xml/ocr_settings.xml | 45 - .../vision/ocr/PP-OCRv3/android/build.gradle | 37 - .../ocr/PP-OCRv3/android/gradle.properties | 13 - .../android/gradle/wrapper/gradle-wrapper.jar | Bin 59203 -> 0 bytes .../gradle/wrapper/gradle-wrapper.properties | 6 - examples/vision/ocr/PP-OCRv3/android/gradlew | 0 .../vision/ocr/PP-OCRv3/android/gradlew.bat | 89 - .../ocr/PP-OCRv3/android/settings.gradle | 1 - examples/vision/ocr/PP-OCRv3/c/README.md | 251 - examples/vision/ocr/PP-OCRv3/cpp/README.md | 64 - examples/vision/ocr/PP-OCRv3/cpp/README_CN.md | 67 - .../vision/ocr/PP-OCRv3/csharp/CMakeLists.txt | 22 - examples/vision/ocr/PP-OCRv3/csharp/README.md | 153 - .../ocr/PP-OCRv3/mini_program/README.md | 40 - .../ocr/PP-OCRv3/mini_program/README_CN.md | 40 - examples/vision/ocr/PP-OCRv3/python/README.md | 55 - .../vision/ocr/PP-OCRv3/python/README_CN.md | 55 - .../ocr/PP-OCRv3/python/serving/README.md | 44 - 
.../ocr/PP-OCRv3/python/serving/README_CN.md | 44 - examples/vision/ocr/PP-OCRv3/rknpu2/README.md | 77 - .../ocr/PP-OCRv3/rknpu2/cpp/CMakeLists.txt | 14 - .../vision/ocr/PP-OCRv3/rknpu2/cpp/README.md | 55 - .../ocr/PP-OCRv3/rknpu2/cpp/README_CN.md | 63 - .../ocr/PP-OCRv3/rknpu2/python/README.md | 49 - .../ocr/PP-OCRv3/rknpu2/python/README_CN.md | 62 - .../vision/ocr/PP-OCRv3/serving/README.md | 107 - examples/vision/ocr/PP-OCRv3/sophgo/README.md | 88 - .../vision/ocr/PP-OCRv3/sophgo/cpp/README.md | 58 - examples/vision/ocr/PP-OCRv3/web/README.md | 39 - examples/vision/ocr/README.md | 20 - 281 files changed, 2272 insertions(+), 14253 deletions(-) create mode 100644 examples/vision/ocr/PP-OCR/README.md rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/.gitignore (100%) rename examples/vision/ocr/{PP-OCRv3/android/README_CN.md => PP-OCR/android/README.md} (93%) rename examples/vision/ocr/{PP-OCRv3 => PP-OCR}/android/app/build.gradle (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/proguard-rules.pro (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/AndroidManifest.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/assets/labels/ppocr_keys_v1.txt (100%) rename examples/vision/ocr/{PP-OCRv3 => PP-OCR}/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrSettingsActivity.java (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java (100%) rename examples/vision/ocr/{PP-OCRv2 
=> PP-OCR}/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/AppCompatPreferenceActivity.java (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/ResultListView.java (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/adapter/BaseResultAdapter.java (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/model/BaseResultModel.java (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/action_button_layer.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/album_btn.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/realtime_start_btn.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/round_corner_btn.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/seekbar_progress_realtime.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/seekbar_progress_result.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/seekbar_thumb.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/seekbar_thumb_shape.xml (100%) rename 
examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/switch_side_btn.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-v24/take_picture_btn.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/album.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/album_pressed.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/back_btn.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/more_menu.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/realtime_start.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/realtime_start_pressed.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/realtime_stop.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/realtime_stop_pressed.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/scan_icon.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/seekbar_handle.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/seekbar_progress_dotted.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/seekbar_thumb_invisible.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/switch_side.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/switch_side_pressed.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xhdpi/take_picture.png (100%) rename examples/vision/ocr/{PP-OCRv2 => 
PP-OCR}/android/app/src/main/res/drawable-xhdpi/take_picture_pressed.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_default.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_pressed.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable/btn_settings.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable/btn_settings_default.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable/btn_settings_pressed.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable/btn_shutter.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable/btn_shutter_default.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable/btn_shutter_pressed.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable/btn_switch.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/drawable/ic_launcher_background.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/layout-land/ocr_activity_main.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/layout/ocr_activity_main.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/layout/ocr_camera_page.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/layout/ocr_result_page.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/layout/ocr_result_page_item.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml (100%) rename 
examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-hdpi/ic_launcher.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-mdpi/ic_launcher.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/values/arrays.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/values/colors.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/values/dimens.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/values/strings.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/values/styles.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/values/values.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/app/src/main/res/xml/ocr_settings.xml (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/build.gradle (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/gradle.properties (100%) rename examples/vision/ocr/{PP-OCRv2 
=> PP-OCR}/android/gradle/wrapper/gradle-wrapper.jar (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/gradle/wrapper/gradle-wrapper.properties (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/gradlew (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/gradlew.bat (100%) rename examples/vision/ocr/{PP-OCRv3 => PP-OCR}/android/local.properties (100%) rename examples/vision/ocr/{PP-OCRv2 => PP-OCR}/android/settings.gradle (100%) rename examples/vision/ocr/{README_CN.md => PP-OCR/ascend/README.md} (84%) rename examples/vision/ocr/{PP-OCRv3/c => PP-OCR/ascend/cpp}/CMakeLists.txt (74%) create mode 100644 examples/vision/ocr/PP-OCR/ascend/cpp/README.md rename examples/vision/ocr/{PP-OCRv2/cpp/infer_static_shape.cc => PP-OCR/ascend/cpp/infer.cc} (67%) create mode 100644 examples/vision/ocr/PP-OCR/ascend/python/README.md rename examples/vision/ocr/{PP-OCRv3/python/infer_static_shape.py => PP-OCR/ascend/python/infer.py} (75%) create mode 100644 examples/vision/ocr/PP-OCR/cpu-gpu/README.md rename examples/vision/ocr/{PP-OCRv2 => PP-OCR/cpu-gpu}/c/CMakeLists.txt (100%) rename examples/vision/ocr/{PP-OCRv3/c/README_CN.md => PP-OCR/cpu-gpu/c/README.md} (74%) mode change 100644 => 100755 rename examples/vision/ocr/{PP-OCRv3 => PP-OCR/cpu-gpu}/c/infer.c (85%) create mode 100644 examples/vision/ocr/PP-OCR/cpu-gpu/cpp/CMakeLists.txt create mode 100644 examples/vision/ocr/PP-OCR/cpu-gpu/cpp/README.md rename examples/vision/ocr/{PP-OCRv3 => PP-OCR/cpu-gpu}/cpp/infer.cc (59%) mode change 100755 => 100644 create mode 100644 examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_cls.cc create mode 100644 examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_det.cc create mode 100644 examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_rec.cc rename examples/vision/ocr/{PP-OCRv2 => PP-OCR/cpu-gpu}/csharp/CMakeLists.txt (100%) rename examples/vision/ocr/{PP-OCRv3/csharp/README_CN.md => PP-OCR/cpu-gpu/csharp/README.md} (60%) mode change 100644 => 100755 rename 
examples/vision/ocr/{PP-OCRv3 => PP-OCR/cpu-gpu}/csharp/infer.cs (100%) create mode 100644 examples/vision/ocr/PP-OCR/cpu-gpu/python/README.md rename examples/vision/ocr/{PP-OCRv3 => PP-OCR/cpu-gpu}/python/infer.py (72%) create mode 100755 examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_cls.py create mode 100755 examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_det.py create mode 100755 examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_rec.py create mode 100644 examples/vision/ocr/PP-OCR/kunlunxin/README.md rename examples/vision/ocr/{PP-OCRv2 => PP-OCR/kunlunxin}/cpp/CMakeLists.txt (71%) create mode 100644 examples/vision/ocr/PP-OCR/kunlunxin/cpp/README.md rename examples/vision/ocr/{PP-OCRv3/cpp/infer_static_shape.cc => PP-OCR/kunlunxin/cpp/infer.cc} (57%) mode change 100755 => 100644 create mode 100644 examples/vision/ocr/PP-OCR/kunlunxin/python/README.md rename examples/vision/ocr/{PP-OCRv2/python/infer_static_shape.py => PP-OCR/kunlunxin/python/infer.py} (73%) create mode 100644 examples/vision/ocr/PP-OCR/rockchip/README.md rename examples/vision/ocr/{PP-OCRv3 => PP-OCR/rockchip}/cpp/CMakeLists.txt (71%) create mode 100755 examples/vision/ocr/PP-OCR/rockchip/cpp/README.md rename examples/vision/ocr/{PP-OCRv3/rknpu2/cpp/infer_static_shape.cc => PP-OCR/rockchip/cpp/infer.cc} (100%) create mode 100755 examples/vision/ocr/PP-OCR/rockchip/python/README.md rename examples/vision/ocr/{PP-OCRv3/rknpu2/python/infer_static_shape.py => PP-OCR/rockchip/python/infer.py} (100%) create mode 100644 examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_cls.yaml create mode 100644 examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_det.yaml create mode 100644 examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_rec.yaml create mode 100644 examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/export.py create mode 100644 examples/vision/ocr/PP-OCR/serving/README.md rename examples/vision/ocr/{PP-OCRv3/serving/README_CN.md => 
PP-OCR/serving/fastdeploy_serving/README.md} (61%) mode change 100644 => 100755 rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/client.py (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/cls_postprocess/1/model.py (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/cls_postprocess/config.pbtxt (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/cls_pp/config.pbtxt (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/cls_runtime/config.pbtxt (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/det_postprocess/1/model.py (99%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/det_postprocess/config.pbtxt (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/det_preprocess/1/model.py (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/det_preprocess/config.pbtxt (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/det_runtime/config.pbtxt (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/pp_ocr/config.pbtxt (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/rec_postprocess/1/model.py (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/rec_postprocess/config.pbtxt (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/rec_pp/config.pbtxt (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/models/rec_runtime/config.pbtxt (100%) rename examples/vision/ocr/{PP-OCRv3/serving => PP-OCR/serving/fastdeploy_serving}/ppocr.png (100%) 
create mode 100644 examples/vision/ocr/PP-OCR/serving/simple_serving/README.md rename examples/vision/ocr/{PP-OCRv3/python/serving => PP-OCR/serving/simple_serving}/client.py (100%) rename examples/vision/ocr/{PP-OCRv3/python/serving => PP-OCR/serving/simple_serving}/server.py (100%) create mode 100644 examples/vision/ocr/PP-OCR/sophgo/README.md rename examples/vision/ocr/{PP-OCRv3 => PP-OCR}/sophgo/cpp/CMakeLists.txt (100%) create mode 100644 examples/vision/ocr/PP-OCR/sophgo/cpp/README.md rename examples/vision/ocr/{PP-OCRv3 => PP-OCR}/sophgo/cpp/infer.cc (95%) rename examples/vision/ocr/{PP-OCRv3 => PP-OCR}/sophgo/python/README.md (50%) rename examples/vision/ocr/{PP-OCRv3 => PP-OCR}/sophgo/python/infer.py (100%) rename examples/vision/ocr/{PP-OCRv3/web/README_CN.md => PP-OCR/web/README.md} (76%) delete mode 100644 examples/vision/ocr/PP-OCRv2/android/README.md delete mode 100644 examples/vision/ocr/PP-OCRv2/android/README_CN.md delete mode 100644 examples/vision/ocr/PP-OCRv2/android/app/build.gradle delete mode 100644 examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java delete mode 100644 examples/vision/ocr/PP-OCRv2/android/local.properties delete mode 100755 examples/vision/ocr/PP-OCRv2/c/README.md delete mode 100644 examples/vision/ocr/PP-OCRv2/c/README_CN.md delete mode 100644 examples/vision/ocr/PP-OCRv2/c/infer.c delete mode 100755 examples/vision/ocr/PP-OCRv2/cpp/README.md delete mode 100644 examples/vision/ocr/PP-OCRv2/cpp/README_CN.md delete mode 100755 examples/vision/ocr/PP-OCRv2/cpp/infer.cc delete mode 100755 examples/vision/ocr/PP-OCRv2/csharp/README.md delete mode 100644 examples/vision/ocr/PP-OCRv2/csharp/README_CN.md delete mode 100644 examples/vision/ocr/PP-OCRv2/csharp/infer.cs delete mode 100755 examples/vision/ocr/PP-OCRv2/python/README.md delete mode 100644 examples/vision/ocr/PP-OCRv2/python/README_CN.md delete mode 100755 examples/vision/ocr/PP-OCRv2/python/infer.py 
delete mode 100644 examples/vision/ocr/PP-OCRv2/serving/README.md delete mode 100644 examples/vision/ocr/PP-OCRv2/serving/README_CN.md delete mode 100644 examples/vision/ocr/PP-OCRv3/android/.gitignore delete mode 100644 examples/vision/ocr/PP-OCRv3/android/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/proguard-rules.pro delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/AndroidManifest.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/assets/labels/ppocr_keys_v1.txt delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrSettingsActivity.java delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/AppCompatPreferenceActivity.java delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/ResultListView.java delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/adapter/BaseResultAdapter.java delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/model/BaseResultModel.java delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/action_button_layer.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/album_btn.xml delete 
mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/realtime_start_btn.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/round_corner_btn.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_progress_realtime.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_progress_result.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_thumb.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_thumb_shape.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/switch_side_btn.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/take_picture_btn.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/album.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/album_pressed.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/back_btn.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/more_menu.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/realtime_start.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/realtime_start_pressed.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/realtime_stop.png delete mode 100644 
examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/realtime_stop_pressed.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/scan_icon.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/seekbar_handle.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/seekbar_progress_dotted.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/seekbar_thumb_invisible.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/switch_side.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/switch_side_pressed.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/take_picture.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/take_picture_pressed.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_default.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_pressed.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_settings.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_settings_default.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_settings_pressed.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter_default.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter_pressed.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_switch.xml delete mode 100644 
examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/ic_launcher_background.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout-land/ocr_activity_main.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_activity_main.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_camera_page.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_result_page.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_result_page_item.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-hdpi/ic_launcher.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-mdpi/ic_launcher.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/arrays.xml delete mode 100644 
examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/colors.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/dimens.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/strings.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/styles.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/values.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/app/src/main/res/xml/ocr_settings.xml delete mode 100644 examples/vision/ocr/PP-OCRv3/android/build.gradle delete mode 100644 examples/vision/ocr/PP-OCRv3/android/gradle.properties delete mode 100644 examples/vision/ocr/PP-OCRv3/android/gradle/wrapper/gradle-wrapper.jar delete mode 100644 examples/vision/ocr/PP-OCRv3/android/gradle/wrapper/gradle-wrapper.properties delete mode 100644 examples/vision/ocr/PP-OCRv3/android/gradlew delete mode 100644 examples/vision/ocr/PP-OCRv3/android/gradlew.bat delete mode 100644 examples/vision/ocr/PP-OCRv3/android/settings.gradle delete mode 100755 examples/vision/ocr/PP-OCRv3/c/README.md delete mode 100755 examples/vision/ocr/PP-OCRv3/cpp/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/cpp/README_CN.md delete mode 100644 examples/vision/ocr/PP-OCRv3/csharp/CMakeLists.txt delete mode 100755 examples/vision/ocr/PP-OCRv3/csharp/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/mini_program/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/mini_program/README_CN.md delete mode 100755 examples/vision/ocr/PP-OCRv3/python/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/python/README_CN.md delete mode 100644 examples/vision/ocr/PP-OCRv3/python/serving/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/python/serving/README_CN.md delete mode 100644 examples/vision/ocr/PP-OCRv3/rknpu2/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/rknpu2/cpp/CMakeLists.txt delete mode 100755 
examples/vision/ocr/PP-OCRv3/rknpu2/cpp/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/rknpu2/cpp/README_CN.md delete mode 100755 examples/vision/ocr/PP-OCRv3/rknpu2/python/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/rknpu2/python/README_CN.md delete mode 100755 examples/vision/ocr/PP-OCRv3/serving/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/sophgo/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/sophgo/cpp/README.md delete mode 100644 examples/vision/ocr/PP-OCRv3/web/README.md delete mode 100644 examples/vision/ocr/README.md diff --git a/examples/vision/ocr/PP-OCR/README.md b/examples/vision/ocr/PP-OCR/README.md new file mode 100644 index 0000000000..3c157b4cfb --- /dev/null +++ b/examples/vision/ocr/PP-OCR/README.md @@ -0,0 +1,88 @@ +# PaddleOCR高性能全场景模型部署方案—FastDeploy + +## 目录 +- [FastDeploy介绍](#FastDeploy介绍) +- [PaddleOCR模型部署](#PaddleOCR模型部署) +- [常见问题](#常见问题) + +## 1. FastDeploy介绍 +
+ +**[⚡️FastDeploy](https://github.com/PaddlePaddle/FastDeploy)**是一款**全场景**、**易用灵活**、**极致高效**的AI推理部署工具,支持**云边端**部署.使用FastDeploy可以简单高效的在X86 CPU、NVIDIA GPU、飞腾CPU、ARM CPU、Intel GPU、昆仑、昇腾、算能、瑞芯微等10+款硬件上对PaddleOCR模型进行快速部署,并且支持Paddle Inference、Paddle Lite、TensorRT、OpenVINO、ONNXRuntime、SOPHGO、RKNPU2等多种推理后端. + +
+ + + +
+ +## 2. PaddleOCR模型部署 +
+ +### 2.1 硬件支持列表 + +|硬件类型|该硬件是否支持|使用指南|Python|C++| +|:---:|:---:|:---:|:---:|:---:| +|X86 CPU|✅|[链接](./cpu-gpu)|✅|✅| +|NVIDIA GPU|✅|[链接](./cpu-gpu)|✅|✅| +|飞腾CPU|✅|[链接](./cpu-gpu)|✅|✅| +|ARM CPU|✅|[链接](./cpu-gpu)|✅|✅| +|Intel GPU(集成显卡)|✅|[链接](./cpu-gpu)|✅|✅| +|Intel GPU(独立显卡)|✅|[链接](./cpu-gpu)|✅|✅| +|昆仑|✅|[链接](./kunlunxin)|✅|✅| +|昇腾|✅|[链接](./ascend)|✅|✅| +|算能|✅|[链接](./sophgo)|✅|✅| +|瑞芯微|✅|[链接](./rockchip)|✅|✅| + +### 2.2. 详细使用文档 +- X86 CPU + - [部署模型准备](./cpu-gpu) + - [Python部署示例](./cpu-gpu/python/) + - [C++部署示例](./cpu-gpu/cpp/) +- NVIDIA GPU + - [部署模型准备](./cpu-gpu) + - [Python部署示例](./cpu-gpu/python/) + - [C++部署示例](./cpu-gpu/cpp/) +- 飞腾CPU + - [部署模型准备](./cpu-gpu) + - [Python部署示例](./cpu-gpu/python/) + - [C++部署示例](./cpu-gpu/cpp/) +- ARM CPU + - [部署模型准备](./cpu-gpu) + - [Python部署示例](./cpu-gpu/python/) + - [C++部署示例](./cpu-gpu/cpp/) +- Intel GPU + - [部署模型准备](./cpu-gpu) + - [Python部署示例](./cpu-gpu/python/) + - [C++部署示例](./cpu-gpu/cpp/) +- 昆仑 XPU + - [部署模型准备](./kunlunxin) + - [Python部署示例](./kunlunxin/python/) + - [C++部署示例](./kunlunxin/cpp/) +- 昇腾 Ascend + - [部署模型准备](./ascend) + - [Python部署示例](./ascend/python/) + - [C++部署示例](./ascend/cpp/) +- 算能 Sophgo + - [部署模型准备](./sophgo/) + - [Python部署示例](./sophgo/python/) + - [C++部署示例](./sophgo/cpp/) +- 瑞芯微 Rockchip + - [部署模型准备](./rockchip/) + - [Python部署示例](./rockchip/rknpu2/) + - [C++部署示例](./rockchip/rknpu2/) + +### 2.3 更多部署方式 + +- [Android ARM CPU部署](./android) +- [服务化Serving部署](./serving) +- [web部署](./web) + + +## 3. 常见问题 +
+ +遇到问题可查看常见问题集合,搜索FastDeploy issue,*或给FastDeploy提交[issue](https://github.com/PaddlePaddle/FastDeploy/issues)*: + +[常见问题集合](https://github.com/PaddlePaddle/FastDeploy/tree/develop/docs/cn/faq) +[FastDeploy issues](https://github.com/PaddlePaddle/FastDeploy/issues) diff --git a/examples/vision/ocr/PP-OCRv2/android/.gitignore b/examples/vision/ocr/PP-OCR/android/.gitignore similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/.gitignore rename to examples/vision/ocr/PP-OCR/android/.gitignore diff --git a/examples/vision/ocr/PP-OCRv3/android/README_CN.md b/examples/vision/ocr/PP-OCR/android/README.md similarity index 93% rename from examples/vision/ocr/PP-OCRv3/android/README_CN.md rename to examples/vision/ocr/PP-OCR/android/README.md index b355119e2b..19ff8a0195 100644 --- a/examples/vision/ocr/PP-OCRv3/android/README_CN.md +++ b/examples/vision/ocr/PP-OCR/android/README.md @@ -1,7 +1,7 @@ [English](README.md) | 简体中文 -# OCR文字识别 Android Demo 使用文档 +# PaddleOCR Android Demo 使用文档 -在 Android 上实现实时的OCR文字识别功能,此 Demo 有很好的的易用性和开放性,如在 Demo 中跑自己训练好的模型等。 +在 Android 上实现实时的PaddleOCR文字识别功能,此 Demo 有很好的的易用性和开放性,如在 Demo 中跑自己训练好的模型等。 ## 环境准备 @@ -10,9 +10,8 @@ ## 部署步骤 -1. OCR文字识别 Demo 位于 `fastdeploy/examples/vision/ocr/PP-OCRv3/android` 目录 -2. 用 Android Studio 打开 PP-OCRv3/android 工程 -3. 手机连接电脑,打开 USB 调试和文件传输模式,并在 Android Studio 上连接自己的手机设备(手机需要开启允许从 USB 安装软件权限) +1. 用 Android Studio 打开 PP-OCRv3/android 工程 +2. 手机连接电脑,打开 USB 调试和文件传输模式,并在 Android Studio 上连接自己的手机设备(手机需要开启允许从 USB 安装软件权限)

image @@ -186,7 +185,7 @@ model.init(detModel, clsModel, recModel); ## 替换 FastDeploy SDK和模型 替换FastDeploy预测库和模型的步骤非常简单。预测库所在的位置为 `app/libs/fastdeploy-android-sdk-xxx.aar`,其中 `xxx` 表示当前您使用的预测库版本号。模型所在的位置为,`app/src/main/assets/models`。 - 替换FastDeploy Android SDK: 下载或编译最新的FastDeploy Android SDK,解压缩后放在 `app/libs` 目录下;详细配置文档可参考: - - [在 Android 中使用 FastDeploy Java SDK](../../../../../java/android/) + - [在 Android 中使用 FastDeploy Java SDK](https://github.com/PaddlePaddle/FastDeploy/tree/develop/java/android) - 替换OCR模型的步骤: - 将您的OCR模型放在 `app/src/main/assets/models` 目录下; @@ -219,5 +218,6 @@ predictor.init(detModel, recModel); ## 更多参考文档 如果您想知道更多的FastDeploy Java API文档以及如何通过JNI来接入FastDeploy C++ API感兴趣,可以参考以下内容: -- [在 Android 中使用 FastDeploy Java SDK](../../../../../java/android/) -- [在 Android 中使用 FastDeploy C++ SDK](../../../../../docs/cn/faq/use_cpp_sdk_on_android.md) +- [在 Android 中使用 FastDeploy Java SDK](https://github.com/PaddlePaddle/FastDeploy/tree/develop/java/android) +- [在 Android 中使用 FastDeploy C++ SDK](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/use_cpp_sdk_on_android.md) +- 如果用户想要调整前后处理超参数、单独使用文字检测识别模型、使用其他模型等,更多详细文档与说明请参考[PP-OCR系列在CPU/GPU上的部署](../../cpu-gpu/python/README.md) diff --git a/examples/vision/ocr/PP-OCRv3/android/app/build.gradle b/examples/vision/ocr/PP-OCR/android/app/build.gradle similarity index 100% rename from examples/vision/ocr/PP-OCRv3/android/app/build.gradle rename to examples/vision/ocr/PP-OCR/android/app/build.gradle diff --git a/examples/vision/ocr/PP-OCRv2/android/app/proguard-rules.pro b/examples/vision/ocr/PP-OCR/android/app/proguard-rules.pro similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/proguard-rules.pro rename to examples/vision/ocr/PP-OCR/android/app/proguard-rules.pro diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java 
b/examples/vision/ocr/PP-OCR/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java rename to examples/vision/ocr/PP-OCR/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/AndroidManifest.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/AndroidManifest.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/AndroidManifest.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/AndroidManifest.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/assets/labels/ppocr_keys_v1.txt b/examples/vision/ocr/PP-OCR/android/app/src/main/assets/labels/ppocr_keys_v1.txt similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/assets/labels/ppocr_keys_v1.txt rename to examples/vision/ocr/PP-OCR/android/app/src/main/assets/labels/ppocr_keys_v1.txt diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java b/examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java similarity index 100% rename from examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java rename to examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrSettingsActivity.java b/examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrSettingsActivity.java similarity index 100% rename from 
examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrSettingsActivity.java rename to examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrSettingsActivity.java diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java b/examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java rename to examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java b/examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java rename to examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/AppCompatPreferenceActivity.java b/examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/AppCompatPreferenceActivity.java similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/AppCompatPreferenceActivity.java rename to examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/AppCompatPreferenceActivity.java diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java 
b/examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java rename to examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/ResultListView.java b/examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/ResultListView.java similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/ResultListView.java rename to examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/ResultListView.java diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/adapter/BaseResultAdapter.java b/examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/adapter/BaseResultAdapter.java similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/adapter/BaseResultAdapter.java rename to examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/adapter/BaseResultAdapter.java diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/model/BaseResultModel.java b/examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/model/BaseResultModel.java similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/model/BaseResultModel.java rename to 
examples/vision/ocr/PP-OCR/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/model/BaseResultModel.java diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/action_button_layer.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/action_button_layer.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/action_button_layer.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/action_button_layer.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/album_btn.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/album_btn.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/album_btn.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/album_btn.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/realtime_start_btn.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/realtime_start_btn.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/realtime_start_btn.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/realtime_start_btn.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml similarity index 100% rename from 
examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/round_corner_btn.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/round_corner_btn.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/round_corner_btn.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/round_corner_btn.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/seekbar_progress_realtime.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/seekbar_progress_realtime.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/seekbar_progress_realtime.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/seekbar_progress_realtime.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/seekbar_progress_result.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/seekbar_progress_result.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/seekbar_progress_result.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/seekbar_progress_result.xml diff --git 
a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/seekbar_thumb.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/seekbar_thumb.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/seekbar_thumb.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/seekbar_thumb.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/seekbar_thumb_shape.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/seekbar_thumb_shape.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/seekbar_thumb_shape.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/seekbar_thumb_shape.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/switch_side_btn.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/switch_side_btn.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/switch_side_btn.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/switch_side_btn.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/take_picture_btn.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/take_picture_btn.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-v24/take_picture_btn.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-v24/take_picture_btn.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/album.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/album.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/album.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/album.png diff --git 
a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/album_pressed.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/album_pressed.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/album_pressed.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/album_pressed.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/back_btn.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/back_btn.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/back_btn.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/back_btn.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/more_menu.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/more_menu.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/more_menu.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/more_menu.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/realtime_start.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/realtime_start.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/realtime_start.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/realtime_start.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/realtime_start_pressed.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/realtime_start_pressed.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/realtime_start_pressed.png rename to 
examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/realtime_start_pressed.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/realtime_stop.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/realtime_stop.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/realtime_stop.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/realtime_stop.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/realtime_stop_pressed.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/realtime_stop_pressed.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/realtime_stop_pressed.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/realtime_stop_pressed.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/scan_icon.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/scan_icon.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/scan_icon.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/scan_icon.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/seekbar_handle.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/seekbar_handle.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/seekbar_handle.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/seekbar_handle.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/seekbar_progress_dotted.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/seekbar_progress_dotted.png similarity index 100% rename from 
examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/seekbar_progress_dotted.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/seekbar_progress_dotted.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/seekbar_thumb_invisible.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/seekbar_thumb_invisible.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/seekbar_thumb_invisible.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/seekbar_thumb_invisible.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/switch_side.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/switch_side.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/switch_side.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/switch_side.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/switch_side_pressed.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/switch_side_pressed.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/switch_side_pressed.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/switch_side_pressed.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/take_picture.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/take_picture.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/take_picture.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/take_picture.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/take_picture_pressed.png 
b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/take_picture_pressed.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xhdpi/take_picture_pressed.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xhdpi/take_picture_pressed.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_default.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_default.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_default.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_default.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_pressed.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_pressed.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_pressed.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_pressed.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_settings.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_settings.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_settings.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_settings.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_settings_default.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_settings_default.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_settings_default.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_settings_default.xml diff --git 
a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_settings_pressed.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_settings_pressed.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_settings_pressed.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_settings_pressed.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_shutter.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_shutter.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_shutter.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_shutter.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_shutter_default.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_shutter_default.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_shutter_default.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_shutter_default.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_shutter_pressed.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_shutter_pressed.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_shutter_pressed.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_shutter_pressed.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_switch.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_switch.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/btn_switch.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/btn_switch.xml diff --git 
a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/ic_launcher_background.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/ic_launcher_background.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/drawable/ic_launcher_background.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/drawable/ic_launcher_background.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/layout-land/ocr_activity_main.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/layout-land/ocr_activity_main.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/layout-land/ocr_activity_main.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/layout-land/ocr_activity_main.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/layout/ocr_activity_main.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/layout/ocr_activity_main.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/layout/ocr_activity_main.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/layout/ocr_activity_main.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/layout/ocr_camera_page.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/layout/ocr_camera_page.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/layout/ocr_camera_page.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/layout/ocr_camera_page.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/layout/ocr_result_page.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/layout/ocr_result_page.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/layout/ocr_result_page.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/layout/ocr_result_page.xml diff --git 
a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/layout/ocr_result_page_item.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/layout/ocr_result_page_item.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/layout/ocr_result_page_item.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/layout/ocr_result_page_item.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-hdpi/ic_launcher.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-hdpi/ic_launcher.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-hdpi/ic_launcher.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png rename to 
examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-mdpi/ic_launcher.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-mdpi/ic_launcher.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-mdpi/ic_launcher.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png rename to 
examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/arrays.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/values/arrays.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/arrays.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/values/arrays.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/colors.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/values/colors.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/colors.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/values/colors.xml 
diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/dimens.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/values/dimens.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/dimens.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/values/dimens.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/strings.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/values/strings.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/strings.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/values/strings.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/styles.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/values/styles.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/styles.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/values/styles.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/values.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/values/values.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/values/values.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/values/values.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/res/xml/ocr_settings.xml b/examples/vision/ocr/PP-OCR/android/app/src/main/res/xml/ocr_settings.xml similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/app/src/main/res/xml/ocr_settings.xml rename to examples/vision/ocr/PP-OCR/android/app/src/main/res/xml/ocr_settings.xml diff --git a/examples/vision/ocr/PP-OCRv2/android/build.gradle b/examples/vision/ocr/PP-OCR/android/build.gradle similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/build.gradle rename to examples/vision/ocr/PP-OCR/android/build.gradle diff --git 
a/examples/vision/ocr/PP-OCRv2/android/gradle.properties b/examples/vision/ocr/PP-OCR/android/gradle.properties similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/gradle.properties rename to examples/vision/ocr/PP-OCR/android/gradle.properties diff --git a/examples/vision/ocr/PP-OCRv2/android/gradle/wrapper/gradle-wrapper.jar b/examples/vision/ocr/PP-OCR/android/gradle/wrapper/gradle-wrapper.jar similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/gradle/wrapper/gradle-wrapper.jar rename to examples/vision/ocr/PP-OCR/android/gradle/wrapper/gradle-wrapper.jar diff --git a/examples/vision/ocr/PP-OCRv2/android/gradle/wrapper/gradle-wrapper.properties b/examples/vision/ocr/PP-OCR/android/gradle/wrapper/gradle-wrapper.properties similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/gradle/wrapper/gradle-wrapper.properties rename to examples/vision/ocr/PP-OCR/android/gradle/wrapper/gradle-wrapper.properties diff --git a/examples/vision/ocr/PP-OCRv2/android/gradlew b/examples/vision/ocr/PP-OCR/android/gradlew similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/gradlew rename to examples/vision/ocr/PP-OCR/android/gradlew diff --git a/examples/vision/ocr/PP-OCRv2/android/gradlew.bat b/examples/vision/ocr/PP-OCR/android/gradlew.bat similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/gradlew.bat rename to examples/vision/ocr/PP-OCR/android/gradlew.bat diff --git a/examples/vision/ocr/PP-OCRv3/android/local.properties b/examples/vision/ocr/PP-OCR/android/local.properties similarity index 100% rename from examples/vision/ocr/PP-OCRv3/android/local.properties rename to examples/vision/ocr/PP-OCR/android/local.properties diff --git a/examples/vision/ocr/PP-OCRv2/android/settings.gradle b/examples/vision/ocr/PP-OCR/android/settings.gradle similarity index 100% rename from examples/vision/ocr/PP-OCRv2/android/settings.gradle rename to examples/vision/ocr/PP-OCR/android/settings.gradle diff 
--git a/examples/vision/ocr/README_CN.md b/examples/vision/ocr/PP-OCR/ascend/README.md similarity index 84% rename from examples/vision/ocr/README_CN.md rename to examples/vision/ocr/PP-OCR/ascend/README.md index 9cf63c52d4..3e13de3ef8 100644 --- a/examples/vision/ocr/README_CN.md +++ b/examples/vision/ocr/PP-OCR/ascend/README.md @@ -1,20 +1,23 @@ [English](README.md) | 简体中文 -# PaddleOCR 模型部署 -## PaddleOCR为多个模型组合串联任务,包含 -- 文本检测 `DBDetector` -- [可选]方向分类 `Classifer` 用于调整进入文字识别前的图像方向 -- 文字识别 `Recognizer` 用于从图像中识别出文字 +# PaddleOCR 模型在华为昇腾上部署方案-FastDeploy -根据不同场景, FastDeploy汇总提供如下OCR任务部署, 用户需同时下载3个模型与字典文件(或2个,分类器可选), 完成OCR整个预测流程 +## 1. 说明 +PaddleOCR支持通过FastDeploy在华为昇腾上部署相关模型 + +## 2. 支持模型列表 -### PP-OCR 中英文系列模型 下表中的模型下载链接由PaddleOCR模型库提供, 详见[PP-OCR系列模型列表](https://github.com/PaddlePaddle/PaddleOCR/blob/release/2.6/doc/doc_ch/models_list.md) -| OCR版本 | 文本框检测 | 方向分类模型 | 文字识别 |字典文件| 说明 | +| PaddleOCR版本 | 文本框检测 | 方向分类模型 | 文字识别 |字典文件| 说明 | |:----|:----|:----|:----|:----|:--------| | ch_PP-OCRv3[推荐] |[ch_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv3系列原始超轻量模型,支持中英文、多语种文本检测 | | en_PP-OCRv3[推荐] |[en_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [en_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_infer.tar) | [en_dict.txt](https://bj.bcebos.com/paddlehub/fastdeploy/en_dict.txt) | OCRv3系列原始超轻量模型,支持英文与数字识别,除检测模型和识别模型的训练数据与中文模型不同以外,无其他区别 | | ch_PP-OCRv2 |[ch_PP-OCRv2_det](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar) | 
[ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv2_rec](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测 | | ch_PP-OCRv2_mobile |[ch_ppocr_mobile_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_mobile_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测,比PPOCRv2更加轻量 | | ch_PP-OCRv2_server |[ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) |[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2服务器系列模型, 支持中英文、多语种文本检测,比超轻量模型更大,但效果更好| + + +## 3. 
详细部署的部署示例 +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/ocr/PP-OCRv3/c/CMakeLists.txt b/examples/vision/ocr/PP-OCR/ascend/cpp/CMakeLists.txt similarity index 74% rename from examples/vision/ocr/PP-OCRv3/c/CMakeLists.txt rename to examples/vision/ocr/PP-OCR/ascend/cpp/CMakeLists.txt index b228346da8..93540a7e83 100644 --- a/examples/vision/ocr/PP-OCRv3/c/CMakeLists.txt +++ b/examples/vision/ocr/PP-OCR/ascend/cpp/CMakeLists.txt @@ -1,4 +1,4 @@ -PROJECT(infer_demo C) +PROJECT(infer_demo C CXX) CMAKE_MINIMUM_REQUIRED (VERSION 3.10) # 指定下载解压后的fastdeploy库路径 @@ -9,5 +9,6 @@ include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) # 添加FastDeploy依赖头文件 include_directories(${FASTDEPLOY_INCS}) -add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.c) +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/ocr/PP-OCR/ascend/cpp/README.md b/examples/vision/ocr/PP-OCR/ascend/cpp/README.md new file mode 100644 index 0000000000..ed8d63a309 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/ascend/cpp/README.md @@ -0,0 +1,63 @@ +[English](README.md) | 简体中文 +# PP-OCRv3 Ascend C++部署示例 + +本目录下提供`infer.cc`, 供用户完成PP-OCRv3在华为昇腾AI处理器上的部署. + +## 1. 部署环境准备 +在部署前,需确认以下两个步骤 +- 1. 在部署前,需自行编译基于华为昇腾AI处理器的预测库,参考文档[华为昇腾AI处理器部署环境编译](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装) +- 2. 部署时需要环境初始化, 请参考[如何使用C++在华为昇腾AI处理器部署](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/use_sdk_on_ascend.md) + + +## 2.部署模型准备 +在部署前, 请准备好您所需要运行的推理模型, 您可以在[FastDeploy支持的PaddleOCR模型列表](../README.md)中下载所需模型. 
+ +## 3.运行部署示例 +``` +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd FastDeploy/examples/vision/ocr/PP-OCR/ascend/cpp + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/ascend/cpp + +mkdir build +cd build +# 使用编译完成的FastDeploy库编译infer_demo +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-ascend +make -j + +# 下载PP-OCRv3文字检测模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar +tar -xvf ch_PP-OCRv3_det_infer.tar +# 下载文字方向分类器模型 +wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar +tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar +# 下载PP-OCRv3文字识别模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar +tar -xvf ch_PP-OCRv3_rec_infer.tar + +# 下载预测图片与字典文件 +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt + +# 按照上文提供的文档完成环境初始化, 并执行以下命令 +./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg + +# NOTE:若用户需要连续地预测图片, 输入图片尺寸需要准备为统一尺寸, 例如 N 张, 尺寸为 A * B 的图片. +``` + +运行完成可视化结果如下图所示 + +

+ +
+ +## 4. 更多指南 +- [PP-OCR系列 C++ API查阅](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/cpp/html/namespacefastdeploy_1_1vision_1_1ocr.html) +- [FastDeploy部署PaddleOCR模型概览](../../) +- [PP-OCRv3 Python部署](../python) +- 如果用户想要调整前后处理超参数、单独使用文字检测识别模型、使用其他模型等,更多详细文档与说明请参考[PP-OCR系列在CPU/GPU上的部署](../../cpu-gpu/python/README.md) diff --git a/examples/vision/ocr/PP-OCRv2/cpp/infer_static_shape.cc b/examples/vision/ocr/PP-OCR/ascend/cpp/infer.cc similarity index 67% rename from examples/vision/ocr/PP-OCRv2/cpp/infer_static_shape.cc rename to examples/vision/ocr/PP-OCR/ascend/cpp/infer.cc index 7a48ba879e..dc0a986707 100644 --- a/examples/vision/ocr/PP-OCRv2/cpp/infer_static_shape.cc +++ b/examples/vision/ocr/PP-OCR/ascend/cpp/infer.cc @@ -19,12 +19,12 @@ const char sep = '\\'; const char sep = '/'; #endif -void InitAndInfer(const std::string& det_model_dir, - const std::string& cls_model_dir, - const std::string& rec_model_dir, - const std::string& rec_label_file, - const std::string& image_file, - const fastdeploy::RuntimeOption& option) { +void AscendInfer(const std::string &det_model_dir, + const std::string &cls_model_dir, + const std::string &rec_model_dir, + const std::string &rec_label_file, + const std::string &image_file, + const fastdeploy::RuntimeOption &option) { auto det_model_file = det_model_dir + sep + "inference.pdmodel"; auto det_params_file = det_model_dir + sep + "inference.pdiparams"; @@ -34,6 +34,9 @@ void InitAndInfer(const std::string& det_model_dir, auto rec_model_file = rec_model_dir + sep + "inference.pdmodel"; auto rec_params_file = rec_model_dir + sep + "inference.pdiparams"; + fastdeploy::RuntimeOption option; + option.UseAscend(); + auto det_option = option; auto cls_option = option; auto rec_option = option; @@ -45,9 +48,7 @@ void InitAndInfer(const std::string& det_model_dir, auto rec_model = fastdeploy::vision::ocr::Recognizer( rec_model_file, rec_params_file, rec_label_file, rec_option); - // Users could enable static shape infer for rec 
model when deploy PP-OCR on - // hardware - // which can not support dynamic shape infer well, like Huawei Ascend series. + // When deploy on Ascend, rec model must enable static shape infer as below. rec_model.GetPreprocessor().SetStaticShapeInfer(true); assert(det_model.Initialized()); @@ -56,16 +57,16 @@ void InitAndInfer(const std::string& det_model_dir, // The classification model is optional, so the PP-OCR can also be connected // in series as follows - // auto ppocr_v2 = fastdeploy::pipeline::PPOCRv2(&det_model, &rec_model); - auto ppocr_v2 = - fastdeploy::pipeline::PPOCRv2(&det_model, &cls_model, &rec_model); + // auto ppocr_v3 = fastdeploy::pipeline::PPOCRv3(&det_model, &rec_model); + auto ppocr_v3 = + fastdeploy::pipeline::PPOCRv3(&det_model, &cls_model, &rec_model); // When users enable static shape infer for rec model, the batch size of cls // and rec model must to be set to 1. - ppocr_v2.SetClsBatchSize(1); - ppocr_v2.SetRecBatchSize(1); + ppocr_v3.SetClsBatchSize(1); + ppocr_v3.SetRecBatchSize(1); - if (!ppocr_v2.Initialized()) { + if (!ppocr_v3.Initialized()) { std::cerr << "Failed to initialize PP-OCR." << std::endl; return; } @@ -73,7 +74,7 @@ void InitAndInfer(const std::string& det_model_dir, auto im = cv::imread(image_file); fastdeploy::vision::OCRResult result; - if (!ppocr_v2.Predict(im, &result)) { + if (!ppocr_v3.Predict(im, &result)) { std::cerr << "Failed to predict." 
<< std::endl; return; } @@ -85,36 +86,23 @@ void InitAndInfer(const std::string& det_model_dir, std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; } -int main(int argc, char* argv[]) { - if (argc < 7) { +int main(int argc, char *argv[]) { + if (argc < 6) { std::cout << "Usage: infer_demo path/to/det_model path/to/cls_model " "path/to/rec_model path/to/rec_label_file path/to/image " - "run_option, " - "e.g ./infer_demo ./ch_PP-OCRv2_det_infer " - "./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer " - "./ppocr_keys_v1.txt ./12.jpg 0" - << std::endl; - std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " - "with ascend." + "e.g ./infer_demo ./ch_PP-OCRv3_det_infer " + "./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer " + "./ppocr_keys_v1.txt ./12.jpg" << std::endl; return -1; } - fastdeploy::RuntimeOption option; - int flag = std::atoi(argv[6]); - - if (flag == 0) { - option.UseCpu(); - } else if (flag == 1) { - option.UseAscend(); - } - std::string det_model_dir = argv[1]; std::string cls_model_dir = argv[2]; std::string rec_model_dir = argv[3]; std::string rec_label_file = argv[4]; std::string test_image = argv[5]; - InitAndInfer(det_model_dir, cls_model_dir, rec_model_dir, rec_label_file, - test_image, option); + AscendInfer(det_model_dir, cls_model_dir, rec_model_dir, rec_label_file, + test_image); return 0; } diff --git a/examples/vision/ocr/PP-OCR/ascend/python/README.md b/examples/vision/ocr/PP-OCR/ascend/python/README.md new file mode 100644 index 0000000000..13a0fb6445 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/ascend/python/README.md @@ -0,0 +1,55 @@ +[English](README.md) | 简体中文 +# PP-OCRv3 Ascend Python部署示例 + +本目录下提供`infer.py`, 供用户完成PP-OCRv3在华为昇腾AI处理器上的部署. + +## 1. 
部署环境准备 +在部署前,需自行编译基于华为昇腾AI处理器的FastDeploy python wheel包并安装,参考文档,参考文档[华为昇腾AI处理器部署环境编译](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装) + +## 2.部署模型准备 +在部署前, 请准备好您所需要运行的推理模型, 您可以在[FastDeploy支持的PaddleOCR模型列表](../README.md)中下载所需模型. + +## 3.运行部署示例 +``` +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd FastDeploy/examples/vision/ocr/PP-OCR/ascend/python + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/ascend/python + +# 下载PP-OCRv3文字检测模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar +tar -xvf ch_PP-OCRv3_det_infer.tar +# 下载文字方向分类器模型 +wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar +tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar +# 下载PP-OCRv3文字识别模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar +tar -xvf ch_PP-OCRv3_rec_infer.tar + +# 下载预测图片与字典文件 +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt + +python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg +# NOTE:若用户需要连续地预测图片, 输入图片尺寸需要准备为统一尺寸, 例如 N 张, 尺寸为 A * B 的图片. +``` + +运行完成可视化结果如下图所示 + +
+ +
+ +## 4. 更多指南 +- [PP-OCR系列 Python API查阅](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/python/html/ocr.html) +- [FastDeploy部署PaddleOCR模型概览](../../) +- [PP-OCRv3 C++部署](../cpp) +- 如果用户想要调整前后处理超参数、单独使用文字检测识别模型、使用其他模型等,更多详细文档与说明请参考[PP-OCR系列在CPU/GPU上的部署](../../cpu-gpu/python/README.md) + +## 5. 常见问题 +- [如何将视觉模型预测结果转为numpy格式](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/vision_result_related_problems.md) diff --git a/examples/vision/ocr/PP-OCRv3/python/infer_static_shape.py b/examples/vision/ocr/PP-OCR/ascend/python/infer.py similarity index 75% rename from examples/vision/ocr/PP-OCRv3/python/infer_static_shape.py rename to examples/vision/ocr/PP-OCR/ascend/python/infer.py index e707d378c3..ceb28e0f7f 100755 --- a/examples/vision/ocr/PP-OCRv3/python/infer_static_shape.py +++ b/examples/vision/ocr/PP-OCR/ascend/python/infer.py @@ -37,16 +37,6 @@ def parse_arguments(): help="Path of Recognization model of PPOCR.") parser.add_argument( "--image", type=str, required=True, help="Path of test image file.") - parser.add_argument( - "--device", - type=str, - default='cpu', - help="Type of inference device, support 'cpu', 'kunlunxin' or 'gpu'.") - parser.add_argument( - "--cpu_thread_num", - type=int, - default=9, - help="Number of threads while inference on CPU.") return parser.parse_args() @@ -56,24 +46,21 @@ def build_option(args): cls_option = fd.RuntimeOption() rec_option = fd.RuntimeOption() - # 当前需要对PP-OCR启用静态shape推理的硬件只有昇腾. 
- if args.device.lower() == "ascend": - det_option.use_ascend() - cls_option.use_ascend() - rec_option.use_ascend() + det_option.use_ascend() + cls_option.use_ascend() + rec_option.use_ascend() return det_option, cls_option, rec_option args = parse_arguments() -# Detection模型, 检测文字框 det_model_file = os.path.join(args.det_model, "inference.pdmodel") det_params_file = os.path.join(args.det_model, "inference.pdiparams") -# Classification模型,方向分类,可选 + cls_model_file = os.path.join(args.cls_model, "inference.pdmodel") cls_params_file = os.path.join(args.cls_model, "inference.pdiparams") -# Recognition模型,文字识别模型 + rec_model_file = os.path.join(args.rec_model, "inference.pdmodel") rec_params_file = os.path.join(args.rec_model, "inference.pdiparams") rec_label_file = args.rec_label_file @@ -89,26 +76,28 @@ cls_model = fd.vision.ocr.Classifier( rec_model = fd.vision.ocr.Recognizer( rec_model_file, rec_params_file, rec_label_file, runtime_option=rec_option) -# Rec模型启用静态shape推理 +# Rec model enable static shape infer. +# When deploy on Ascend, it must be true. rec_model.preprocessor.static_shape_infer = True -# 创建PP-OCR,串联3个模型,其中cls_model可选,如无需求,可设置为None +# Create PP-OCRv3, if cls_model is not needed, +# just set cls_model=None . ppocr_v3 = fd.vision.ocr.PPOCRv3( det_model=det_model, cls_model=cls_model, rec_model=rec_model) -# Cls模型和Rec模型的batch size 必须设置为1, 开启静态shape推理 +# The batch size must be set to 1, when enable static shape infer. ppocr_v3.cls_batch_size = 1 ppocr_v3.rec_batch_size = 1 -# 预测图片准备 +# Prepare image. im = cv2.imread(args.image) -#预测并打印结果 +# Print the results. result = ppocr_v3.predict(im) print(result) -# 可视化结果 +# Visuliaze the output. 
vis_im = fd.vision.vis_ppocr(im, result) cv2.imwrite("visualized_result.jpg", vis_im) print("Visualized result save in ./visualized_result.jpg") diff --git a/examples/vision/ocr/PP-OCR/cpu-gpu/README.md b/examples/vision/ocr/PP-OCR/cpu-gpu/README.md new file mode 100644 index 0000000000..69a8e3e7e9 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/README.md @@ -0,0 +1,26 @@ +[English](README.md) | 简体中文 + +# PaddleOCR 模型在CPU与GPU上的部署方案-FastDeploy + +## 1. 说明 +PaddleOCR支持通过FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、Intel GPU(独立显卡/集成显卡)硬件上快速部署PaddleOCR系列模型 + +## 2. 支持的PaddleOCR推理模型 + +下表中的推理模型为FastDeploy测试过的模型, 下载链接由PaddleOCR模型库提供, +更多的模型, 详见[PP-OCR系列模型列表](https://github.com/PaddlePaddle/PaddleOCR/blob/release/2.6/doc/doc_ch/models_list.md), 欢迎用户尝试. + +| PaddleOCR版本 | 文本框检测 | 方向分类模型 | 文字识别 |字典文件| 说明 | +|:----|:----|:----|:----|:----|:--------| +| ch_PP-OCRv3[推荐] |[ch_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv3系列原始超轻量模型,支持中英文、多语种文本检测 | +| en_PP-OCRv3[推荐] |[en_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [en_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_infer.tar) | [en_dict.txt](https://bj.bcebos.com/paddlehub/fastdeploy/en_dict.txt) | OCRv3系列原始超轻量模型,支持英文与数字识别,除检测模型和识别模型的训练数据与中文模型不同以外,无其他区别 | +| ch_PP-OCRv2 |[ch_PP-OCRv2_det](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | 
[ch_PP-OCRv2_rec](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测 | +| ch_PP-OCRv2_mobile |[ch_ppocr_mobile_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_mobile_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测,比PPOCRv2更加轻量 | +| ch_PP-OCRv2_server |[ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) |[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2服务器系列模型, 支持中英文、多语种文本检测,比超轻量模型更大,但效果更好| + + +## 3. 
详细的部署示例 +- [Python部署](python) +- [C++部署](cpp) +- [C部署](c) +- [C#部署](csharp) diff --git a/examples/vision/ocr/PP-OCRv2/c/CMakeLists.txt b/examples/vision/ocr/PP-OCR/cpu-gpu/c/CMakeLists.txt similarity index 100% rename from examples/vision/ocr/PP-OCRv2/c/CMakeLists.txt rename to examples/vision/ocr/PP-OCR/cpu-gpu/c/CMakeLists.txt diff --git a/examples/vision/ocr/PP-OCRv3/c/README_CN.md b/examples/vision/ocr/PP-OCR/cpu-gpu/c/README.md old mode 100644 new mode 100755 similarity index 74% rename from examples/vision/ocr/PP-OCRv3/c/README_CN.md rename to examples/vision/ocr/PP-OCR/cpu-gpu/c/README.md index eaea4a7c5d..7c58637730 --- a/examples/vision/ocr/PP-OCRv3/c/README_CN.md +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/c/README.md @@ -1,57 +1,73 @@ [English](README.md) | 简体中文 -# PPOCRv3 C部署示例 +# PaddleOCR CPU-GPU C部署示例 -本目录下提供`infer.c`来调用C API快速完成PPOCRv3模型在CPU/GPU上部署的示例。 +本目录下提供`infer.c`来调用C API快速完成PP-OCRv3模型在CPU/GPU上部署的示例。 -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) -- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) +## 1. 说明 +PaddleOCR支持利用FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、Intel GPU(独立显卡/集成显卡)硬件上快速部署OCR模型. +## 2. 部署环境准备 +在部署前,需确认软硬件环境,同时下载预编译部署库,参考[FastDeploy安装文档](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装)安装FastDeploy预编译库. 以Linux上推理为例,在本目录执行如下命令即可完成编译测试,支持此模型需保证FastDeploy版本1.0.4以上(x.x.x>=1.0.4) +## 3. 部署模型准备 +在部署前, 请准备好您所需要运行的推理模型, 您可以在[FastDeploy支持的PaddleOCR模型列表](../README.md)中下载所需模型. 
+ +## 4.运行部署示例 ```bash +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd FastDeploy/examples/vision/ocr/PP-OCR/cpu-gpu/c + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/cpu-gpu/c + mkdir build cd build + # 下载FastDeploy预编译库,用户可在上文提到的`FastDeploy预编译库`中自行选择合适的版本使用 wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz + +# 编译Demo tar xvf fastdeploy-linux-x64-x.x.x.tgz cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x make -j - -# 下载模型,图片和字典文件 +# 下载PP-OCRv3文字检测模型 wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar tar -xvf ch_PP-OCRv3_det_infer.tar - +# 下载文字方向分类器模型 wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - +# 下载PP-OCRv3文字识别模型 wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar tar -xvf ch_PP-OCRv3_rec_infer.tar +# 下载预测图片与字典文件 wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt -# CPU推理 +# 在CPU上使用Paddle Inference推理 ./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 -# GPU推理 +# 在GPU上使用Paddle Inference推理 ./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 ``` - 以上命令只适用于Linux或MacOS, Windows下SDK的使用方式请参考: - [如何在Windows中使用FastDeploy C++ SDK](../../../../../docs/cn/faq/use_sdk_on_windows.md) -如果用户使用华为昇腾NPU部署, 请参考以下方式在部署前初始化部署环境: -- [如何使用华为昇腾NPU部署](../../../../../docs/cn/faq/use_sdk_on_ascend.md) 运行完成可视化结果如下图所示 - -## PPOCRv3 C API接口 +## 5. 
PP-OCRv3 C API接口简介 +下面提供了PP-OCRv3的C API简介 + +- 如果用户想要更换部署后端或进行其他定制化操作, 请查看[C Runtime API](https://baidu-paddle.github.io/fastdeploy-api/c/html/runtime__option_8h.html). +- 更多 PP-OCR C API 请查看 [C PP-OCR API](https://github.com/PaddlePaddle/FastDeploy/blob/develop/c_api/fastdeploy_capi/vision/ocr/ppocr/model.h) ### 配置 @@ -159,7 +175,7 @@ FD_C_PPOCRv3Wrapper* FD_C_CreatePPOCRv3Wrapper( FD_C_RecognizerWrapper* rec_model ) ``` -> 创建一个PPOCRv3的模型,并且返回操作它的指针。 +> 创建一个PP-OCRv3的模型,并且返回操作它的指针。 > > **参数** > @@ -169,11 +185,11 @@ FD_C_PPOCRv3Wrapper* FD_C_CreatePPOCRv3Wrapper( > > **返回** > -> * **fd_c_ppocrv3_wrapper**(FD_C_PPOCRv3Wrapper*): 指向PPOCRv3模型对象的指针 +> * **fd_c_ppocrv3_wrapper**(FD_C_PPOCRv3Wrapper*): 指向PP-OCRv3模型对象的指针 -#### 读写图像 +### 读写图像 ```c FD_C_Mat FD_C_Imread(const char* imgpath) @@ -206,7 +222,7 @@ FD_C_Bool FD_C_Imwrite(const char* savepath, FD_C_Mat img); > * **result**(FD_C_Bool): 表示操作是否成功 -#### Predict函数 +### Predict函数 ```c FD_C_Bool FD_C_PPOCRv3WrapperPredict( @@ -218,12 +234,12 @@ FD_C_Bool FD_C_PPOCRv3WrapperPredict( > 模型预测接口,输入图像直接并生成结果。 > > **参数** -> * **fd_c_ppocrv3_wrapper**(FD_C_PPOCRv3Wrapper*): 指向PPOCRv3模型的指针 +> * **fd_c_ppocrv3_wrapper**(FD_C_PPOCRv3Wrapper*): 指向PP-OCRv3模型的指针 > * **img**(FD_C_Mat): 输入图像的指针,指向cv::Mat对象,可以调用FD_C_Imread读取图像获取 > * **result**(FD_C_OCRResult*): OCR预测结果,包括由检测模型输出的检测框位置,分类模型输出的方向分类,以及识别模型输出的识别结果, OCRResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) -#### Predict结果 +### Predict结果 ```c FD_C_Mat FD_C_VisOcr(FD_C_Mat im, FD_C_OCRResult* ocr_result) @@ -239,9 +255,9 @@ FD_C_Mat FD_C_VisOcr(FD_C_Mat im, FD_C_OCRResult* ocr_result) > * **vis_im**(FD_C_Mat): 指向可视化图像的指针 -## 其它文档 +## 6. 
其它文档 -- [PPOCR 系列模型介绍](../../) -- [PPOCRv3 Python部署](../python) -- [模型预测结果说明](../../../../../docs/api/vision_results/) -- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md) +- [FastDeploy部署PaddleOCR模型概览](../../) +- [PP-OCRv3 Python部署](../python) +- [PP-OCRv3 C++ 部署](../cpp) +- [PP-OCRv3 C# 部署](../csharp) diff --git a/examples/vision/ocr/PP-OCRv3/c/infer.c b/examples/vision/ocr/PP-OCR/cpu-gpu/c/infer.c similarity index 85% rename from examples/vision/ocr/PP-OCRv3/c/infer.c rename to examples/vision/ocr/PP-OCR/cpu-gpu/c/infer.c index 007d2ed4d0..d699d2b3a7 100644 --- a/examples/vision/ocr/PP-OCRv3/c/infer.c +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/c/infer.c @@ -37,9 +37,9 @@ const char sep = '\\'; const char sep = '/'; #endif -void CpuInfer(const char* det_model_dir, const char* cls_model_dir, - const char* rec_model_dir, const char* rec_label_file, - const char* image_file) { +void CpuInfer(const char *det_model_dir, const char *cls_model_dir, + const char *rec_model_dir, const char *rec_label_file, + const char *image_file) { char det_model_file[100]; char det_params_file[100]; @@ -65,22 +65,22 @@ void CpuInfer(const char* det_model_dir, const char* cls_model_dir, snprintf(rec_params_file, max_size, "%s%c%s", rec_model_dir, sep, "inference.pdiparams"); - FD_C_RuntimeOptionWrapper* det_option = FD_C_CreateRuntimeOptionWrapper(); - FD_C_RuntimeOptionWrapper* cls_option = FD_C_CreateRuntimeOptionWrapper(); - FD_C_RuntimeOptionWrapper* rec_option = FD_C_CreateRuntimeOptionWrapper(); + FD_C_RuntimeOptionWrapper *det_option = FD_C_CreateRuntimeOptionWrapper(); + FD_C_RuntimeOptionWrapper *cls_option = FD_C_CreateRuntimeOptionWrapper(); + FD_C_RuntimeOptionWrapper *rec_option = FD_C_CreateRuntimeOptionWrapper(); FD_C_RuntimeOptionWrapperUseCpu(det_option); FD_C_RuntimeOptionWrapperUseCpu(cls_option); FD_C_RuntimeOptionWrapperUseCpu(rec_option); - FD_C_DBDetectorWrapper* det_model = FD_C_CreateDBDetectorWrapper( + FD_C_DBDetectorWrapper *det_model = 
FD_C_CreateDBDetectorWrapper( det_model_file, det_params_file, det_option, FD_C_ModelFormat_PADDLE); - FD_C_ClassifierWrapper* cls_model = FD_C_CreateClassifierWrapper( + FD_C_ClassifierWrapper *cls_model = FD_C_CreateClassifierWrapper( cls_model_file, cls_params_file, cls_option, FD_C_ModelFormat_PADDLE); - FD_C_RecognizerWrapper* rec_model = FD_C_CreateRecognizerWrapper( + FD_C_RecognizerWrapper *rec_model = FD_C_CreateRecognizerWrapper( rec_model_file, rec_params_file, rec_label_file, rec_option, FD_C_ModelFormat_PADDLE); - FD_C_PPOCRv3Wrapper* ppocr_v3 = + FD_C_PPOCRv3Wrapper *ppocr_v3 = FD_C_CreatePPOCRv3Wrapper(det_model, cls_model, rec_model); if (!FD_C_PPOCRv3WrapperInitialized(ppocr_v3)) { printf("Failed to initialize.\n"); @@ -96,7 +96,7 @@ void CpuInfer(const char* det_model_dir, const char* cls_model_dir, FD_C_Mat im = FD_C_Imread(image_file); - FD_C_OCRResult* result = FD_C_CreateOCRResult(); + FD_C_OCRResult *result = (FD_C_OCRResult *)malloc(sizeof(FD_C_OCRResult)); if (!FD_C_PPOCRv3WrapperPredict(ppocr_v3, im, result)) { printf("Failed to predict.\n"); @@ -132,9 +132,9 @@ void CpuInfer(const char* det_model_dir, const char* cls_model_dir, FD_C_DestroyMat(vis_im); } -void GpuInfer(const char* det_model_dir, const char* cls_model_dir, - const char* rec_model_dir, const char* rec_label_file, - const char* image_file) { +void GpuInfer(const char *det_model_dir, const char *cls_model_dir, + const char *rec_model_dir, const char *rec_label_file, + const char *image_file) { char det_model_file[100]; char det_params_file[100]; @@ -160,22 +160,22 @@ void GpuInfer(const char* det_model_dir, const char* cls_model_dir, snprintf(rec_params_file, max_size, "%s%c%s", rec_model_dir, sep, "inference.pdiparams"); - FD_C_RuntimeOptionWrapper* det_option = FD_C_CreateRuntimeOptionWrapper(); - FD_C_RuntimeOptionWrapper* cls_option = FD_C_CreateRuntimeOptionWrapper(); - FD_C_RuntimeOptionWrapper* rec_option = FD_C_CreateRuntimeOptionWrapper(); + FD_C_RuntimeOptionWrapper 
*det_option = FD_C_CreateRuntimeOptionWrapper(); + FD_C_RuntimeOptionWrapper *cls_option = FD_C_CreateRuntimeOptionWrapper(); + FD_C_RuntimeOptionWrapper *rec_option = FD_C_CreateRuntimeOptionWrapper(); FD_C_RuntimeOptionWrapperUseGpu(det_option, 0); FD_C_RuntimeOptionWrapperUseGpu(cls_option, 0); FD_C_RuntimeOptionWrapperUseGpu(rec_option, 0); - FD_C_DBDetectorWrapper* det_model = FD_C_CreateDBDetectorWrapper( + FD_C_DBDetectorWrapper *det_model = FD_C_CreateDBDetectorWrapper( det_model_file, det_params_file, det_option, FD_C_ModelFormat_PADDLE); - FD_C_ClassifierWrapper* cls_model = FD_C_CreateClassifierWrapper( + FD_C_ClassifierWrapper *cls_model = FD_C_CreateClassifierWrapper( cls_model_file, cls_params_file, cls_option, FD_C_ModelFormat_PADDLE); - FD_C_RecognizerWrapper* rec_model = FD_C_CreateRecognizerWrapper( + FD_C_RecognizerWrapper *rec_model = FD_C_CreateRecognizerWrapper( rec_model_file, rec_params_file, rec_label_file, rec_option, FD_C_ModelFormat_PADDLE); - FD_C_PPOCRv3Wrapper* ppocr_v3 = + FD_C_PPOCRv3Wrapper *ppocr_v3 = FD_C_CreatePPOCRv3Wrapper(det_model, cls_model, rec_model); if (!FD_C_PPOCRv3WrapperInitialized(ppocr_v3)) { printf("Failed to initialize.\n"); @@ -191,7 +191,7 @@ void GpuInfer(const char* det_model_dir, const char* cls_model_dir, FD_C_Mat im = FD_C_Imread(image_file); - FD_C_OCRResult* result = FD_C_CreateOCRResult(); + FD_C_OCRResult *result = (FD_C_OCRResult *)malloc(sizeof(FD_C_OCRResult)); if (!FD_C_PPOCRv3WrapperPredict(ppocr_v3, im, result)) { printf("Failed to predict.\n"); @@ -226,7 +226,7 @@ void GpuInfer(const char* det_model_dir, const char* cls_model_dir, FD_C_DestroyMat(im); FD_C_DestroyMat(vis_im); } -int main(int argc, char* argv[]) { +int main(int argc, char *argv[]) { if (argc < 7) { printf( "Usage: infer_demo path/to/det_model path/to/cls_model " diff --git a/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/CMakeLists.txt b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/CMakeLists.txt new file mode 100644 index 
0000000000..fe4e03f266 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/CMakeLists.txt @@ -0,0 +1,30 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.10) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +# PP-OCR +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) + +# Only Det +add_executable(infer_det ${PROJECT_SOURCE_DIR}/infer_det.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_det ${FASTDEPLOY_LIBS}) + +# Only Cls +add_executable(infer_cls ${PROJECT_SOURCE_DIR}/infer_cls.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_cls ${FASTDEPLOY_LIBS}) + +# Only Rec +add_executable(infer_rec ${PROJECT_SOURCE_DIR}/infer_rec.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_rec ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/README.md b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/README.md new file mode 100644 index 0000000000..4481f49be5 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/README.md @@ -0,0 +1,163 @@ +[English](README.md) | 简体中文 +# PaddleOCR CPU-GPU C++部署示例 + +本目录下提供`infer.cc`快速完成PP-OCRv3在CPU/GPU,以及GPU上通过Paddle-TensorRT加速部署的示例. +## 1. 说明 +PaddleOCR支持利用FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、Intel GPU(独立显卡/集成显卡)硬件上快速部署OCR模型. + +## 2. 部署环境准备 +在部署前,需确认软硬件环境,同时下载预编译部署库,参考[FastDeploy安装文档](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装)安装FastDeploy预编译库. + +## 3. 部署模型准备 +在部署前, 请准备好您所需要运行的推理模型, 您可以在[FastDeploy支持的PaddleOCR模型列表](../README.md)中下载所需模型. + +## 4. 
运行部署示例 +以Linux上推理为例,在本目录执行如下命令即可完成编译测试,支持此模型需保证FastDeploy版本1.0.0以上(x.x.x>=1.0.0) + +```bash +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd FastDeploy/examples/vision/ocr/PP-OCR/cpu-gpu/cpp + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/cpu-gpu/cpp + +# 下载FastDeploy预编译库,用户可在上文提到的`FastDeploy预编译库`中自行选择合适的版本使用 +wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz +tar xvf fastdeploy-linux-x64-x.x.x.tgz + +# 编译部署示例 +mkdir build && cd build +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x +make -j + +# 下载PP-OCRv3文字检测模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar +tar -xvf ch_PP-OCRv3_det_infer.tar +# 下载文字方向分类器模型 +wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar +tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar +# 下载PP-OCRv3文字识别模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar +tar -xvf ch_PP-OCRv3_rec_infer.tar + +# 下载预测图片与字典文件 +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt + +# 运行部署示例 +# 在CPU上使用Paddle Inference推理 +./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 +# 在CPU上使用OpenVINO推理 +./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 +# 在CPU上使用ONNX Runtime推理 +./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 2 +# 在CPU上使用Paddle Lite推理 +./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 3 +# 在GPU上使用Paddle Inference推理 +./infer_demo 
./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 4 +# 在GPU上使用Paddle TensorRT推理 +./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 5 +# 在GPU上使用ONNX Runtime推理 +./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 6 +# 在GPU上使用Nvidia TensorRT推理 +./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 7 + +# 同时, FastDeploy提供文字检测,文字分类,文字识别三个模型的单独推理, +# 有需要的用户, 请准备合适的图片, 同时根据自己的需求, 参考infer.cc来配置自定义硬件与推理后端. + +# 在CPU上,单独使用文字检测模型部署 +./infer_det ./ch_PP-OCRv3_det_infer ./12.jpg 0 + +# 在CPU上,单独使用文字方向分类模型部署 +./infer_cls ./ch_ppocr_mobile_v2.0_cls_infer ./12.jpg 0 + +# 在CPU上,单独使用文字识别模型部署 +./infer_rec ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 +``` + +运行完成可视化结果如下图所示 +
+ +
 + +- 注意,以上命令只适用于Linux或MacOS, Windows下SDK的使用方式请参考文档: [如何在Windows中使用FastDeploy C++ SDK](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/use_sdk_on_windows.md) +- 关于如何通过FastDeploy使用更多不同的推理后端,以及如何使用不同的硬件,请参考文档:[如何切换模型推理后端引擎](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/how_to_change_backend.md) + +## 5. 部署示例选项说明 +在我们使用`infer_demo`时, 输入了6个参数, 分别为文字检测模型, 文字分类模型, 文字识别模型, 预测图片, 字典文件与最后一位的数字选项. +现在下表将解释最后一位数字选项的含义. +|数字选项|含义| +|:---:|:---:| +|0| 在CPU上使用Paddle Inference推理 | +|1| 在CPU上使用OpenVINO推理 | +|2| 在CPU上使用ONNX Runtime推理 | +|3| 在CPU上使用Paddle Lite推理 | +|4| 在GPU上使用Paddle Inference推理 | +|5| 在GPU上使用Paddle TensorRT推理 | +|6| 在GPU上使用ONNX Runtime推理 | +|7| 在GPU上使用Nvidia TensorRT推理 | + +关于如何通过FastDeploy使用更多不同的推理后端,以及如何使用不同的硬件,请参考文档:[如何切换模型推理后端引擎](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/how_to_change_backend.md) + +## 6. 更多指南 + +### 6.1 如何使用C++部署PP-OCRv2系列模型. +本目录下的`infer.cc`代码是以PP-OCRv3模型为例, 如果用户有使用PP-OCRv2的需求, 只需要按照下面所示的方式, 来创建PP-OCRv2并使用. + +```cpp +// 此行为创建PP-OCRv3模型的代码 +auto ppocr_v3 = fastdeploy::pipeline::PPOCRv3(&det_model, &cls_model, &rec_model); +// 只需要将PPOCRv3改为PPOCRv2,即可创建PPOCRv2模型, 同时, 后续的接口均使用ppocr_v2来调用 +auto ppocr_v2 = fastdeploy::pipeline::PPOCRv2(&det_model, &cls_model, &rec_model); + +// 如果用户在部署PP-OCRv2时, 需要使用TensorRT推理, 还需要改动Rec模型的TensorRT的输入shape. +// 建议如下修改, 需要把 H 维度改为32, W 维度按需修改. +rec_option.SetTrtInputShape("x", {1, 3, 32, 10}, {rec_batch_size, 3, 32, 320}, + {rec_batch_size, 3, 32, 2304}); +``` +### 6.2 如何在PP-OCRv2/v3系列模型中, 关闭文字方向分类器的使用. + +在PP-OCRv3/v2中, 文字方向分类器是可选的, 用户可以按照以下方式, 来决定自己是否使用方向分类器. +```cpp +// 使用 Cls 模型 +auto ppocr_v3 = fastdeploy::pipeline::PPOCRv3(&det_model, &cls_model, &rec_model); + +// 不使用 Cls 模型 +auto ppocr_v3 = fastdeploy::pipeline::PPOCRv3(&det_model, &rec_model); + +// 当不使用Cls模型时, 请删掉或者注释掉相关代码 +``` + +### 6.3 如何修改前后处理超参数. 
+在示例代码中, 我们展示出了修改前后处理超参数的接口,并设置为默认值,其中, FastDeploy提供的超参数的含义与文档[PaddleOCR推理模型参数解释](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/inference_args.md)是相同的. 如果用户想要进行更多定制化的开发, 请阅读[PP-OCR系列 C++ API查阅](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/cpp/html/namespacefastdeploy_1_1vision_1_1ocr.html) + +```cpp +// 设置检测模型的max_side_len +det_model.GetPreprocessor().SetMaxSideLen(960); +// 其他... +``` + +### 6.4 其他指南 +- [FastDeploy部署PaddleOCR模型概览](../../) +- [PP-OCRv3 Python部署](../python) +- [PP-OCRv3 C 部署](../c) +- [PP-OCRv3 C# 部署](../csharp) + +## 7. 常见问题 +- PaddleOCR能在FastDeploy支持的多种后端上推理,支持情况如下表所示, 如何切换后端, 详见文档[如何切换模型推理后端引擎](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/how_to_change_backend.md) + +|硬件类型|支持的后端| +|:---:|:---:| +|X86 CPU| Paddle Inference, ONNX Runtime, OpenVINO | +|ARM CPU| Paddle Lite | +|飞腾 CPU| ONNX Runtime | +|NVIDIA GPU| Paddle Inference, ONNX Runtime, TensorRT | + +- [Intel GPU(独立显卡/集成显卡)的使用](https://github.com/PaddlePaddle/FastDeploy/blob/develop/tutorials/intel_gpu/README.md) +- [编译CPU部署库](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/cpu.md) +- [编译GPU部署库](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/gpu.md) +- [编译Jetson部署库](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/jetson.md) diff --git a/examples/vision/ocr/PP-OCRv3/cpp/infer.cc b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer.cc old mode 100755 new mode 100644 similarity index 59% rename from examples/vision/ocr/PP-OCRv3/cpp/infer.cc rename to examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer.cc index 3b35c1d44e..87188fd1ad --- a/examples/vision/ocr/PP-OCRv3/cpp/infer.cc +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer.cc @@ -19,7 +19,12 @@ const char sep = '\\'; const char sep = '/'; #endif -void InitAndInfer(const std::string& det_model_dir, const std::string& cls_model_dir, const std::string& rec_model_dir, const std::string& rec_label_file, 
const std::string& image_file, const fastdeploy::RuntimeOption& option) { +void InitAndInfer(const std::string &det_model_dir, + const std::string &cls_model_dir, + const std::string &rec_model_dir, + const std::string &rec_label_file, + const std::string &image_file, + const fastdeploy::RuntimeOption &option) { auto det_model_file = det_model_dir + sep + "inference.pdmodel"; auto det_params_file = det_model_dir + sep + "inference.pdiparams"; @@ -34,50 +39,71 @@ void InitAndInfer(const std::string& det_model_dir, const std::string& cls_model auto rec_option = option; // The cls and rec model can inference a batch of images now. - // User could initialize the inference batch size and set them after create PP-OCR model. + // User could initialize the inference batch size and set them after create + // PP-OCR model. int cls_batch_size = 1; int rec_batch_size = 6; // If use TRT backend, the dynamic shape will be set as follow. - // We recommend that users set the length and height of the detection model to a multiple of 32. + // We recommend that users set the length and height of the detection model to + // a multiple of 32. // We also recommend that users set the Trt input shape as follow. - det_option.SetTrtInputShape("x", {1, 3, 64,64}, {1, 3, 640, 640}, - {1, 3, 960, 960}); - cls_option.SetTrtInputShape("x", {1, 3, 48, 10}, {cls_batch_size, 3, 48, 320}, {cls_batch_size, 3, 48, 1024}); + det_option.SetTrtInputShape("x", {1, 3, 64, 64}, {1, 3, 640, 640}, + {1, 3, 960, 960}); + cls_option.SetTrtInputShape("x", {1, 3, 48, 10}, {cls_batch_size, 3, 48, 320}, + {cls_batch_size, 3, 48, 1024}); rec_option.SetTrtInputShape("x", {1, 3, 48, 10}, {rec_batch_size, 3, 48, 320}, - {rec_batch_size, 3, 48, 2304}); - - // Users could save TRT cache file to disk as follow. + {rec_batch_size, 3, 48, 2304}); + + // Users could save TRT cache file to disk as follow. 
// det_option.SetTrtCacheFile(det_model_dir + sep + "det_trt_cache.trt"); // cls_option.SetTrtCacheFile(cls_model_dir + sep + "cls_trt_cache.trt"); // rec_option.SetTrtCacheFile(rec_model_dir + sep + "rec_trt_cache.trt"); - auto det_model = fastdeploy::vision::ocr::DBDetector(det_model_file, det_params_file, det_option); - auto cls_model = fastdeploy::vision::ocr::Classifier(cls_model_file, cls_params_file, cls_option); - auto rec_model = fastdeploy::vision::ocr::Recognizer(rec_model_file, rec_params_file, rec_label_file, rec_option); + auto det_model = fastdeploy::vision::ocr::DBDetector( + det_model_file, det_params_file, det_option); + auto cls_model = fastdeploy::vision::ocr::Classifier( + cls_model_file, cls_params_file, cls_option); + auto rec_model = fastdeploy::vision::ocr::Recognizer( + rec_model_file, rec_params_file, rec_label_file, rec_option); assert(det_model.Initialized()); assert(cls_model.Initialized()); assert(rec_model.Initialized()); - // The classification model is optional, so the PP-OCR can also be connected in series as follows - // auto ppocr_v3 = fastdeploy::pipeline::PPOCRv3(&det_model, &rec_model); - auto ppocr_v3 = fastdeploy::pipeline::PPOCRv3(&det_model, &cls_model, &rec_model); - - // Set inference batch size for cls model and rec model, the value could be -1 and 1 to positive infinity. - // When inference batch size is set to -1, it means that the inference batch size - // of the cls and rec models will be the same as the number of boxes detected by the det model. - ppocr_v3.SetClsBatchSize(cls_batch_size); - ppocr_v3.SetRecBatchSize(rec_batch_size); + // Parameters settings for pre and post processing of Det/Cls/Rec Models. + // All parameters are set to default values. 
+ det_model.GetPreprocessor().SetMaxSideLen(960); + det_model.GetPostprocessor().SetDetDBThresh(0.3); + det_model.GetPostprocessor().SetDetDBBoxThresh(0.6); + det_model.GetPostprocessor().SetDetDBUnclipRatio(1.5); + det_model.GetPostprocessor().SetDetDBScoreMode("slow"); + det_model.GetPostprocessor().SetUseDilation(0); + cls_model.GetPostprocessor().SetClsThresh(0.9); - if(!ppocr_v3.Initialized()){ + // The classification model is optional, so the PP-OCR can also be connected + // in series as follows + // auto ppocr_v3 = fastdeploy::pipeline::PPOCRv3(&det_model, &rec_model); + auto ppocr_v3 = + fastdeploy::pipeline::PPOCRv3(&det_model, &cls_model, &rec_model); + + // Set inference batch size for cls model and rec model, the value could be -1 + // and 1 to positive infinity. + // When inference batch size is set to -1, it means that the inference batch + // size + // of the cls and rec models will be the same as the number of boxes detected + // by the det model. + ppocr_v3.SetClsBatchSize(cls_batch_size); + ppocr_v3.SetRecBatchSize(rec_batch_size); + + if (!ppocr_v3.Initialized()) { std::cerr << "Failed to initialize PP-OCR." << std::endl; return; } auto im = cv::imread(image_file); auto im_bak = im.clone(); - + fastdeploy::vision::OCRResult result; if (!ppocr_v3.Predict(&im, &result)) { std::cerr << "Failed to predict." 
<< std::endl; @@ -91,7 +117,7 @@ void InitAndInfer(const std::string& det_model_dir, const std::string& cls_model std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; } -int main(int argc, char* argv[]) { +int main(int argc, char *argv[]) { if (argc < 7) { std::cout << "Usage: infer_demo path/to/det_model path/to/cls_model " "path/to/rec_model path/to/rec_label_file path/to/image " @@ -100,8 +126,8 @@ int main(int argc, char* argv[]) { "./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer " "./ppocr_keys_v1.txt ./12.jpg 0" << std::endl; - std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " - "with gpu; 2: run with gpu and use tensorrt backend; 3: run with gpu and use Paddle-TRT; 4: run with kunlunxin." + std::cout << "The data type of run_option is int, e.g. 0: run with paddle " + "inference on cpu;" << std::endl; return -1; } @@ -110,19 +136,31 @@ int main(int argc, char* argv[]) { int flag = std::atoi(argv[6]); if (flag == 0) { - option.UseCpu(); + option.UseCpu(); + option.UsePaddleBackend(); // Paddle Inference } else if (flag == 1) { - option.UseGpu(); + option.UseCpu(); + option.UseOpenVINOBackend(); // OpenVINO } else if (flag == 2) { - option.UseGpu(); - option.UseTrtBackend(); + option.UseCpu(); + option.UseOrtBackend(); // ONNX Runtime } else if (flag == 3) { - option.UseGpu(); - option.UseTrtBackend(); - option.EnablePaddleTrtCollectShape(); - option.EnablePaddleToTrt(); + option.UseCpu(); + option.UseLiteBackend(); // Paddle Lite } else if (flag == 4) { - option.UseKunlunXin(); + option.UseGpu(); + option.UsePaddleBackend(); // Paddle Inference + } else if (flag == 5) { + option.UseGpu(); + option.UsePaddleInferBackend(); + option.paddle_infer_option.collect_trt_shape = true; + option.paddle_infer_option.enable_trt = true; // Paddle-TensorRT + } else if (flag == 6) { + option.UseGpu(); + option.UseOrtBackend(); // ONNX Runtime + } else if (flag == 7) { + option.UseGpu(); + option.UseTrtBackend(); // 
TensorRT } std::string det_model_dir = argv[1]; @@ -130,6 +168,7 @@ int main(int argc, char* argv[]) { std::string rec_model_dir = argv[3]; std::string rec_label_file = argv[4]; std::string test_image = argv[5]; - InitAndInfer(det_model_dir, cls_model_dir, rec_model_dir, rec_label_file, test_image, option); + InitAndInfer(det_model_dir, cls_model_dir, rec_model_dir, rec_label_file, + test_image, option); return 0; } diff --git a/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_cls.cc b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_cls.cc new file mode 100644 index 0000000000..789c2a9f36 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_cls.cc @@ -0,0 +1,79 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" +#ifdef WIN32 +const char sep = '\\'; +#else +const char sep = '/'; +#endif + +void InitAndInfer(const std::string &cls_model_dir, + const std::string &image_file, + const fastdeploy::RuntimeOption &option) { + auto cls_model_file = cls_model_dir + sep + "inference.pdmodel"; + auto cls_params_file = cls_model_dir + sep + "inference.pdiparams"; + auto cls_option = option; + + auto cls_model = fastdeploy::vision::ocr::Classifier( + cls_model_file, cls_params_file, cls_option); + assert(cls_model.Initialized()); + + // Parameters settings for pre and post processing of Cls Model. 
+ cls_model.GetPostprocessor().SetClsThresh(0.9); + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::OCRResult result; + if (!cls_model.Predict(im, &result)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + // User can infer a batch of images by following code. + // if (!cls_model.BatchPredict({im}, &result)) { + // std::cerr << "Failed to predict." << std::endl; + // return; + // } + + std::cout << result.Str() << std::endl; +} + +int main(int argc, char *argv[]) { + if (argc < 4) { + std::cout << "Usage: infer_demo path/to/cls_model path/to/image " + "run_option, " + "e.g ./infer_demo ./ch_ppocr_mobile_v2.0_cls_infer ./12.jpg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu;." + << std::endl; + return -1; + } + + fastdeploy::RuntimeOption option; + int flag = std::atoi(argv[3]); + + if (flag == 0) { + option.UseCpu(); + } else if (flag == 1) { + option.UseGpu(); + } + + std::string cls_model_dir = argv[1]; + std::string test_image = argv[2]; + InitAndInfer(cls_model_dir, test_image, option); + return 0; +} \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_det.cc b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_det.cc new file mode 100644 index 0000000000..8b1cea4b9e --- /dev/null +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_det.cc @@ -0,0 +1,82 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" +#ifdef WIN32 +const char sep = '\\'; +#else +const char sep = '/'; +#endif + +void InitAndInfer(const std::string &det_model_dir, + const std::string &image_file, + const fastdeploy::RuntimeOption &option) { + auto det_model_file = det_model_dir + sep + "inference.pdmodel"; + auto det_params_file = det_model_dir + sep + "inference.pdiparams"; + auto det_option = option; + + auto det_model = fastdeploy::vision::ocr::DBDetector( + det_model_file, det_params_file, det_option); + assert(det_model.Initialized()); + + // Parameters settings for pre and post processing of Det Model. + det_model.GetPreprocessor().SetMaxSideLen(960); + det_model.GetPostprocessor().SetDetDBThresh(0.3); + det_model.GetPostprocessor().SetDetDBBoxThresh(0.6); + det_model.GetPostprocessor().SetDetDBUnclipRatio(1.5); + det_model.GetPostprocessor().SetDetDBScoreMode("slow"); + det_model.GetPostprocessor().SetUseDilation(0); + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::OCRResult result; + if (!det_model.Predict(im, &result)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + std::cout << result.Str() << std::endl; + + auto vis_im = fastdeploy::vision::VisOcr(im_bak, result); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char *argv[]) { + if (argc < 4) { + std::cout << "Usage: infer_demo path/to/det_model path/to/image " + "run_option, " + "e.g ./infer_demo ./ch_PP-OCRv3_det_infer ./12.jpg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu;." 
+ << std::endl; + return -1; + } + + fastdeploy::RuntimeOption option; + int flag = std::atoi(argv[3]); + + if (flag == 0) { + option.UseCpu(); + } else if (flag == 1) { + option.UseGpu(); + } + + std::string det_model_dir = argv[1]; + std::string test_image = argv[2]; + InitAndInfer(det_model_dir, test_image, option); + return 0; +} diff --git a/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_rec.cc b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_rec.cc new file mode 100644 index 0000000000..e07e2a0cd8 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/cpp/infer_rec.cc @@ -0,0 +1,83 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "fastdeploy/vision.h" +#ifdef WIN32 +const char sep = '\\'; +#else +const char sep = '/'; +#endif + +void InitAndInfer(const std::string &rec_model_dir, + const std::string &rec_label_file, + const std::string &image_file, + const fastdeploy::RuntimeOption &option) { + auto rec_model_file = rec_model_dir + sep + "inference.pdmodel"; + auto rec_params_file = rec_model_dir + sep + "inference.pdiparams"; + auto rec_option = option; + + auto rec_model = fastdeploy::vision::ocr::Recognizer( + rec_model_file, rec_params_file, rec_label_file, rec_option); + + assert(rec_model.Initialized()); + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::OCRResult result; + + if (!rec_model.Predict(im, &result)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + // User can infer a batch of images by following code. + // if (!rec_model.BatchPredict({im}, &result)) { + // std::cerr << "Failed to predict." << std::endl; + // return; + // } + + std::cout << result.Str() << std::endl; +} + +int main(int argc, char *argv[]) { + if (argc < 5) { + std::cout << "Usage: infer_demo" + "path/to/rec_model path/to/rec_label_file path/to/image " + "run_option, " + "e.g ./infer_demo " + "./ch_PP-OCRv3_rec_infer " + "./ppocr_keys_v1.txt ./12.jpg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu;" + << std::endl; + return -1; + } + + fastdeploy::RuntimeOption option; + int flag = std::atoi(argv[4]); + + if (flag == 0) { + option.UseCpu(); + } else if (flag == 1) { + option.UseGpu(); + } + + std::string rec_model_dir = argv[1]; + std::string rec_label_file = argv[2]; + std::string test_image = argv[3]; + InitAndInfer(rec_model_dir, rec_label_file, test_image, option); + return 0; +} diff --git a/examples/vision/ocr/PP-OCRv2/csharp/CMakeLists.txt b/examples/vision/ocr/PP-OCR/cpu-gpu/csharp/CMakeLists.txt similarity index 100% rename from 
examples/vision/ocr/PP-OCRv2/csharp/CMakeLists.txt rename to examples/vision/ocr/PP-OCR/cpu-gpu/csharp/CMakeLists.txt diff --git a/examples/vision/ocr/PP-OCRv3/csharp/README_CN.md b/examples/vision/ocr/PP-OCR/cpu-gpu/csharp/README.md old mode 100644 new mode 100755 similarity index 60% rename from examples/vision/ocr/PP-OCRv3/csharp/README_CN.md rename to examples/vision/ocr/PP-OCR/cpu-gpu/csharp/README.md index 52c8f1003d..3a87730e19 --- a/examples/vision/ocr/PP-OCRv3/csharp/README_CN.md +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/csharp/README.md @@ -1,52 +1,67 @@ [English](README.md) | 简体中文 -# PPOCRv3 C#部署示例 +# PaddleOCR CPU-GPU C#部署示例 本目录下提供`infer.cs`来调用C# API快速完成PPOCRv3模型在CPU/GPU上部署的示例。 -在部署前,需确认以下两个步骤 +## 1. 说明 +PaddleOCR支持利用FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、Intel GPU(独立显卡/集成显卡)硬件上快速部署OCR模型. -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) -- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) +## 2. 部署环境准备 +在部署前,需确认软硬件环境,同时下载预编译部署库,参考[FastDeploy安装文档](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装)安装FastDeploy预编译库. 在本目录执行如下命令即可在Windows完成编译测试,支持此模型需保证FastDeploy版本1.0.4以上(x.x.x>=1.0.4) -在本目录执行如下命令即可在Windows完成编译测试,支持此模型需保证FastDeploy版本1.0.4以上(x.x.x>=1.0.4) +## 3. 部署模型准备 +在部署前, 请准备好您所需要运行的推理模型, 您可以在[FastDeploy支持的PaddleOCR模型列表](../README.md)中下载所需模型. -## 1. 下载C#包管理程序nuget客户端 +## 4. 部署示例 + +### 4.1 下载C#包管理程序nuget客户端 > https://dist.nuget.org/win-x86-commandline/v6.4.0/nuget.exe - 下载完成后将该程序添加到环境变量**PATH**中 -## 2. 下载模型文件和测试图片 +### 4.2. 
下载模型文件和测试图片 > https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar # (下载后解压缩) > https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar > https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar > https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg > https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt -## 3. 编译示例代码 +### 4.3 编译示例代码 + +本文档编译的示例代码的编译工具依赖VS 2019,**Windows打开x64 Native Tools Command Prompt for VS 2019命令工具**,通过如下命令开始编译 -本文档编译的示例代码可在解压的库中找到,编译工具依赖VS 2019的安装,**Windows打开x64 Native Tools Command Prompt for VS 2019命令工具**,通过如下命令开始编译 ```shell -cd D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\ocr\PP-OCRv3\csharp +## 下载FastDeploy预编译库,用户可在上文提到的`FastDeploy预编译库`中自行选择合适的版本使用 +https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz + +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd D:\FastDeploy\examples\vision\ocr\PP-OCR\cpu-gpu\csharp + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd D:\PaddleOCR\deploy\fastdeploy\cpu-gpu\csharp mkdir build && cd build -cmake .. -G "Visual Studio 16 2019" -A x64 -DFASTDEPLOY_INSTALL_DIR=D:\Download\fastdeploy-win-x64-gpu-x.x.x -DCUDA_DIRECTORY="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.2" +cmake .. 
-G "Visual Studio 16 2019" -A x64 -DFASTDEPLOY_INSTALL_DIR=D:\fastdeploy-win-x64-gpu-x.x.x -DCUDA_DIRECTORY="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.2" nuget restore msbuild infer_demo.sln /m:4 /p:Configuration=Release /p:Platform=x64 ``` 关于使用Visual Studio 2019创建sln工程,或者CMake工程等方式编译的更详细信息,可参考如下文档 -- [在 Windows 使用 FastDeploy C++ SDK](../../../../../docs/cn/faq/use_sdk_on_windows.md) -- [FastDeploy C++库在Windows上的多种使用方式](../../../../../docs/cn/faq/use_sdk_on_windows_build.md) +- [在 Windows 使用 FastDeploy C++ SDK](https://github.com/PaddlePaddle/FastDeploy/tree/develop/docs/cn/faq/use_sdk_on_windows.md) +- [FastDeploy C++库在Windows上的多种使用方式](https://github.com/PaddlePaddle/FastDeploy/tree/develop/docs/cn/faq/use_sdk_on_windows_build.md) -## 4. 运行可执行程序 +### 4.4 运行可执行程序 注意Windows上运行时,需要将FastDeploy依赖的库拷贝至可执行程序所在目录, 或者配置环境变量。FastDeploy提供了工具帮助我们快速将所有依赖库拷贝至可执行程序所在目录,通过如下命令将所有依赖的dll文件拷贝至可执行程序所在的目录(可能生成的可执行文件在Release下还有一层目录,这里假设生成的可执行文件在Release处) ```shell -cd D:\Download\fastdeploy-win-x64-gpu-x.x.x +cd D:\fastdeploy-win-x64-gpu-x.x.x -fastdeploy_init.bat install %cd% D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\ocr\PP-OCRv3\csharp\build\Release +fastdeploy_init.bat install %cd% D:\PaddleOCR\deploy\fastdeploy\cpu-gpu\csharp\build\Release ``` 将dll拷贝到当前路径后,准备好模型和图片,使用如下命令运行可执行程序即可 @@ -58,7 +73,11 @@ infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v3.0_cls_infer ./ch_PP-OCRv infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v3.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 ``` -## PPOCRv3 C#接口 +## 5. PP-OCRv3 C# API接口简介 +下面提供了PP-OCRv3的C# API简介 + +- 如果用户想要更换部署后端或进行其他定制化操作, 请查看[C# Runtime API](https://baidu-paddle.github.io/fastdeploy-api/csharp/html/classfastdeploy_1_1RuntimeOption.html). 
+- 更多 PP-OCR C# API 请查看 [C# PP-OCR API](https://github.com/PaddlePaddle/FastDeploy/blob/develop/csharp/fastdeploy/vision/ocr/model.cs) ### 模型 @@ -122,7 +141,7 @@ fastdeploy.pipeline.PPOCRv3Model( Recognizer recognizer) ``` -> PPOCRv3Model模型加载和初始化。 +> PP-OCRv3Model模型加载和初始化。 > **参数** @@ -147,7 +166,8 @@ fastdeploy.OCRResult Predict(OpenCvSharp.Mat im) >> * **result**: OCR预测结果,包括由检测模型输出的检测框位置,分类模型输出的方向分类,以及识别模型输出的识别结果, OCRResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) -- [模型介绍](../../) -- [Python部署](../python) -- [视觉模型预测结果](../../../../../docs/api/vision_results/) -- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md) +## 6. 其它文档 +- [FastDeploy部署PaddleOCR模型概览](../../) +- [PP-OCRv3 Python部署](../python) +- [PP-OCRv3 C++ 部署](../cpp) +- [PP-OCRv3 C 部署](../c) diff --git a/examples/vision/ocr/PP-OCRv3/csharp/infer.cs b/examples/vision/ocr/PP-OCR/cpu-gpu/csharp/infer.cs similarity index 100% rename from examples/vision/ocr/PP-OCRv3/csharp/infer.cs rename to examples/vision/ocr/PP-OCR/cpu-gpu/csharp/infer.cs diff --git a/examples/vision/ocr/PP-OCR/cpu-gpu/python/README.md b/examples/vision/ocr/PP-OCR/cpu-gpu/python/README.md new file mode 100644 index 0000000000..d8143e0289 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/python/README.md @@ -0,0 +1,153 @@ +[English](README.md) | 简体中文 +# PaddleOCR CPU-GPU Python部署示例 +本目录下提供`infer.py`快速完成PP-OCRv3在CPU/GPU,以及GPU上通过Paddle-TensorRT加速部署的示例. + +## 1. 说明 +PaddleOCR支持利用FastDeploy在NVIDIA GPU、X86 CPU、飞腾CPU、ARM CPU、Intel GPU(独立显卡/集成显卡)硬件上快速部署OCR模型 + +## 2. 部署环境准备 +在部署前,需确认软硬件环境,同时下载预编译部署库,参考[FastDeploy安装文档](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装)安装FastDeploy预编译库. + +## 3. 部署模型准备 +在部署前, 请准备好您所需要运行的推理模型, 您可以在[FastDeploy支持的PaddleOCR模型列表](../README.md)中下载所需模型. + +## 4. 
运行部署示例 +```bash +# 安装FastDpeloy python包(详细文档请参考`部署环境准备`) +pip install fastdeploy-gpu-python -f https://www.paddlepaddle.org.cn/whl/fastdeploy.html +conda config --add channels conda-forge && conda install cudatoolkit=11.2 cudnn=8.2 + +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd FastDeploy/examples/vision/ocr/PP-OCR/cpu-gpu/python + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/cpu-gpu/python + +# 下载PP-OCRv3文字检测模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar +tar -xvf ch_PP-OCRv3_det_infer.tar +# 下载文字方向分类器模型 +wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar +tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar +# 下载PP-OCRv3文字识别模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar +tar -xvf ch_PP-OCRv3_rec_infer.tar + +# 下载预测图片与字典文件 +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt + +# 运行部署示例 +# 在CPU上使用Paddle Inference推理 +python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device cpu --backend paddle +# 在CPU上使用OenVINO推理 +python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device cpu --backend openvino +# 在CPU上使用ONNX Runtime推理 +python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device cpu --backend ort +# 在CPU上使用Paddle Lite推理 +python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model 
ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device cpu --backend pplite +# 在GPU上使用Paddle Inference推理 +python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device gpu --backend paddle +# 在GPU上使用Paddle TensorRT推理 +python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device gpu --backend pptrt +# 在GPU上使用ONNX Runtime推理 +python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device gpu --backend ort +# 在GPU上使用Nvidia TensorRT推理 +python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device gpu --backend trt + +# 同时, FastDeploy提供文字检测,文字分类,文字识别三个模型的单独推理, +# 有需要的用户, 请准备合适的图片, 同时根据自己的需求, 参考infer.py来配置自定义硬件与推理后端. + +# 在CPU上,单独使用文字检测模型部署 +python infer_det.py --det_model ch_PP-OCRv3_det_infer --image 12.jpg --device cpu + +# 在CPU上,单独使用文字方向分类模型部署 +python infer_cls.py --cls_model ch_ppocr_mobile_v2.0_cls_infer --image 12.jpg --device cpu + +# 在CPU上,单独使用文字识别模型部署 +python infer_rec.py --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device cpu + +``` + +运行完成可视化结果如下图所示 +
+ +
+ +## 5. 部署示例选项说明 + +|参数|含义|默认值 +|---|---|---| +|--det_model|指定检测模型文件夹所在的路径|None| +|--cls_model|指定分类模型文件夹所在的路径|None| +|--rec_model|指定识别模型文件夹所在的路径|None| +|--rec_label_file|识别模型所需label所在的路径|None| +|--image|指定测试图片所在的路径|None| +|--device|指定即将运行的硬件类型,支持的值为`[cpu, gpu]`,当设置为cpu时,可运行在x86 cpu/arm cpu等cpu上|cpu| +|--device_id|使用gpu时, 指定设备号|0| +|--backend|部署模型时使用的后端, 支持的值为`[paddle,pptrt,pplite,ort,openvino,trt]` |paddle| + +关于如何通过FastDeploy使用更多不同的推理后端,以及如何使用不同的硬件,请参考文档:[如何切换模型推理后端引擎](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/how_to_change_backend.md) + +## 6. 更多指南 + +### 6.1 如何使用Python部署PP-OCRv2系列模型. +本目录下的`infer.py`代码是以PP-OCRv3模型为例, 如果用户有使用PP-OCRv2的需求, 只需要按照下面所示的方式, 来创建PP-OCRv2并使用. + +```python +# 此行为创建PP-OCRv3模型的代码 +ppocr_v3 = fd.vision.ocr.PPOCRv3(det_model=det_model, cls_model=cls_model, rec_model=rec_model) +# 只需要将PPOCRv3改为PPOCRv2,即可创造PPOCRv2模型, 同时, 后续的接口均使用ppocr_v2来调用 +ppocr_v2 = fd.vision.ocr.PPOCRv2(det_model=det_model, cls_model=cls_model, rec_model=rec_model) + +# 如果用户在部署PP-OCRv2时, 需要使用TensorRT推理, 还需要改动Rec模型的TensorRT的输入shape. +# 建议如下修改, 需要把 H 维度改为32, W 纬度按需修改. +rec_option.set_trt_input_shape("x", [1, 3, 32, 10], + [args.rec_bs, 3, 32, 320], + [args.rec_bs, 3, 32, 2304]) +``` + +### 6.2 如何在PP-OCRv2/v3系列模型中, 关闭文字方向分类器的使用. + +在PP-OCRv3/v2中, 文字方向分类器是可选的, 用户可以按照以下方式, 来决定自己是否使用方向分类器. +```python +# 使用 Cls 模型 +ppocr_v3 = fd.vision.ocr.PPOCRv3(det_model=det_model, cls_model=cls_model, rec_model=rec_model) + +# 不使用 Cls 模型 +ppocr_v3 = fd.vision.ocr.PPOCRv3(det_model=det_model, cls_model=None, rec_model=rec_model) +``` +### 6.3 如何修改前后处理超参数. +在示例代码中, 我们展示出了修改前后处理超参数的接口,并设置为默认值,其中, FastDeploy提供的超参数的含义与文档[PaddleOCR推理模型参数解释](https://github.com/PaddlePaddle/PaddleOCR/blob/dygraph/doc/doc_ch/inference_args.md)是相同的. 如果用户想要进行更多定制化的开发, 请阅读[PP-OCR系列 Python API查阅](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/python/html/ocr.html) + +```python +# 设置检测模型的max_side_len +det_model.preprocessor.max_side_len = 960 +# 其他... 
+``` + +### 6.4 其他指南 +- [FastDeploy部署PaddleOCR模型概览](../../) +- [PP-OCRv3 C++部署](../cpp) +- [PP-OCRv3 C 部署](../c) +- [PP-OCRv3 C# 部署](../csharp) + +## 7. 常见问题 +- PaddleOCR能在FastDeploy支持的多种后端上推理,支持情况如下表所示, 如何切换后端, 详见文档[如何切换模型推理后端引擎](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/how_to_change_backend.md) + +|硬件类型|支持的后端| +|:---:|:---:| +|X86 CPU| Paddle Inference, ONNX Runtime, OpenVINO | +|ARM CPU| Paddle Lite | +|飞腾 CPU| ONNX Runtime | +|NVIDIA GPU| Paddle Inference, ONNX Runtime, TensorRT | + +- [如何将模型预测结果转为numpy格式](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/vision_result_related_problems.md) +- [Intel GPU(独立显卡/集成显卡)的使用](https://github.com/PaddlePaddle/FastDeploy/blob/develop/tutorials/intel_gpu/README.md) +- [编译CPU部署库](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/cpu.md) +- [编译GPU部署库](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/gpu.md) +- [编译Jetson部署库](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/jetson.md) diff --git a/examples/vision/ocr/PP-OCRv3/python/infer.py b/examples/vision/ocr/PP-OCR/cpu-gpu/python/infer.py similarity index 72% rename from examples/vision/ocr/PP-OCRv3/python/infer.py rename to examples/vision/ocr/PP-OCR/cpu-gpu/python/infer.py index 6dabce80ee..8eac845998 100755 --- a/examples/vision/ocr/PP-OCRv3/python/infer.py +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/python/infer.py @@ -41,23 +41,12 @@ def parse_arguments(): "--device", type=str, default='cpu', - help="Type of inference device, support 'cpu', 'kunlunxin' or 'gpu'.") - parser.add_argument( - "--backend", - type=str, - default="default", - help="Type of inference backend, support ort/trt/paddle/openvino, default 'openvino' for cpu, 'tensorrt' for gpu" - ) + help="Type of inference device, support 'cpu' or 'gpu'.") parser.add_argument( "--device_id", type=int, default=0, help="Define which GPU card used to run model.") - 
parser.add_argument( - "--cpu_thread_num", - type=int, - default=9, - help="Number of threads while inference on CPU.") parser.add_argument( "--cls_bs", type=int, @@ -68,6 +57,13 @@ def parse_arguments(): type=int, default=6, help="Recognition model inference batch size") + parser.add_argument( + "--backend", + type=str, + default="default", + help="Type of inference backend, support ort/trt/paddle/openvino, default 'openvino' for cpu, 'tensorrt' for gpu" + ) + return parser.parse_args() @@ -77,22 +73,11 @@ def build_option(args): cls_option = fd.RuntimeOption() rec_option = fd.RuntimeOption() - det_option.set_cpu_thread_num(args.cpu_thread_num) - cls_option.set_cpu_thread_num(args.cpu_thread_num) - rec_option.set_cpu_thread_num(args.cpu_thread_num) - if args.device.lower() == "gpu": det_option.use_gpu(args.device_id) cls_option.use_gpu(args.device_id) rec_option.use_gpu(args.device_id) - if args.device.lower() == "kunlunxin": - det_option.use_kunlunxin() - cls_option.use_kunlunxin() - rec_option.use_kunlunxin() - - return det_option, cls_option, rec_option - if args.backend.lower() == "trt": assert args.device.lower( ) == "gpu", "TensorRT backend require inference on device GPU." @@ -100,8 +85,9 @@ def build_option(args): cls_option.use_trt_backend() rec_option.use_trt_backend() - # 设置trt input shape - # 如果用户想要自己改动检测模型的输入shape, 我们建议用户把检测模型的长和高设置为32的倍数. + # If use TRT backend, the dynamic shape will be set as follow. + # We recommend that users set the length and height of the detection model to a multiple of 32. + # We also recommend that users set the Trt input shape as follow. det_option.set_trt_input_shape("x", [1, 3, 64, 64], [1, 3, 640, 640], [1, 3, 960, 960]) cls_option.set_trt_input_shape("x", [1, 3, 48, 10], @@ -111,7 +97,7 @@ def build_option(args): [args.rec_bs, 3, 48, 320], [args.rec_bs, 3, 48, 2304]) - # 用户可以把TRT引擎文件保存至本地 + # Users could save TRT cache file to disk as follow. 
det_option.set_trt_cache_file(args.det_model + "/det_trt_cache.trt") cls_option.set_trt_cache_file(args.cls_model + "/cls_trt_cache.trt") rec_option.set_trt_cache_file(args.rec_model + "/rec_trt_cache.trt") @@ -119,20 +105,21 @@ def build_option(args): elif args.backend.lower() == "pptrt": assert args.device.lower( ) == "gpu", "Paddle-TensorRT backend require inference on device GPU." - det_option.use_trt_backend() - det_option.enable_paddle_trt_collect_shape() - det_option.enable_paddle_to_trt() + det_option.use_paddle_infer_backend() + det_option.paddle_infer_option.collect_trt_shape = True + det_option.paddle_infer_option.enable_trt = True - cls_option.use_trt_backend() - cls_option.enable_paddle_trt_collect_shape() - cls_option.enable_paddle_to_trt() + cls_option.use_paddle_infer_backend() + cls_option.paddle_infer_option.collect_trt_shape = True + cls_option.paddle_infer_option.enable_trt = True - rec_option.use_trt_backend() - rec_option.enable_paddle_trt_collect_shape() - rec_option.enable_paddle_to_trt() + rec_option.use_paddle_infer_backend() + rec_option.paddle_infer_option.collect_trt_shape = True + rec_option.paddle_infer_option.enable_trt = True - # 设置trt input shape - # 如果用户想要自己改动检测模型的输入shape, 我们建议用户把检测模型的长和高设置为32的倍数. + # If use TRT backend, the dynamic shape will be set as follow. + # We recommend that users set the length and height of the detection model to a multiple of 32. + # We also recommend that users set the Trt input shape as follow. det_option.set_trt_input_shape("x", [1, 3, 64, 64], [1, 3, 640, 640], [1, 3, 960, 960]) cls_option.set_trt_input_shape("x", [1, 3, 48, 10], @@ -142,7 +129,7 @@ def build_option(args): [args.rec_bs, 3, 48, 320], [args.rec_bs, 3, 48, 2304]) - # 用户可以把TRT引擎文件保存至本地 + # Users could save TRT cache file to disk as follow. 
det_option.set_trt_cache_file(args.det_model) cls_option.set_trt_cache_file(args.cls_model) rec_option.set_trt_cache_file(args.rec_model) @@ -164,24 +151,28 @@ def build_option(args): cls_option.use_openvino_backend() rec_option.use_openvino_backend() + elif args.backend.lower() == "pplite": + assert args.device.lower( + ) == "cpu", "Paddle Lite backend require inference on device CPU." + det_option.use_lite_backend() + cls_option.use_lite_backend() + rec_option.use_lite_backend() + return det_option, cls_option, rec_option args = parse_arguments() -# Detection模型, 检测文字框 det_model_file = os.path.join(args.det_model, "inference.pdmodel") det_params_file = os.path.join(args.det_model, "inference.pdiparams") -# Classification模型,方向分类,可选 + cls_model_file = os.path.join(args.cls_model, "inference.pdmodel") cls_params_file = os.path.join(args.cls_model, "inference.pdiparams") -# Recognition模型,文字识别模型 + rec_model_file = os.path.join(args.rec_model, "inference.pdmodel") rec_params_file = os.path.join(args.rec_model, "inference.pdiparams") rec_label_file = args.rec_label_file -# 对于三个模型,均采用同样的部署配置 -# 用户也可根据自己的需求,个性化配置 det_option, cls_option, rec_option = build_option(args) det_model = fd.vision.ocr.DBDetector( @@ -193,25 +184,35 @@ cls_model = fd.vision.ocr.Classifier( rec_model = fd.vision.ocr.Recognizer( rec_model_file, rec_params_file, rec_label_file, runtime_option=rec_option) -# 创建PP-OCR,串联3个模型,其中cls_model可选,如无需求,可设置为None +# Parameters settings for pre and post processing of Det/Cls/Rec Models. +# All parameters are set to default values. +det_model.preprocessor.max_side_len = 960 +det_model.postprocessor.det_db_thresh = 0.3 +det_model.postprocessor.det_db_box_thresh = 0.6 +det_model.postprocessor.det_db_unclip_ratio = 1.5 +det_model.postprocessor.det_db_score_mode = "slow" +det_model.postprocessor.use_dilation = False +cls_model.postprocessor.cls_thresh = 0.9 + +# Create PP-OCRv3, if cls_model is not needed, just set cls_model=None . 
ppocr_v3 = fd.vision.ocr.PPOCRv3( det_model=det_model, cls_model=cls_model, rec_model=rec_model) -# 给cls和rec模型设置推理时的batch size -# 此值能为-1, 和1到正无穷 -# 当此值为-1时, cls和rec模型的batch size将默认和det模型检测出的框的数量相同 +# Set inference batch size for cls model and rec model, the value could be -1 and 1 to positive infinity. +# When inference batch size is set to -1, it means that the inference batch size +# of the cls and rec models will be the same as the number of boxes detected by the det model. ppocr_v3.cls_batch_size = args.cls_bs ppocr_v3.rec_batch_size = args.rec_bs -# 预测图片准备 +# Read the input image im = cv2.imread(args.image) -#预测并打印结果 +# Predict and reutrn the results result = ppocr_v3.predict(im) print(result) -# 可视化结果 +# Visuliaze the results. vis_im = fd.vision.vis_ppocr(im, result) cv2.imwrite("visualized_result.jpg", vis_im) print("Visualized result save in ./visualized_result.jpg") diff --git a/examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_cls.py b/examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_cls.py new file mode 100755 index 0000000000..b34868daef --- /dev/null +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_cls.py @@ -0,0 +1,77 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import fastdeploy as fd +import cv2 +import os + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--cls_model", + required=True, + help="Path of Classification model of PPOCR.") + parser.add_argument( + "--image", type=str, required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu', 'kunlunxin' or 'gpu'.") + parser.add_argument( + "--device_id", + type=int, + default=0, + help="Define which GPU card used to run model.") + return parser.parse_args() + + +def build_option(args): + + cls_option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + cls_option.use_gpu(args.device_id) + + return cls_option + + +args = parse_arguments() + +cls_model_file = os.path.join(args.cls_model, "inference.pdmodel") +cls_params_file = os.path.join(args.cls_model, "inference.pdiparams") + +# Set the runtime option +cls_option = build_option(args) + +# Create the cls_model +cls_model = fd.vision.ocr.Classifier( + cls_model_file, cls_params_file, runtime_option=cls_option) + +# Set the postprocessing parameters +cls_model.postprocessor.cls_thresh = 0.9 + +# Read the image +im = cv2.imread(args.image) + +# Predict and return the results +result = cls_model.predict(im) + +# User can infer a batch of images by following code. +# result = cls_model.batch_predict([im]) + +print(result) diff --git a/examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_det.py b/examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_det.py new file mode 100755 index 0000000000..7a7f5a07b7 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_det.py @@ -0,0 +1,82 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import fastdeploy as fd +import cv2 +import os + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--det_model", required=True, help="Path of Detection model of PPOCR.") + parser.add_argument( + "--image", type=str, required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu', 'kunlunxin' or 'gpu'.") + parser.add_argument( + "--device_id", + type=int, + default=0, + help="Define which GPU card used to run model.") + return parser.parse_args() + + +def build_option(args): + + det_option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + det_option.use_gpu(args.device_id) + + return det_option + + +args = parse_arguments() + +det_model_file = os.path.join(args.det_model, "inference.pdmodel") +det_params_file = os.path.join(args.det_model, "inference.pdiparams") + +# Set the runtime option +det_option = build_option(args) + +# Create the det_model +det_model = fd.vision.ocr.DBDetector( + det_model_file, det_params_file, runtime_option=det_option) + +# Set the preporcessing parameters +det_model.preprocessor.max_side_len = 960 +det_model.postprocessor.det_db_thresh = 0.3 +det_model.postprocessor.det_db_box_thresh = 0.6 +det_model.postprocessor.det_db_unclip_ratio = 1.5 +det_model.postprocessor.det_db_score_mode = "slow" +det_model.postprocessor.use_dilation = False + +# Read the image +im = cv2.imread(args.image) + +# Predict and return the results +result = det_model.predict(im) + 
+print(result) + +# Visualize the results +vis_im = fd.vision.vis_ppocr(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) +print("Visualized result save in ./visualized_result.jpg") diff --git a/examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_rec.py b/examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_rec.py new file mode 100755 index 0000000000..6f9e03b20e --- /dev/null +++ b/examples/vision/ocr/PP-OCR/cpu-gpu/python/infer_rec.py @@ -0,0 +1,79 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import fastdeploy as fd +import cv2 +import os + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--rec_model", + required=True, + help="Path of Recognization model of PPOCR.") + parser.add_argument( + "--rec_label_file", + required=True, + help="Path of Recognization model of PPOCR.") + parser.add_argument( + "--image", type=str, required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu', 'kunlunxin' or 'gpu'.") + parser.add_argument( + "--device_id", + type=int, + default=0, + help="Define which GPU card used to run model.") + return parser.parse_args() + + +def build_option(args): + + rec_option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + rec_option.use_gpu(args.device_id) + + return rec_option + + +args = parse_arguments() + +rec_model_file = os.path.join(args.rec_model, "inference.pdmodel") +rec_params_file = os.path.join(args.rec_model, "inference.pdiparams") +rec_label_file = args.rec_label_file + +# Set the runtime option +rec_option = build_option(args) + +# Create the rec_model +rec_model = fd.vision.ocr.Recognizer( + rec_model_file, rec_params_file, rec_label_file, runtime_option=rec_option) + +# Read the image +im = cv2.imread(args.image) + +# Predict and return the result +result = rec_model.predict(im) + +# User can infer a batch of images by following code. +# result = rec_model.batch_predict([im]) + +print(result) diff --git a/examples/vision/ocr/PP-OCR/kunlunxin/README.md b/examples/vision/ocr/PP-OCR/kunlunxin/README.md new file mode 100644 index 0000000000..16487674c9 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/kunlunxin/README.md @@ -0,0 +1,32 @@ +[English](README.md) | 简体中文 + +# PaddleOCR 在昆仑芯上部署方案-FastDeploy + +## 1. 说明 +PaddleOCR支持利用FastDeploy在昆仑芯片上部署模型. 
+ +支持如下芯片的部署 +- 昆仑 818-100(推理芯片) +- 昆仑 818-300(训练芯片) + +支持如下芯片的设备 +- K100/K200 昆仑 AI 加速卡 +- R200 昆仑芯 AI 加速卡 + +## 2. 支持的PaddleOCR推理模型 + +下表中的推理模型为FastDeploy测试过的模型, 下载链接由PaddleOCR模型库提供, +更多的模型, 详见[PP-OCR系列模型列表](https://github.com/PaddlePaddle/PaddleOCR/blob/release/2.6/doc/doc_ch/models_list.md), 欢迎用户尝试. + +| PaddleOCR版本 | 文本框检测 | 方向分类模型 | 文字识别 |字典文件| 说明 | +|:----|:----|:----|:----|:----|:--------| +| ch_PP-OCRv3[推荐] |[ch_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv3系列原始超轻量模型,支持中英文、多语种文本检测 | +| en_PP-OCRv3[推荐] |[en_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [en_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_infer.tar) | [en_dict.txt](https://bj.bcebos.com/paddlehub/fastdeploy/en_dict.txt) | OCRv3系列原始超轻量模型,支持英文与数字识别,除检测模型和识别模型的训练数据与中文模型不同以外,无其他区别 | +| ch_PP-OCRv2 |[ch_PP-OCRv2_det](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv2_rec](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测 | +| ch_PP-OCRv2_mobile |[ch_ppocr_mobile_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | 
[ch_ppocr_mobile_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测,比PPOCRv2更加轻量 | +| ch_PP-OCRv2_server |[ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) |[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2服务器系列模型, 支持中英文、多语种文本检测,比超轻量模型更大,但效果更好| + + +## 3. 详细的部署示例 +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/ocr/PP-OCRv2/cpp/CMakeLists.txt b/examples/vision/ocr/PP-OCR/kunlunxin/cpp/CMakeLists.txt similarity index 71% rename from examples/vision/ocr/PP-OCRv2/cpp/CMakeLists.txt rename to examples/vision/ocr/PP-OCR/kunlunxin/cpp/CMakeLists.txt index 8b2f7aa610..93540a7e83 100644 --- a/examples/vision/ocr/PP-OCRv2/cpp/CMakeLists.txt +++ b/examples/vision/ocr/PP-OCR/kunlunxin/cpp/CMakeLists.txt @@ -12,7 +12,3 @@ include_directories(${FASTDEPLOY_INCS}) add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) # 添加FastDeploy库依赖 target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) - -add_executable(infer_static_shape_demo ${PROJECT_SOURCE_DIR}/infer_static_shape.cc) -# 添加FastDeploy库依赖 -target_link_libraries(infer_static_shape_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/ocr/PP-OCR/kunlunxin/cpp/README.md b/examples/vision/ocr/PP-OCR/kunlunxin/cpp/README.md new file mode 100644 index 0000000000..3725a807e1 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/kunlunxin/cpp/README.md @@ -0,0 +1,58 @@ +[English](README.md) | 简体中文 +# PP-OCRv3 昆仑芯XPU C++部署示例 + +本目录下提供`infer.cc`, 供用户完成PP-OCRv3在昆仑芯XPU上的部署. + +## 1. 
部署环境准备 +在部署前,需自行编译基于昆仑芯XPU的预测库,参考文档[昆仑芯XPU部署环境编译安装](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装) + +## 2.部署模型准备 +在部署前, 请准备好您所需要运行的推理模型, 您可以在[FastDeploy支持的PaddleOCR模型列表](../README.md)中下载所需模型. + +## 3.运行部署示例 +``` +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd FastDeploy/examples/vision/ocr/PP-OCR/kunlunxin/cpp + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/kunlunxin/cpp + +mkdir build +cd build +# 使用编译完成的FastDeploy库编译infer_demo +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-kunlunxin +make -j + +# 下载PP-OCRv3文字检测模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar +tar -xvf ch_PP-OCRv3_det_infer.tar +# 下载文字方向分类器模型 +wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar +tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar +# 下载PP-OCRv3文字识别模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar +tar -xvf ch_PP-OCRv3_rec_infer.tar + +# 下载预测图片与字典文件 +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt + +./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg + +``` + +运行完成可视化结果如下图所示 + +
+ +
+ +## 4. 更多指南 +- [PP-OCR系列 C++ API查阅](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/cpp/html/namespacefastdeploy_1_1vision_1_1ocr.html) +- [FastDeploy部署PaddleOCR模型概览](../../) +- [PP-OCRv3 Python部署](../python) +- 如果用户想要调整前后处理超参数、单独使用文字检测识别模型、使用其他模型等,更多详细文档与说明请参考[PP-OCR系列在CPU/GPU上的部署](../../cpu-gpu/cpp/README.md) diff --git a/examples/vision/ocr/PP-OCRv3/cpp/infer_static_shape.cc b/examples/vision/ocr/PP-OCR/kunlunxin/cpp/infer.cc old mode 100755 new mode 100644 similarity index 57% rename from examples/vision/ocr/PP-OCRv3/cpp/infer_static_shape.cc rename to examples/vision/ocr/PP-OCR/kunlunxin/cpp/infer.cc index aea3f5699c..3342b53d16 --- a/examples/vision/ocr/PP-OCRv3/cpp/infer_static_shape.cc +++ b/examples/vision/ocr/PP-OCR/kunlunxin/cpp/infer.cc @@ -19,7 +19,11 @@ const char sep = '\\'; const char sep = '/'; #endif -void InitAndInfer(const std::string& det_model_dir, const std::string& cls_model_dir, const std::string& rec_model_dir, const std::string& rec_label_file, const std::string& image_file, const fastdeploy::RuntimeOption& option) { +void KunlunXinInfer(const std::string &det_model_dir, + const std::string &cls_model_dir, + const std::string &rec_model_dir, + const std::string &rec_label_file, + const std::string &image_file) { auto det_model_file = det_model_dir + sep + "inference.pdmodel"; auto det_params_file = det_model_dir + sep + "inference.pdiparams"; @@ -29,79 +33,83 @@ void InitAndInfer(const std::string& det_model_dir, const std::string& cls_model auto rec_model_file = rec_model_dir + sep + "inference.pdmodel"; auto rec_params_file = rec_model_dir + sep + "inference.pdiparams"; + auto option = fastdeploy::RuntimeOption(); + option.UseKunlunXin(); + auto det_option = option; auto cls_option = option; auto rec_option = option; - auto det_model = fastdeploy::vision::ocr::DBDetector(det_model_file, det_params_file, det_option); - auto cls_model = fastdeploy::vision::ocr::Classifier(cls_model_file, cls_params_file, cls_option); - auto rec_model 
= fastdeploy::vision::ocr::Recognizer(rec_model_file, rec_params_file, rec_label_file, rec_option); + // The cls and rec model can inference a batch of images now. + // User could initialize the inference batch size and set them after create + // PP-OCR model. + int cls_batch_size = 1; + int rec_batch_size = 6; - // Users could enable static shape infer for rec model when deploy PP-OCR on hardware - // which can not support dynamic shape infer well, like Huawei Ascend series. - rec_model.GetPreprocessor().SetStaticShapeInfer(true); + auto det_model = fastdeploy::vision::ocr::DBDetector( + det_model_file, det_params_file, det_option); + auto cls_model = fastdeploy::vision::ocr::Classifier( + cls_model_file, cls_params_file, cls_option); + auto rec_model = fastdeploy::vision::ocr::Recognizer( + rec_model_file, rec_params_file, rec_label_file, rec_option); assert(det_model.Initialized()); assert(cls_model.Initialized()); assert(rec_model.Initialized()); - // The classification model is optional, so the PP-OCR can also be connected in series as follows + // The classification model is optional, so the PP-OCR can also be connected + // in series as follows // auto ppocr_v3 = fastdeploy::pipeline::PPOCRv3(&det_model, &rec_model); - auto ppocr_v3 = fastdeploy::pipeline::PPOCRv3(&det_model, &cls_model, &rec_model); + auto ppocr_v3 = + fastdeploy::pipeline::PPOCRv3(&det_model, &cls_model, &rec_model); - // When users enable static shape infer for rec model, the batch size of cls and rec model must to be set to 1. - ppocr_v3.SetClsBatchSize(1); - ppocr_v3.SetRecBatchSize(1); + // Set inference batch size for cls model and rec model, the value could be -1 + // and 1 to positive infinity. + // When inference batch size is set to -1, it means that the inference batch + // size + // of the cls and rec models will be the same as the number of boxes detected + // by the det model. 
+ ppocr_v3.SetClsBatchSize(cls_batch_size); + ppocr_v3.SetRecBatchSize(rec_batch_size); - if(!ppocr_v3.Initialized()){ + if (!ppocr_v3.Initialized()) { std::cerr << "Failed to initialize PP-OCR." << std::endl; return; } auto im = cv::imread(image_file); - + auto im_bak = im.clone(); + fastdeploy::vision::OCRResult result; - if (!ppocr_v3.Predict(im, &result)) { + if (!ppocr_v3.Predict(&im, &result)) { std::cerr << "Failed to predict." << std::endl; return; } std::cout << result.Str() << std::endl; - auto vis_im = fastdeploy::vision::VisOcr(im, result); + auto vis_im = fastdeploy::vision::VisOcr(im_bak, result); cv::imwrite("vis_result.jpg", vis_im); std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; } -int main(int argc, char* argv[]) { - if (argc < 7) { +int main(int argc, char *argv[]) { + if (argc < 6) { std::cout << "Usage: infer_demo path/to/det_model path/to/cls_model " "path/to/rec_model path/to/rec_label_file path/to/image " - "run_option, " "e.g ./infer_demo ./ch_PP-OCRv3_det_infer " "./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer " - "./ppocr_keys_v1.txt ./12.jpg 0" - << std::endl; - std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " - "with ascend." 
+ "./ppocr_keys_v1.txt ./12.jpg" << std::endl; return -1; } - fastdeploy::RuntimeOption option; - int flag = std::atoi(argv[6]); - - if (flag == 0) { - option.UseCpu(); - } else if (flag == 1) { - option.UseAscend(); - } - std::string det_model_dir = argv[1]; std::string cls_model_dir = argv[2]; std::string rec_model_dir = argv[3]; std::string rec_label_file = argv[4]; std::string test_image = argv[5]; - InitAndInfer(det_model_dir, cls_model_dir, rec_model_dir, rec_label_file, test_image, option); + KunlunXinInfer(det_model_dir, cls_model_dir, rec_model_dir, rec_label_file, + test_image); return 0; } diff --git a/examples/vision/ocr/PP-OCR/kunlunxin/python/README.md b/examples/vision/ocr/PP-OCR/kunlunxin/python/README.md new file mode 100644 index 0000000000..724fad2715 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/kunlunxin/python/README.md @@ -0,0 +1,54 @@ +[English](README.md) | 简体中文 +# PP-OCRv3 昆仑芯XPU Python部署示例 + +本目录下提供`infer.py`, 供用户完成PP-OCRv3在昆仑芯XPU上的部署. + +## 1. 部署环境准备 +在部署前,需自行编译基于昆仑XPU的FastDeploy python wheel包并安装,参考文档[昆仑芯XPU部署环境](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装) + +## 2.部署模型准备 +在部署前, 请准备好您所需要运行的推理模型, 您可以在[FastDeploy支持的PaddleOCR模型列表](../README.md)中下载所需模型. 
+ +## 3.运行部署示例 +``` +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd FastDeploy/examples/vision/ocr/PP-OCR/kunlunxin/python + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/kunlunxin/python + +# 下载PP-OCRv3文字检测模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar +tar -xvf ch_PP-OCRv3_det_infer.tar +# 下载文字方向分类器模型 +wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar +tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar +# 下载PP-OCRv3文字识别模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar +tar -xvf ch_PP-OCRv3_rec_infer.tar + +# 下载预测图片与字典文件 +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt + +python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg +``` + +运行完成可视化结果如下图所示 + +
+ +
+ +## 4. 更多指南 +- [PP-OCR系列 Python API查阅](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/python/html/ocr.html) +- [FastDeploy部署PaddleOCR模型概览](../../) +- [PP-OCRv3 C++部署](../cpp) +- 如果用户想要调整前后处理超参数、单独使用文字检测识别模型、使用其他模型等,更多详细文档与说明请参考[PP-OCR系列在CPU/GPU上的部署](../../cpu-gpu/python/README.md) + +## 5. 常见问题 +- [如何将视觉模型预测结果转为numpy格式](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/faq/vision_result_related_problems.md) diff --git a/examples/vision/ocr/PP-OCRv2/python/infer_static_shape.py b/examples/vision/ocr/PP-OCR/kunlunxin/python/infer.py similarity index 73% rename from examples/vision/ocr/PP-OCRv2/python/infer_static_shape.py rename to examples/vision/ocr/PP-OCR/kunlunxin/python/infer.py index 29055fdaae..4780df832c 100755 --- a/examples/vision/ocr/PP-OCRv2/python/infer_static_shape.py +++ b/examples/vision/ocr/PP-OCR/kunlunxin/python/infer.py @@ -38,15 +38,15 @@ def parse_arguments(): parser.add_argument( "--image", type=str, required=True, help="Path of test image file.") parser.add_argument( - "--device", - type=str, - default='cpu', - help="Type of inference device, support 'cpu', 'kunlunxin' or 'gpu'.") - parser.add_argument( - "--cpu_thread_num", + "--cls_bs", type=int, - default=9, - help="Number of threads while inference on CPU.") + default=1, + help="Classification model inference batch size.") + parser.add_argument( + "--rec_bs", + type=int, + default=6, + help="Recognition model inference batch size") return parser.parse_args() @@ -56,24 +56,21 @@ def build_option(args): cls_option = fd.RuntimeOption() rec_option = fd.RuntimeOption() - # 当前需要对PP-OCR启用静态shape推理的硬件只有昇腾. 
- if args.device.lower() == "ascend": - det_option.use_ascend() - cls_option.use_ascend() - rec_option.use_ascend() + det_option.use_kunlunxin() + cls_option.use_kunlunxin() + rec_option.use_kunlunxin() return det_option, cls_option, rec_option args = parse_arguments() -# Detection模型, 检测文字框 det_model_file = os.path.join(args.det_model, "inference.pdmodel") det_params_file = os.path.join(args.det_model, "inference.pdiparams") -# Classification模型,方向分类,可选 + cls_model_file = os.path.join(args.cls_model, "inference.pdmodel") cls_params_file = os.path.join(args.cls_model, "inference.pdiparams") -# Recognition模型,文字识别模型 + rec_model_file = os.path.join(args.rec_model, "inference.pdmodel") rec_params_file = os.path.join(args.rec_model, "inference.pdiparams") rec_label_file = args.rec_label_file @@ -89,26 +86,26 @@ cls_model = fd.vision.ocr.Classifier( rec_model = fd.vision.ocr.Recognizer( rec_model_file, rec_params_file, rec_label_file, runtime_option=rec_option) -# Rec模型启用静态shape推理 -rec_model.preprocessor.static_shape_infer = True - -# 创建PP-OCR,串联3个模型,其中cls_model可选,如无需求,可设置为None -ppocr_v2 = fd.vision.ocr.PPOCRv2( +# Create PP-OCRv3, if cls_model is not needed, +# just set cls_model=None . +ppocr_v3 = fd.vision.ocr.PPOCRv3( det_model=det_model, cls_model=cls_model, rec_model=rec_model) -# Cls模型和Rec模型的batch size 必须设置为1, 开启静态shape推理 -ppocr_v2.cls_batch_size = 1 -ppocr_v2.rec_batch_size = 1 +# Set inference batch size for cls model and rec model, the value could be -1 and 1 to positive infinity. +# When inference batch size is set to -1, it means that the inference batch size +# of the cls and rec models will be the same as the number of boxes detected by the det model. +ppocr_v3.cls_batch_size = args.cls_bs +ppocr_v3.rec_batch_size = args.rec_bs -# 预测图片准备 +# Prepare image. im = cv2.imread(args.image) -#预测并打印结果 -result = ppocr_v2.predict(im) +# Print the results. +result = ppocr_v3.predict(im) print(result) -# 可视化结果 +# Visuliaze the output. 
vis_im = fd.vision.vis_ppocr(im, result) cv2.imwrite("visualized_result.jpg", vis_im) print("Visualized result save in ./visualized_result.jpg") diff --git a/examples/vision/ocr/PP-OCR/rockchip/README.md b/examples/vision/ocr/PP-OCR/rockchip/README.md new file mode 100644 index 0000000000..b38f7f8963 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/rockchip/README.md @@ -0,0 +1,23 @@ +[English](README.md) | 简体中文 + +# PaddleOCR 模型在RKNPU2上部署方案-FastDeploy + +## 1. 说明 +PaddleOCR支持通过FastDeploy在RKNPU2上部署相关模型. + +## 2. 支持模型列表 + +下表中的模型下载链接由PaddleOCR模型库提供, 详见[PP-OCR系列模型列表](https://github.com/PaddlePaddle/PaddleOCR/blob/release/2.6/doc/doc_ch/models_list.md) + +| PaddleOCR版本 | 文本框检测 | 方向分类模型 | 文字识别 |字典文件| 说明 | +|:----|:----|:----|:----|:----|:--------| +| ch_PP-OCRv3[推荐] |[ch_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv3系列原始超轻量模型,支持中英文、多语种文本检测 | +| en_PP-OCRv3[推荐] |[en_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [en_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_infer.tar) | [en_dict.txt](https://bj.bcebos.com/paddlehub/fastdeploy/en_dict.txt) | OCRv3系列原始超轻量模型,支持英文与数字识别,除检测模型和识别模型的训练数据与中文模型不同以外,无其他区别 | +| ch_PP-OCRv2 |[ch_PP-OCRv2_det](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv2_rec](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar) | 
[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测 | +| ch_PP-OCRv2_mobile |[ch_ppocr_mobile_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_mobile_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测,比PPOCRv2更加轻量 | +| ch_PP-OCRv2_server |[ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) |[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2服务器系列模型, 支持中英文、多语种文本检测,比超轻量模型更大,但效果更好| + + +## 3. 
详细部署的部署示例 +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/ocr/PP-OCRv3/cpp/CMakeLists.txt b/examples/vision/ocr/PP-OCR/rockchip/cpp/CMakeLists.txt similarity index 71% rename from examples/vision/ocr/PP-OCRv3/cpp/CMakeLists.txt rename to examples/vision/ocr/PP-OCR/rockchip/cpp/CMakeLists.txt index 8b2f7aa610..93540a7e83 100644 --- a/examples/vision/ocr/PP-OCRv3/cpp/CMakeLists.txt +++ b/examples/vision/ocr/PP-OCR/rockchip/cpp/CMakeLists.txt @@ -12,7 +12,3 @@ include_directories(${FASTDEPLOY_INCS}) add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) # 添加FastDeploy库依赖 target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) - -add_executable(infer_static_shape_demo ${PROJECT_SOURCE_DIR}/infer_static_shape.cc) -# 添加FastDeploy库依赖 -target_link_libraries(infer_static_shape_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/ocr/PP-OCR/rockchip/cpp/README.md b/examples/vision/ocr/PP-OCR/rockchip/cpp/README.md new file mode 100755 index 0000000000..f5fb212d94 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/rockchip/cpp/README.md @@ -0,0 +1,128 @@ +[English](README_CN.md) | 简体中文 +# PP-OCRv3 RKNPU2 C++部署示例 + +本目录下提供`infer.cc`, 供用户完成PP-OCRv3在RKNPU2的部署. + + +## 1. 部署环境准备 +在部署前,需确认以下两个步骤 +- 1. 在部署前,需自行编译基于RKNPU2的预测库,参考文档[RKNPU2部署环境编译](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装) +- 2. 同时请用户参考[FastDeploy RKNPU2资源导航](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/rknpu2.md) + +## 2.部署模型准备 +在部署前, 请准备好您所需要运行的推理模型, 您可以在[FastDeploy支持的PaddleOCR模型列表](../README.md)中下载所需模型. +同时, 在RKNPU2上部署PP-OCR系列模型时,我们需要把Paddle的推理模型转为RKNN模型. +由于rknn_toolkit2工具暂不支持直接从Paddle直接转换为RKNN模型,因此我们需要先将Paddle推理模型转为ONNX模型, 最后转为RKNN模型, 示例如下. 
+ +```bash +# 下载PP-OCRv3文字检测模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar +tar -xvf ch_PP-OCRv3_det_infer.tar +# 下载文字方向分类器模型 +wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar +tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar +# 下载PP-OCRv3文字识别模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar +tar -xvf ch_PP-OCRv3_rec_infer.tar + +# 请用户自行安装最新发布版本的paddle2onnx, 转换模型到ONNX格式的模型 +paddle2onnx --model_dir ch_PP-OCRv3_det_infer \ + --model_filename inference.pdmodel \ + --params_filename inference.pdiparams \ + --save_file ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ + --enable_dev_version True +paddle2onnx --model_dir ch_ppocr_mobile_v2.0_cls_infer \ + --model_filename inference.pdmodel \ + --params_filename inference.pdiparams \ + --save_file ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ + --enable_dev_version True +paddle2onnx --model_dir ch_PP-OCRv3_rec_infer \ + --model_filename inference.pdmodel \ + --params_filename inference.pdiparams \ + --save_file ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ + --enable_dev_version True + +# 固定模型的输入shape +python -m paddle2onnx.optimize --input_model ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ + --output_model ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ + --input_shape_dict "{'x':[1,3,960,960]}" +python -m paddle2onnx.optimize --input_model ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ + --output_model ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ + --input_shape_dict "{'x':[1,3,48,192]}" +python -m paddle2onnx.optimize --input_model ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ + --output_model ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ + --input_shape_dict "{'x':[1,3,48,320]}" + +# 在rockchip/rknpu2_tools/目录下, 我们为用户提供了转换ONNX模型到RKNN模型的工具 +python rockchip/rknpu2_tools/export.py --config_path 
tools/rknpu2/config/ppocrv3_det.yaml \ + --target_platform rk3588 +python rockchip/rknpu2_tools/export.py --config_path tools/rknpu2/config/ppocrv3_rec.yaml \ + --target_platform rk3588 +python rockchip/rknpu2_tools/export.py --config_path tools/rknpu2/config/ppocrv3_cls.yaml \ + --target_platform rk3588 +``` + +## 3.运行部署示例 +在本目录执行如下命令即可完成编译测试,支持此模型需保证FastDeploy版本1.0.3以上(x.x.x>1.0.3), RKNN版本在1.4.1b22以上。 + +``` +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd FastDeploy/examples/vision/ocr/PP-OCR/rockchip/cpp + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/rockchip/cpp + +mkdir build +cd build +# 使用编译完成的FastDeploy库编译infer_demo +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-rockchip +make -j + +# 下载图片和字典文件 +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt + +# 拷贝RKNN模型到build目录 + +# CPU推理 +./infer_demo ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ + ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ + ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ + ./ppocr_keys_v1.txt \ + ./12.jpg \ + 0 +# RKNPU推理 +./infer_demo ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer_rk3588_unquantized.rknn \ + ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v20_cls_infer_rk3588_unquantized.rknn \ + ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer_rk3588_unquantized.rknn \ + ./ppocr_keys_v1.txt \ + ./12.jpg \ + 1 +``` + +运行完成可视化结果如下图所示: + + + +结果输出如下: + +```text +det boxes: [[276,174],[285,173],[285,178],[276,179]]rec text: rec score:0.000000 cls label: 1 cls score: 0.766602 +det boxes: [[43,408],[483,390],[483,431],[44,449]]rec text: 上海斯格威铂尔曼大酒店 rec score:0.888450 cls label: 0 cls score: 1.000000 +det boxes: [[186,456],[399,448],[399,480],[186,488]]rec text: 打浦路15号 rec 
score:0.988769 cls label: 0 cls score: 1.000000 +det boxes: [[18,501],[513,485],[514,537],[18,554]]rec text: 绿洲仕格维花园公寓 rec score:0.992730 cls label: 0 cls score: 1.000000 +det boxes: [[78,553],[404,541],[404,573],[78,585]]rec text: 打浦路252935号 rec score:0.983545 cls label: 0 cls score: 1.000000 +Visualized result saved in ./vis_result.jpg +``` + +## 4. 更多指南 + +- [PP-OCR系列 C++ API查阅](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/cpp/html/namespacefastdeploy_1_1vision_1_1ocr.html) +- [FastDeploy部署PaddleOCR模型概览](../../) +- [PP-OCRv3 Python部署](../python) +- [FastDeploy RKNPU2资源导航](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/rknpu2.md) +- 如果用户想要调整前后处理超参数、单独使用文字检测识别模型、使用其他模型等,更多详细文档与说明请参考[PP-OCR系列在CPU/GPU上的部署](../../cpu-gpu/cpp/README.md) diff --git a/examples/vision/ocr/PP-OCRv3/rknpu2/cpp/infer_static_shape.cc b/examples/vision/ocr/PP-OCR/rockchip/cpp/infer.cc similarity index 100% rename from examples/vision/ocr/PP-OCRv3/rknpu2/cpp/infer_static_shape.cc rename to examples/vision/ocr/PP-OCR/rockchip/cpp/infer.cc diff --git a/examples/vision/ocr/PP-OCR/rockchip/python/README.md b/examples/vision/ocr/PP-OCR/rockchip/python/README.md new file mode 100755 index 0000000000..00d97dd969 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/rockchip/python/README.md @@ -0,0 +1,112 @@ +[English](README_CN.md) | 简体中文 +# PP-OCRv3 RKNPU2 Python部署示例 +本目录下提供`infer.py`, 供用户完成PP-OCRv3在RKNPU2的部署. + + +## 1. 部署环境准备 +在部署前,需确认以下两个步骤 +- 1. 在部署前,需自行编译基于RKNPU2的Python预测库,参考文档[RKNPU2部署环境编译](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#自行编译安装) +- 2. 同时请用户参考[FastDeploy RKNPU2资源导航](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/rknpu2.md) + +## 2.部署模型准备 +在部署前, 请准备好您所需要运行的推理模型, 您可以在[FastDeploy支持的PaddleOCR模型列表](../README.md)中下载所需模型. +同时, 在RKNPU2上部署PP-OCR系列模型时,我们需要把Paddle的推理模型转为RKNN模型. +由于rknn_toolkit2工具暂不支持直接从Paddle直接转换为RKNN模型,因此我们需要先将Paddle推理模型转为ONNX模型, 最后转为RKNN模型, 示例如下. 
+ +```bash +# 下载PP-OCRv3文字检测模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar +tar -xvf ch_PP-OCRv3_det_infer.tar +# 下载文字方向分类器模型 +wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar +tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar +# 下载PP-OCRv3文字识别模型 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar +tar -xvf ch_PP-OCRv3_rec_infer.tar + +# 请用户自行安装最新发布版本的paddle2onnx, 转换模型到ONNX格式的模型 +paddle2onnx --model_dir ch_PP-OCRv3_det_infer \ + --model_filename inference.pdmodel \ + --params_filename inference.pdiparams \ + --save_file ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ + --enable_dev_version True +paddle2onnx --model_dir ch_ppocr_mobile_v2.0_cls_infer \ + --model_filename inference.pdmodel \ + --params_filename inference.pdiparams \ + --save_file ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ + --enable_dev_version True +paddle2onnx --model_dir ch_PP-OCRv3_rec_infer \ + --model_filename inference.pdmodel \ + --params_filename inference.pdiparams \ + --save_file ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ + --enable_dev_version True + +# 固定模型的输入shape +python -m paddle2onnx.optimize --input_model ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ + --output_model ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ + --input_shape_dict "{'x':[1,3,960,960]}" +python -m paddle2onnx.optimize --input_model ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ + --output_model ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ + --input_shape_dict "{'x':[1,3,48,192]}" +python -m paddle2onnx.optimize --input_model ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ + --output_model ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ + --input_shape_dict "{'x':[1,3,48,320]}" + +# 在rockchip/rknpu2_tools/目录下, 我们为用户提供了转换ONNX模型到RKNN模型的工具 +python rockchip/rknpu2_tools/export.py --config_path 
tools/rknpu2/config/ppocrv3_det.yaml \ + --target_platform rk3588 +python rockchip/rknpu2_tools/export.py --config_path tools/rknpu2/config/ppocrv3_rec.yaml \ + --target_platform rk3588 +python rockchip/rknpu2_tools/export.py --config_path tools/rknpu2/config/ppocrv3_cls.yaml \ + --target_platform rk3588 +``` + + +## 3.运行部署示例 +在本目录执行如下命令即可完成编译测试,支持此模型需保证FastDeploy版本1.0.3以上(x.x.x>1.0.3), RKNN版本在1.4.1b22以上。 + +``` +# 下载图片和字典文件 +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt + +# 下载部署示例代码 +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd FastDeploy/examples/vision/ocr/PP-OCR/rockchip/python + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/rockchip/python + + +# CPU推理 +python3 infer.py \ + --det_model ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ + --cls_model ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ + --rec_model ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ + --rec_label_file ./ppocr_keys_v1.txt \ + --image 12.jpg \ + --device cpu + +# NPU推理 +python3 infer.py \ + --det_model ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer_rk3588_unquantized.rknn \ + --cls_model ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v20_cls_infer_rk3588_unquantized.rknn \ + --rec_model ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer_rk3588_unquantized.rknn \ + --rec_label_file ppocr_keys_v1.txt \ + --image 12.jpg \ + --device npu +``` + +运行完成可视化结果如下图所示 + + +## 4. 
更多指南 +- [PP-OCR系列 Python API查阅](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/python/html/ocr.html) +- [FastDeploy部署PaddleOCR模型概览](../../) +- [PP-OCRv3 C++部署](../cpp) +- [FastDeploy RKNPU2资源导航](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install/rknpu2.md) +- 如果用户想要调整前后处理超参数、单独使用文字检测识别模型、使用其他模型等,更多详细文档与说明请参考[PP-OCR系列在CPU/GPU上的部署](../../cpu-gpu/python/README.md) diff --git a/examples/vision/ocr/PP-OCRv3/rknpu2/python/infer_static_shape.py b/examples/vision/ocr/PP-OCR/rockchip/python/infer.py similarity index 100% rename from examples/vision/ocr/PP-OCRv3/rknpu2/python/infer_static_shape.py rename to examples/vision/ocr/PP-OCR/rockchip/python/infer.py diff --git a/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_cls.yaml b/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_cls.yaml new file mode 100644 index 0000000000..197becc2f2 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_cls.yaml @@ -0,0 +1,15 @@ +mean: + - + - 127.5 + - 127.5 + - 127.5 +std: + - + - 127.5 + - 127.5 + - 127.5 +model_path: ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx +outputs_nodes: +do_quantization: False +dataset: +output_folder: "./ch_ppocr_mobile_v2.0_cls_infer" diff --git a/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_det.yaml b/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_det.yaml new file mode 100644 index 0000000000..2897c5f74b --- /dev/null +++ b/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_det.yaml @@ -0,0 +1,15 @@ +mean: + - + - 123.675 + - 116.28 + - 103.53 +std: + - + - 58.395 + - 57.12 + - 57.375 +model_path: ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx +outputs_nodes: +do_quantization: False +dataset: +output_folder: "./ch_PP-OCRv3_det_infer" diff --git a/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_rec.yaml 
b/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_rec.yaml new file mode 100644 index 0000000000..8a22a39a2e --- /dev/null +++ b/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/config/ppocrv3_rec.yaml @@ -0,0 +1,15 @@ +mean: + - + - 127.5 + - 127.5 + - 127.5 +std: + - + - 127.5 + - 127.5 + - 127.5 +model_path: ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx +outputs_nodes: +do_quantization: False +dataset: +output_folder: "./ch_PP-OCRv3_rec_infer" diff --git a/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/export.py b/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/export.py new file mode 100644 index 0000000000..a94b348859 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/rockchip/rknpu2_tools/export.py @@ -0,0 +1,80 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os +import yaml +import argparse +from rknn.api import RKNN + + +def get_config(): + parser = argparse.ArgumentParser() + parser.add_argument("--verbose", default=True, help="rknntoolkit verbose") + parser.add_argument("--config_path") + parser.add_argument("--target_platform") + args = parser.parse_args() + return args + + +if __name__ == "__main__": + config = get_config() + with open(config.config_path) as file: + file_data = file.read() + yaml_config = yaml.safe_load(file_data) + print(yaml_config) + model = RKNN(config.verbose) + + # Config + mean_values = yaml_config["mean"] + std_values = yaml_config["std"] + model.config( + mean_values=mean_values, + std_values=std_values, + target_platform=config.target_platform) + + # Load ONNX model + if yaml_config["outputs_nodes"] is None: + ret = model.load_onnx(model=yaml_config["model_path"]) + else: + ret = model.load_onnx( + model=yaml_config["model_path"], + outputs=yaml_config["outputs_nodes"]) + assert ret == 0, "Load model failed!" + + # Build model + ret = model.build( + do_quantization=yaml_config["do_quantization"], + dataset=yaml_config["dataset"]) + assert ret == 0, "Build model failed!" + + # Init Runtime + ret = model.init_runtime() + assert ret == 0, "Init runtime environment failed!" + + # Export + if not os.path.exists(yaml_config["output_folder"]): + os.mkdir(yaml_config["output_folder"]) + + name_list = os.path.basename(yaml_config["model_path"]).split(".") + model_base_name = "" + for name in name_list[0:-1]: + model_base_name += name + model_device_name = config.target_platform.lower() + if yaml_config["do_quantization"]: + model_save_name = model_base_name + "_" + model_device_name + "_quantized" + ".rknn" + else: + model_save_name = model_base_name + "_" + model_device_name + "_unquantized" + ".rknn" + ret = model.export_rknn( + os.path.join(yaml_config["output_folder"], model_save_name)) + assert ret == 0, "Export rknn model failed!" 
+ print("Export OK!") diff --git a/examples/vision/ocr/PP-OCR/serving/README.md b/examples/vision/ocr/PP-OCR/serving/README.md new file mode 100644 index 0000000000..1d52fec45e --- /dev/null +++ b/examples/vision/ocr/PP-OCR/serving/README.md @@ -0,0 +1,24 @@ +[English](README.md) | 简体中文 +# PaddleOCR 使用 FastDeploy 服务化部署PP-OCR系列模型 +## 1. FastDeploy 服务化部署介绍 +在线推理作为企业或个人线上部署模型的最后一环,是工业界必不可少的环节,其中最重要的就是服务化推理框架。FastDeploy 目前提供两种服务化部署方式:simple_serving和fastdeploy_serving +- simple_serving:适用于只需要通过http等调用AI推理任务,没有高并发需求的场景。simple_serving基于Flask框架具有简单高效的特点,可以快速验证线上部署模型的可行性 +- fastdeploy_serving:适用于高并发、高吞吐量请求的场景。基于Triton Inference Server框架,是一套可用于实际生产的完备且性能卓越的服务化部署框架 + +## 2. 支持的PaddleOCR推理模型 + +下表中的推理模型为FastDeploy测试过的模型, 下载链接由PaddleOCR模型库提供, +更多的模型, 详见[PP-OCR系列模型列表](https://github.com/PaddlePaddle/PaddleOCR/blob/release/2.6/doc/doc_ch/models_list.md), 欢迎用户尝试. + +| PaddleOCR版本 | 文本框检测 | 方向分类模型 | 文字识别 |字典文件| 说明 | +|:----|:----|:----|:----|:----|:--------| +| ch_PP-OCRv3[推荐] |[ch_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv3系列原始超轻量模型,支持中英文、多语种文本检测 | +| en_PP-OCRv3[推荐] |[en_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [en_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_infer.tar) | [en_dict.txt](https://bj.bcebos.com/paddlehub/fastdeploy/en_dict.txt) | OCRv3系列原始超轻量模型,支持英文与数字识别,除检测模型和识别模型的训练数据与中文模型不同以外,无其他区别 | +| ch_PP-OCRv2 |[ch_PP-OCRv2_det](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar) | 
[ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv2_rec](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测 | +| ch_PP-OCRv2_mobile |[ch_ppocr_mobile_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_mobile_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测,比PPOCRv2更加轻量 | +| ch_PP-OCRv2_server |[ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) |[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2服务器系列模型, 支持中英文、多语种文本检测,比超轻量模型更大,但效果更好| + +## 3. 
详细的部署示例 + +- [fastdeploy serving](fastdeploy_serving) +- [simple serving](simple_serving) diff --git a/examples/vision/ocr/PP-OCRv3/serving/README_CN.md b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/README.md old mode 100644 new mode 100755 similarity index 61% rename from examples/vision/ocr/PP-OCRv3/serving/README_CN.md rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/README.md index 6b569324dc..7da6ce6fd9 --- a/examples/vision/ocr/PP-OCRv3/serving/README_CN.md +++ b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/README.md @@ -1,18 +1,18 @@ [English](README.md) | 简体中文 -# PP-OCR服务化部署示例 +# PaddleOCR服务化部署示例 -在服务化部署前,需确认 +PaddleOCR 服务化部署示例是利用FastDeploy Serving搭建的服务化部署示例。FastDeploy Serving是基于Triton Inference Server框架封装的适用于高并发、高吞吐量请求的服务化部署框架,是一套可用于实际生产的完备且性能卓越的服务化部署框架。如没有高并发,高吞吐场景的需求,只想快速检验模型线上部署的可行性,请参考[simple_serving](../simple_serving/) -- 1. 服务化镜像的软硬件环境要求和镜像拉取命令请参考[FastDeploy服务化部署](../../../../../serving/README_CN.md) - -## 介绍 -本文介绍了使用FastDeploy搭建OCR文字识别服务的方法. +## 1. 部署环境准备 +在服务化部署前,需确认服务化镜像的软硬件环境要求和镜像拉取命令,请参考[FastDeploy服务化部署](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/README_CN.md) +## 2. PP-OCRv3服务化部署介绍 +本文介绍了使用FastDeploy搭建PP-OCRv3模型服务的方法. 服务端必须在docker内启动,而客户端不是必须在docker容器内. **本文所在路径($PWD)下的models里包含模型的配置和代码(服务端会加载模型和代码以启动服务), 需要将其映射到docker中使用.** -OCR由det(检测)、cls(分类)和rec(识别)三个模型组成. +PP-OCRv3由det(检测)、cls(分类)和rec(识别)三个模型组成. 服务化部署串联的示意图如下图所示,其中`pp_ocr`串联了`det_preprocess`、`det_runtime`和`det_postprocess`,`cls_pp`串联了`cls_runtime`和`cls_postprocess`,`rec_pp`串联了`rec_runtime`和`rec_postprocess`. @@ -24,13 +24,21 @@ OCR由det(检测)、cls(分类)和rec(识别)三个模型组成.

-## 使用 -### 1. 服务端 -#### 1.1 Docker + +## 3. 服务端的使用 + +### 3.1 下载模型并使用服务化Docker ```bash # 下载仓库代码 +# 下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd FastDeploy/examples/vision/ocr/PP-OCRv3/serving/ +cd FastDeploy/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/serving/fastdeploy_serving # 下载模型,图片和字典文件 wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar @@ -61,13 +69,13 @@ docker run -dit --net=host --name fastdeploy --shm-size="1g" -v $PWD:/ocr_servin docker exec -it -u root fastdeploy bash ``` -#### 1.2 安装(在docker内) +### 3.2 安装(在docker内) ```bash ldconfig apt-get install libgl1 ``` -#### 1.3 启动服务端(在docker内) +#### 3.3 启动服务端(在docker内) ```bash fastdeployserver --model-repository=/ocr_serving/models ``` @@ -79,26 +87,30 @@ fastdeployserver --model-repository=/ocr_serving/models - `metrics-port`(optional): 服务端指标的端口号. 默认: `8002`. 本示例中未使用该端口. -### 2. 客户端 -#### 2.1 安装 +## 4. 客户端的使用 +### 4.1 安装 ```bash pip3 install tritonclient[all] ``` -#### 2.2 发送请求 +### 4.2 发送请求 ```bash python3 client.py ``` -## 配置修改 - +## 5.配置修改 当前默认配置在GPU上运行, 如果要在CPU或其他推理引擎上运行。 需要修改`models/runtime/config.pbtxt`中配置,详情请参考[配置文档](../../../../../serving/docs/zh_CN/model_configuration.md) -## 使用VisualDL进行可视化部署 +## 6. 其他指南 -可以使用VisualDL进行[Serving可视化部署](../../../../../serving/docs/zh_CN/vdl_management.md),上述启动服务、配置修改以及客户端请求的操作都可以基于VisualDL进行。 +- 使用PP-OCRv2进行服务化部署, 除了自行准备PP-OCRv2模型之外, 只需手动添加一行代码即可. +在[model.py](./models/det_postprocess/1/model.py#L109)文件**109行添加以下代码**: +``` +self.rec_preprocessor.cls_image_shape[1] = 32 +``` -通过VisualDL的可视化界面对PP-OCR进行服务化部署只需要如下三步: +- [使用 VisualDL 进行 Serving 可视化部署](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/docs/zh_CN/vdl_management.md) +通过VisualDL的可视化界面对PP-OCRv3进行服务化部署只需要如下三步: ```text 1. 
载入模型库:./vision/ocr/PP-OCRv3/serving 2. 下载模型资源文件:点击det_runtime模型,点击版本号1添加预训练模型,选择文字识别模型ch_PP-OCRv3_det进行下载。点击cls_runtime模型,点击版本号1添加预训练模型,选择文字识别模型ch_ppocr_mobile_v2.0_cls进行下载。点击rec_runtime模型,点击版本号1添加预训练模型,选择文字识别模型ch_PP-OCRv3_rec进行下载。点击rec_postprocess模型,点击版本号1添加预训练模型,选择文字识别模型ch_PP-OCRv3_rec进行下载。 @@ -107,3 +119,9 @@ python3 client.py

+ +## 7. 常见问题 +- [如何编写客户端 HTTP/GRPC 请求](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/docs/zh_CN/client.md) +- [如何编译服务化部署镜像](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/docs/zh_CN/compile.md) +- [服务化部署原理及动态Batch介绍](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/docs/zh_CN/demo.md) +- [模型仓库介绍](https://github.com/PaddlePaddle/FastDeploy/blob/develop/serving/docs/zh_CN/model_repository.md) diff --git a/examples/vision/ocr/PP-OCRv3/serving/client.py b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/client.py similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/client.py rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/client.py diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/cls_postprocess/1/model.py b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/cls_postprocess/1/model.py similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/cls_postprocess/1/model.py rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/cls_postprocess/1/model.py diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/cls_postprocess/config.pbtxt b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/cls_postprocess/config.pbtxt similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/cls_postprocess/config.pbtxt rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/cls_postprocess/config.pbtxt diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/cls_pp/config.pbtxt b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/cls_pp/config.pbtxt similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/cls_pp/config.pbtxt rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/cls_pp/config.pbtxt diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/cls_runtime/config.pbtxt 
b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/cls_runtime/config.pbtxt similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/cls_runtime/config.pbtxt rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/cls_runtime/config.pbtxt diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/det_postprocess/1/model.py b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/det_postprocess/1/model.py similarity index 99% rename from examples/vision/ocr/PP-OCRv3/serving/models/det_postprocess/1/model.py rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/det_postprocess/1/model.py index 103d51a072..a2b581e957 100644 --- a/examples/vision/ocr/PP-OCRv3/serving/models/det_postprocess/1/model.py +++ b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/det_postprocess/1/model.py @@ -190,7 +190,7 @@ class TritonPythonModel: index] > self.cls_threshold: image_list[index] = cv2.rotate( image_list[index].astype(np.float32), 1) - image_list[index] = image_list[index].astype(np.uint8) + image_list[index] = image_list[index].astype(np.uint8) rec_pre_tensors = self.rec_preprocessor.run(image_list) rec_dlpack_tensor = rec_pre_tensors[0].to_dlpack() diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/det_postprocess/config.pbtxt b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/det_postprocess/config.pbtxt similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/det_postprocess/config.pbtxt rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/det_postprocess/config.pbtxt diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/det_preprocess/1/model.py b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/det_preprocess/1/model.py similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/det_preprocess/1/model.py rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/det_preprocess/1/model.py diff --git
a/examples/vision/ocr/PP-OCRv3/serving/models/det_preprocess/config.pbtxt b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/det_preprocess/config.pbtxt similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/det_preprocess/config.pbtxt rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/det_preprocess/config.pbtxt diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/det_runtime/config.pbtxt b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/det_runtime/config.pbtxt similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/det_runtime/config.pbtxt rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/det_runtime/config.pbtxt diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/pp_ocr/config.pbtxt b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/pp_ocr/config.pbtxt similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/pp_ocr/config.pbtxt rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/pp_ocr/config.pbtxt diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/rec_postprocess/1/model.py b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/rec_postprocess/1/model.py similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/rec_postprocess/1/model.py rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/rec_postprocess/1/model.py diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/rec_postprocess/config.pbtxt b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/rec_postprocess/config.pbtxt similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/rec_postprocess/config.pbtxt rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/rec_postprocess/config.pbtxt diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/rec_pp/config.pbtxt 
b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/rec_pp/config.pbtxt similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/rec_pp/config.pbtxt rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/rec_pp/config.pbtxt diff --git a/examples/vision/ocr/PP-OCRv3/serving/models/rec_runtime/config.pbtxt b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/rec_runtime/config.pbtxt similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/models/rec_runtime/config.pbtxt rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/models/rec_runtime/config.pbtxt diff --git a/examples/vision/ocr/PP-OCRv3/serving/ppocr.png b/examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/ppocr.png similarity index 100% rename from examples/vision/ocr/PP-OCRv3/serving/ppocr.png rename to examples/vision/ocr/PP-OCR/serving/fastdeploy_serving/ppocr.png diff --git a/examples/vision/ocr/PP-OCR/serving/simple_serving/README.md b/examples/vision/ocr/PP-OCR/serving/simple_serving/README.md new file mode 100644 index 0000000000..913475c79e --- /dev/null +++ b/examples/vision/ocr/PP-OCR/serving/simple_serving/README.md @@ -0,0 +1,54 @@ +简体中文 | [English](README.md) + + +# PaddleOCR Python轻量服务化部署示例 + +PaddleOCR Python轻量服务化部署是FastDeploy基于Flask框架搭建的可快速验证线上模型部署可行性的服务化部署示例,基于http请求完成AI推理任务,适用于无并发推理的简单场景,如有高并发,高吞吐场景的需求请参考[fastdeploy_serving](../fastdeploy_serving/) + + +## 1. 部署环境准备 + +在部署前,需确认软硬件环境,同时下载预编译python wheel 包,参考文档[FastDeploy预编译库安装](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#FastDeploy预编译库安装) + + +## 2. 
启动服务 +```bash +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd FastDeploy/examples/vision/ocr/PP-OCR/serving/simple_serving + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/serving/simple_serving + +# 下载模型和字典文件 +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar +tar xvf ch_PP-OCRv3_det_infer.tar + +wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar +tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar + +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar +tar xvf ch_PP-OCRv3_rec_infer.tar + +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt + +# 启动服务,可修改server.py中的配置项来指定硬件、后端等 +# 可通过--host、--port指定IP和端口号 +fastdeploy simple_serving --app server:app +``` + +## 3. 客户端请求 +```bash +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +cd PaddleOCR/deploy/fastdeploy/serving/simple_serving + +# 下载测试图片 +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg + +# 请求服务,获取推理结果(如有必要,请修改脚本中的IP和端口号) +python client.py +``` diff --git a/examples/vision/ocr/PP-OCRv3/python/serving/client.py b/examples/vision/ocr/PP-OCR/serving/simple_serving/client.py similarity index 100% rename from examples/vision/ocr/PP-OCRv3/python/serving/client.py rename to examples/vision/ocr/PP-OCR/serving/simple_serving/client.py diff --git a/examples/vision/ocr/PP-OCRv3/python/serving/server.py b/examples/vision/ocr/PP-OCR/serving/simple_serving/server.py similarity index 100% rename from examples/vision/ocr/PP-OCRv3/python/serving/server.py rename to examples/vision/ocr/PP-OCR/serving/simple_serving/server.py diff --git a/examples/vision/ocr/PP-OCR/sophgo/README.md b/examples/vision/ocr/PP-OCR/sophgo/README.md new file mode 100644 index 0000000000..9fd2e9563f --- /dev/null +++ 
b/examples/vision/ocr/PP-OCR/sophgo/README.md @@ -0,0 +1,102 @@ +[English](README.md) | 简体中文 + +# PaddleOCR 模型在SOPHGO上部署方案-FastDeploy + +## 1. 说明 +PaddleOCR支持通过FastDeploy在SOPHGO上部署相关模型. + +## 2.支持模型列表 + +下表中的模型下载链接由PaddleOCR模型库提供, 详见[PP-OCR系列模型列表](https://github.com/PaddlePaddle/PaddleOCR/blob/release/2.6/doc/doc_ch/models_list.md) + +| PaddleOCR版本 | 文本框检测 | 方向分类模型 | 文字识别 |字典文件| 说明 | +|:----|:----|:----|:----|:----|:--------| +| ch_PP-OCRv3[推荐] |[ch_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv3系列原始超轻量模型,支持中英文、多语种文本检测 | +| en_PP-OCRv3[推荐] |[en_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [en_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_infer.tar) | [en_dict.txt](https://bj.bcebos.com/paddlehub/fastdeploy/en_dict.txt) | OCRv3系列原始超轻量模型,支持英文与数字识别,除检测模型和识别模型的训练数据与中文模型不同以外,无其他区别 | +| ch_PP-OCRv2 |[ch_PP-OCRv2_det](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv2_rec](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测 | +| ch_PP-OCRv2_mobile |[ch_ppocr_mobile_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar) | 
[ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_mobile_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测,比PPOCRv2更加轻量 | +| ch_PP-OCRv2_server |[ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) |[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2服务器系列模型, 支持中英文、多语种文本检测,比超轻量模型更大,但效果更好| + +## 3. 准备PP-OCR推理模型以及转换模型 + +PP-OCRv3包括文本检测模型(ch_PP-OCRv3_det)、方向分类模型(ch_ppocr_mobile_v2.0_cls)、文字识别模型(ch_PP-OCRv3_rec) +SOPHGO-TPU部署模型前需要将以上Paddle模型转换成bmodel模型,我们以ch_PP-OCRv3_det模型为例,具体步骤如下: +- 下载Paddle模型[ch_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar) +- Paddle模型转换为ONNX模型,请参考[Paddle2ONNX](https://github.com/PaddlePaddle/Paddle2ONNX) +- ONNX模型转换bmodel模型的过程,请参考[TPU-MLIR](https://github.com/sophgo/tpu-mlir) +下面我们提供一个example, 供用户参考,完成模型的转换.
+ +### 3.1 下载ch_PP-OCRv3_det模型,并转换为ONNX模型 +```shell +wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar +tar xvf ch_PP-OCRv3_det_infer.tar + +# 修改ch_PP-OCRv3_det模型的输入shape,由动态输入变成固定输入 +python paddle_infer_shape.py --model_dir ch_PP-OCRv3_det_infer \ + --model_filename inference.pdmodel \ + --params_filename inference.pdiparams \ + --save_dir ch_PP-OCRv3_det_infer_fix \ + --input_shape_dict="{'x':[1,3,960,608]}" + +# 请用户自行安装最新发布版本的paddle2onnx, 转换模型到ONNX格式的模型 +paddle2onnx --model_dir ch_PP-OCRv3_det_infer_fix \ + --model_filename inference.pdmodel \ + --params_filename inference.pdiparams \ + --save_file ch_PP-OCRv3_det_infer_fix.onnx \ + --enable_dev_version True +``` + +### 3.2 导出bmodel模型 + +以转换BM1684x的bmodel模型为例子,我们需要下载[TPU-MLIR](https://github.com/sophgo/tpu-mlir)工程,安装过程具体参见[TPU-MLIR文档](https://github.com/sophgo/tpu-mlir/blob/master/README.md)。 +#### 3.2.1 安装 +``` shell +docker pull sophgo/tpuc_dev:latest + +# myname1234是一个示例,也可以设置其他名字 +docker run --privileged --name myname1234 -v $PWD:/workspace -it sophgo/tpuc_dev:latest + +source ./envsetup.sh +./build.sh +``` + +#### 3.2.2 ONNX模型转换为bmodel模型 +``` shell +mkdir ch_PP-OCRv3_det && cd ch_PP-OCRv3_det + +#在该文件中放入测试图片,同时将上一步转换的ch_PP-OCRv3_det_infer_fix.onnx放入该文件夹中 +cp -rf ${REGRESSION_PATH}/dataset/COCO2017 . +cp -rf ${REGRESSION_PATH}/image . 
+#放入onnx模型文件ch_PP-OCRv3_det_infer_fix.onnx + +mkdir workspace && cd workspace + +#将ONNX模型转换为mlir模型,其中参数--output_names可以通过NETRON查看 +model_transform.py \ + --model_name ch_PP-OCRv3_det \ + --model_def ../ch_PP-OCRv3_det_infer_fix.onnx \ + --input_shapes [[1,3,960,608]] \ + --mean 0.0,0.0,0.0 \ + --scale 0.0039216,0.0039216,0.0039216 \ + --keep_aspect_ratio \ + --pixel_format rgb \ + --output_names sigmoid_0.tmp_0 \ + --test_input ../image/dog.jpg \ + --test_result ch_PP-OCRv3_det_top_outputs.npz \ + --mlir ch_PP-OCRv3_det.mlir + +#将mlir模型转换为BM1684x的F32 bmodel模型 +model_deploy.py \ + --mlir ch_PP-OCRv3_det.mlir \ + --quantize F32 \ + --chip bm1684x \ + --test_input ch_PP-OCRv3_det_in_f32.npz \ + --test_reference ch_PP-OCRv3_det_top_outputs.npz \ + --model ch_PP-OCRv3_det_1684x_f32.bmodel +``` +最终获得可以在BM1684x上能够运行的bmodel模型ch_PP-OCRv3_det_1684x_f32.bmodel。按照上面同样的方法,可以将ch_ppocr_mobile_v2.0_cls,ch_PP-OCRv3_rec转换为bmodel的格式。如果需要进一步对模型进行加速,可以将ONNX模型转换为INT8 bmodel,具体步骤参见[TPU-MLIR文档](https://github.com/sophgo/tpu-mlir/blob/master/README.md)。 + + +## 4. 详细部署的部署示例 +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/ocr/PP-OCRv3/sophgo/cpp/CMakeLists.txt b/examples/vision/ocr/PP-OCR/sophgo/cpp/CMakeLists.txt similarity index 100% rename from examples/vision/ocr/PP-OCRv3/sophgo/cpp/CMakeLists.txt rename to examples/vision/ocr/PP-OCR/sophgo/cpp/CMakeLists.txt diff --git a/examples/vision/ocr/PP-OCR/sophgo/cpp/README.md b/examples/vision/ocr/PP-OCR/sophgo/cpp/README.md new file mode 100644 index 0000000000..0b17f7df87 --- /dev/null +++ b/examples/vision/ocr/PP-OCR/sophgo/cpp/README.md @@ -0,0 +1,66 @@ +[English](README_CN.md) | 简体中文 +# PP-OCRv3 SOPHGO C++部署示例 +本目录下提供`infer.cc`快速完成PPOCRv3模型在SOPHGO BM1684x板子上加速部署的示例。 + +## 1. 部署环境准备 +在部署前,需自行编译基于SOPHGO硬件的预测库,参考文档[SOPHGO硬件部署环境](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#算能硬件部署环境) + +## 2. 生成基本目录文件 + +该例程由以下几个部分组成 +```text +. 
+├── CMakeLists.txt +├── fastdeploy-sophgo # 编译好的SDK文件夹 +├── image # 存放图片的文件夹 +├── infer.cc +└── model # 存放模型文件的文件夹 +``` + +## 3.部署示例 + +### 3.1 下载部署示例代码 +```bash +# 下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd FastDeploy/examples/vision/ocr/PP-OCR/sophgo/cpp + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/sophgo/cpp +``` + +### 3.2 拷贝bmodel模型文件至model文件夹 +将Paddle模型转换为SOPHGO bmodel模型,转换步骤参考[文档](../README.md). 将转换后的SOPHGO bmodel模型文件拷贝至model中. + +### 3.3 准备测试图片至image文件夹,以及字典文件 +```bash +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg +cp 12.jpg image/ + +wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt +``` + +### 3.4 编译example + +```bash +cd build +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-0.0.3 +make +``` + +### 3.5 运行例程 + +```bash +./infer_demo model ./ppocr_keys_v1.txt image/12.jpg +``` + + +## 4.
更多指南 + +- [PP-OCR系列 C++ API查阅](https://www.paddlepaddle.org.cn/fastdeploy-api-doc/cpp/html/namespacefastdeploy_1_1vision_1_1ocr.html) +- [FastDeploy部署PaddleOCR模型概览](../../) +- [PP-OCRv3 Python部署](../python) +- 如果用户想要调整前后处理超参数、单独使用文字检测识别模型、使用其他模型等,更多详细文档与说明请参考[PP-OCR系列在CPU/GPU上的部署](../../cpu-gpu/cpp/README.md) diff --git a/examples/vision/ocr/PP-OCRv3/sophgo/cpp/infer.cc b/examples/vision/ocr/PP-OCR/sophgo/cpp/infer.cc similarity index 95% rename from examples/vision/ocr/PP-OCRv3/sophgo/cpp/infer.cc rename to examples/vision/ocr/PP-OCR/sophgo/cpp/infer.cc index 168099b53a..181561b39e 100644 --- a/examples/vision/ocr/PP-OCRv3/sophgo/cpp/infer.cc +++ b/examples/vision/ocr/PP-OCR/sophgo/cpp/infer.cc @@ -19,10 +19,10 @@ const char sep = '\\'; const char sep = '/'; #endif -void InitAndInfer(const std::string& det_model_dir, - const std::string& rec_label_file, - const std::string& image_file, - const fastdeploy::RuntimeOption& option) { +void InitAndInfer(const std::string &det_model_dir, + const std::string &rec_label_file, + const std::string &image_file, + const fastdeploy::RuntimeOption &option) { auto det_model_file = det_model_dir + sep + "ch_PP-OCRv3_det_1684x_f32.bmodel"; auto det_params_file = det_model_dir + sep + ""; @@ -114,7 +114,7 @@ void InitAndInfer(const std::string& det_model_dir, std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; } -int main(int argc, char* argv[]) { +int main(int argc, char *argv[]) { if (argc < 4) { std::cout << "Usage: infer_demo path/to/model " "path/to/rec_label_file path/to/image " diff --git a/examples/vision/ocr/PP-OCRv3/sophgo/python/README.md b/examples/vision/ocr/PP-OCR/sophgo/python/README.md similarity index 50% rename from examples/vision/ocr/PP-OCRv3/sophgo/python/README.md rename to examples/vision/ocr/PP-OCR/sophgo/python/README.md index dc9ce310ba..27dbe2694c 100644 --- a/examples/vision/ocr/PP-OCRv3/sophgo/python/README.md +++ b/examples/vision/ocr/PP-OCR/sophgo/python/README.md @@ -1,15 +1,28 
@@ -# PPOCRv3 Python部署示例 +[English](README.md) | 简体中文 +# PP-OCRv3 SOPHGO Python部署示例 +本目录下提供`infer.py`快速完成 PP-OCRv3 在SOPHGO TPU上部署的示例。 -在部署前,需确认以下两个步骤 +## 1. 部署环境准备 -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../../docs/cn/build_and_install/sophgo.md) +在部署前,需自行编译基于算能硬件的FastDeploy python wheel包并安装,参考文档[算能硬件部署环境](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/cn/build_and_install#算能硬件部署环境) -本目录下提供`infer.py`快速完成 PPOCRv3 在SOPHGO TPU上部署的示例。执行如下脚本即可完成 +## 2.运行部署示例 + +### 2.1 模型准备 +将Paddle模型转换为SOPHGO bmodel模型, 转换步骤参考[文档](../README.md) + +### 2.2 开始部署 ```bash # 下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd FastDeploy/examples/vision/ocr/PP-OCRv3/sophgo/python +cd FastDeploy/examples/vision/ocr/PP-OCR/sophgo/python + +# 如果您希望从PaddleOCR下载示例代码,请运行 +git clone https://github.com/PaddlePaddle/PaddleOCR.git +# 注意:如果当前分支找不到下面的fastdeploy测试代码,请切换到dygraph分支 +git checkout dygraph +cd PaddleOCR/deploy/fastdeploy/sophgo/python # 下载图片 wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg @@ -33,6 +46,7 @@ det boxes: [[74,553],[427,542],[428,571],[75,582]]rec text: 打浦路252935号 r 可视化结果保存在sophgo_result.jpg中 ``` -## 其它文档 -- [PPOCRv3 C++部署](../cpp) -- [转换 PPOCRv3 SOPHGO模型文档](../README.md) +## 3. 
其它文档 +- [PP-OCRv3 C++部署](../cpp) +- [转换 PP-OCRv3 SOPHGO模型文档](../README.md) +- 如果用户想要调整前后处理超参数、单独使用文字检测识别模型、使用其他模型等,更多详细文档与说明请参考[PP-OCR系列在CPU/GPU上的部署](../../cpu-gpu/cpp/README.md) diff --git a/examples/vision/ocr/PP-OCRv3/sophgo/python/infer.py b/examples/vision/ocr/PP-OCR/sophgo/python/infer.py similarity index 100% rename from examples/vision/ocr/PP-OCRv3/sophgo/python/infer.py rename to examples/vision/ocr/PP-OCR/sophgo/python/infer.py diff --git a/examples/vision/ocr/PP-OCRv3/web/README_CN.md b/examples/vision/ocr/PP-OCR/web/README.md similarity index 76% rename from examples/vision/ocr/PP-OCRv3/web/README_CN.md rename to examples/vision/ocr/PP-OCR/web/README.md index a383f8c525..5ca9628d30 100644 --- a/examples/vision/ocr/PP-OCRv3/web/README_CN.md +++ b/examples/vision/ocr/PP-OCR/web/README.md @@ -4,19 +4,17 @@ 本节介绍部署PaddleOCR的PP-OCRv3模型在浏览器中运行,以及@paddle-js-models/ocr npm包中的js接口。 -## 前端部署PP-OCRv3模型 +## 1. 前端部署PP-OCRv3模型 +PP-OCRv3模型web demo使用[**参考文档**](https://github.com/PaddlePaddle/FastDeploy/tree/develop/examples/application/js/web_demo) -PP-OCRv3模型web demo使用[**参考文档**](../../../../application/js/web_demo/) - - -## PP-OCRv3 js接口 +## 2. 
PP-OCRv3 js接口 ``` import * as ocr from "@paddle-js-models/ocr"; await ocr.init(detConfig, recConfig); const res = await ocr.recognize(img, option, postConfig); ``` -ocr模型加载和初始化,其中模型为Paddle.js模型格式,js模型转换方式参考[文档](../../../../application/js/web_demo/README.md) +ocr模型加载和初始化,其中模型为Paddle.js模型格式,js模型转换方式参考[文档](https://github.com/PaddlePaddle/FastDeploy/tree/develop/examples/application/js/web_demo/README.md) **init函数参数** @@ -32,9 +30,4 @@ ocr模型加载和初始化,其中模型为Paddle.js模型格式,js模型转 ## 其它文档 - -- [PP-OCR 系列模型介绍](../../) -- [PP-OCRv3 C++部署](../cpp) -- [模型预测结果说明](../../../../../docs/api/vision_results/) -- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md) -- [PP-OCRv3 微信小程序部署文档](../mini_program/) +- [PP-OCRv3 微信小程序部署文档](https://github.com/PaddlePaddle/FastDeploy/tree/develop/examples/application/js/mini_program) diff --git a/examples/vision/ocr/PP-OCRv2/android/README.md b/examples/vision/ocr/PP-OCRv2/android/README.md deleted file mode 100644 index 780a4bbe0d..0000000000 --- a/examples/vision/ocr/PP-OCRv2/android/README.md +++ /dev/null @@ -1,203 +0,0 @@ -English | [简体中文](README_CN.md) -# OCR Text Recognition Android Demo Tutorial - -Real-time OCR text recognition on Android. This demo is easy to use for everyone. For example, you can run your own trained model in the demo. - -## Prepare the Environment - -1. Install Android Studio in your local environment. Refer to [Android Studio Official Website](https://developer.android.com/studio) for detailed tutorial. -2. Prepare an Android phone and turn on the USB debug mode. Opening: `Settings -> Find developer options -> Open developer options and USB debug mode` - -## Deployment Steps - -1. The OCR text recognition Demo is located in the `fastdeploy/examples/vision/ocr/PP-OCRv2/android` -2. Open PP-OCRv2/android project with Android Studio -3. Connect the phone to the computer, turn on USB debug mode and file transfer mode, and connect your phone to Android Studio (allow the phone to install software from USB) - -

-image -

- -> **Attention:** ->> If you encounter an NDK configuration error during import, compilation or running, open ` File > Project Structure > SDK Location` and change the path of SDK configured by the `Andriod SDK location`. - -4. Click the Run button to automatically compile the APP and install it to the phone. (The process will automatically download the pre-compiled FastDeploy Android library and model files. Internet is required). -The final effect is as follows. Figure 1: Install the APP on the phone; Figure 2: The effect after opening the APP. It will automatically recognize and mark the objects in the image; Figure 3: APP setting option. Click setting in the upper right corner and modify your options. - -| APP Icon | APP Effect | APP Settings - | --- | --- | --- | -| ![app_pic](https://user-images.githubusercontent.com/14995488/203484427-83de2316-fd60-4baf-93b6-3755f9b5559d.jpg) | ![app_res](https://user-images.githubusercontent.com/14995488/203495616-af42a5b7-d3bc-4fce-8d5e-2ed88454f618.jpg) | ![app_setup](https://user-images.githubusercontent.com/14995488/203484436-57fdd041-7dcc-4e0e-b6cb-43e5ac1e729b.jpg) | - -### PP-OCRv2 Java API Description - -- Model initialized API: The initialized API contains two ways: Firstly, initialize directly through the constructor. Secondly, initialize at the appropriate program node by calling the init function. PP-OCR initialization parameters are as follows. - - modelFile: String. Model file path in paddle format, such as model.pdmodel - - paramFile: String. Parameter file path in paddle format, such as model.pdiparams - - labelFile: String. This optional parameter indicates the path of the label file and is used for visualization. such as ppocr_keys_v1.txt, each line containing one label - - option: RuntimeOption. Optional parameter for model initialization. Default runtime options if the parameter is not passed. 
Different from other models, PP-OCRv2 contains base models such as DBDetector, Classifier, Recognizer and the pipeline type. - -```java -// Constructor: constructor w/o label file -public DBDetector(String modelFile, String paramsFile); -public DBDetector(String modelFile, String paramsFile, RuntimeOption option); -public Classifier(String modelFile, String paramsFile); -public Classifier(String modelFile, String paramsFile, RuntimeOption option); -public Recognizer(String modelFile, String paramsFile, String labelPath); -public Recognizer(String modelFile, String paramsFile, String labelPath, RuntimeOption option); -public PPOCRv2(); // An empty constructor, which can be initialized by calling init -// Constructor w/o classifier -public PPOCRv2(DBDetector detModel, Recognizer recModel); -public PPOCRv2(DBDetector detModel, Classifier clsModel, Recognizer recModel); -``` -- Model Prediction API: The Model Prediction API contains an API for direct prediction and an API for visualization. In direct prediction, we do not save the image and render the result on Bitmap. Instead, we merely predict the inference result. For prediction and visualization, the results are both predicted and visualized, the visualized images are saved to the specified path, and the visualized results are rendered in Bitmap (Now Bitmap in ARGB8888 format is supported). Afterward, the Bitmap can be displayed on the camera. -```java -// Direct prediction: No image saving and no result rendering to Bitmap -public OCRResult predict(Bitmap ARGB8888Bitmap); -// Prediction and visualization: Predict and visualize the results, save the visualized image to the specified path, and render the visualized results on Bitmap -public OCRResult predict(Bitmap ARGB8888Bitmap, String savedImagePath); -public OCRResult predict(Bitmap ARGB8888Bitmap, boolean rendering); // Render without saving images -``` -- Model resource release API: Call release() API to release model resources. 
Return true for successful release and false for failure; call initialized() to determine whether the model was initialized successfully, with true indicating successful initialization and false indicating failure. -```java -public boolean release(); // Release native resources -public boolean initialized(); // Check if initialization was successful -``` - -- RuntimeOption settings - -```java -public void enableLiteFp16(); // Enable fp16 accuracy inference -public void disableLiteFP16(); // Disable fp16 accuracy inference -public void enableLiteInt8(); // Enable int8 accuracy inference for quantification models -public void disableLiteInt8(); // Disable int8 accuracy inference -public void setCpuThreadNum(int threadNum); // Set thread numbers -public void setLitePowerMode(LitePowerMode mode); // Set power mode -public void setLitePowerMode(String modeStr); // Set power mode through character string -``` - -- Model OCRResult -```java -public class OCRResult { - public int[][] mBoxes; // The coordinates of all target boxes in a single image. 
8 int values represent the 4 coordinate points of the box in the order of bottom left, bottom right, top right and top left - public String[] mText; // Recognized text in multiple text boxes - public float[] mRecScores; // Confidence of the recognized text in the box - public float[] mClsScores; // Confidence of the classification result of the text box - public int[] mClsLabels; // the directional classification of the text box - public boolean mInitialized = false; // Whether the result is valid or not -} -``` -Refer to [api/vision_results/ocr_result.md](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/api/vision_results/ocr_result.md) for C++/Python OCRResult - - -- Model Calling Example 1: Using Constructor -```java -import java.nio.ByteBuffer; -import android.graphics.Bitmap; -import android.opengl.GLES20; - -import com.baidu.paddle.fastdeploy.RuntimeOption; -import com.baidu.paddle.fastdeploy.LitePowerMode; -import com.baidu.paddle.fastdeploy.vision.OCRResult; -import com.baidu.paddle.fastdeploy.vision.ocr.Classifier; -import com.baidu.paddle.fastdeploy.vision.ocr.DBDetector; -import com.baidu.paddle.fastdeploy.vision.ocr.Recognizer; - -// Model path -String detModelFile = "ch_PP-OCRv2_det_infer/inference.pdmodel"; -String detParamsFile = "ch_PP-OCRv2_det_infer/inference.pdiparams"; -String clsModelFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdmodel"; -String clsParamsFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdiparams"; -String recModelFile = "ch_PP-OCRv2_rec_infer/inference.pdmodel"; -String recParamsFile = "ch_PP-OCRv2_rec_infer/inference.pdiparams"; -String recLabelFilePath = "labels/ppocr_keys_v1.txt"; -// Set the RuntimeOption -RuntimeOption detOption = new RuntimeOption(); -RuntimeOption clsOption = new RuntimeOption(); -RuntimeOption recOption = new RuntimeOption(); -detOption.setCpuThreadNum(2); -clsOption.setCpuThreadNum(2); -recOption.setCpuThreadNum(2); -detOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); 
-clsOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -recOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -detOption.enableLiteFp16(); -clsOption.enableLiteFp16(); -recOption.enableLiteFp16(); -// Initialize the model -DBDetector detModel = new DBDetector(detModelFile, detParamsFile, detOption); -Classifier clsModel = new Classifier(clsModelFile, clsParamsFile, clsOption); -Recognizer recModel = new Recognizer(recModelFile, recParamsFile, recLabelFilePath, recOption); -PPOCRv2 model = new PPOCRv2(detModel, clsModel, recModel); - -// Read the image: The following is merely the pseudo code to read the Bitmap -ByteBuffer pixelBuffer = ByteBuffer.allocate(width * height * 4); -GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer); -Bitmap ARGB8888ImageBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); -ARGB8888ImageBitmap.copyPixelsFromBuffer(pixelBuffer); - -// Model Inference -OCRResult result = model.predict(ARGB8888ImageBitmap); - -// Release model resources -model.release(); -``` - -- Model calling example 2: Manually call init at the appropriate program node -```java -// import is as the above ... 
-import com.baidu.paddle.fastdeploy.RuntimeOption; -import com.baidu.paddle.fastdeploy.LitePowerMode; -import com.baidu.paddle.fastdeploy.vision.OCRResult; -import com.baidu.paddle.fastdeploy.vision.ocr.Classifier; -import com.baidu.paddle.fastdeploy.vision.ocr.DBDetector; -import com.baidu.paddle.fastdeploy.vision.ocr.Recognizer; -// Create an empty model -PPOCRv2 model = new PPOCRv2(); -// Model path -String detModelFile = "ch_PP-OCRv2_det_infer/inference.pdmodel"; -String detParamsFile = "ch_PP-OCRv2_det_infer/inference.pdiparams"; -String clsModelFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdmodel"; -String clsParamsFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdiparams"; -String recModelFile = "ch_PP-OCRv2_rec_infer/inference.pdmodel"; -String recParamsFile = "ch_PP-OCRv2_rec_infer/inference.pdiparams"; -String recLabelFilePath = "labels/ppocr_keys_v1.txt"; -// Set the RuntimeOption -RuntimeOption detOption = new RuntimeOption(); -RuntimeOption clsOption = new RuntimeOption(); -RuntimeOption recOption = new RuntimeOption(); -detOption.setCpuThreadNum(2); -clsOption.setCpuThreadNum(2); -recOption.setCpuThreadNum(2); -detOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -clsOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -recOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -detOption.enableLiteFp16(); -clsOption.enableLiteFp16(); -recOption.enableLiteFp16(); -// Use init function for initialization -DBDetector detModel = new DBDetector(detModelFile, detParamsFile, detOption); -Classifier clsModel = new Classifier(clsModelFile, clsParamsFile, clsOption); -Recognizer recModel = new Recognizer(recModelFile, recParamsFile, recLabelFilePath, recOption); -model.init(detModel, clsModel, recModel); -// Bitmap reading, model prediction, and resource release are as above -``` -Refer to [OcrMainActivity](./app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java) for more details - -## Replace FastDeploy SDK and 
Models -It’s simple to replace the FastDeploy prediction library and models. The prediction library is located at `app/libs/fastdeploy-android-sdk-xxx.aar`, where `xxx` represents the version of your prediction library. The models are located at `app/src/main/assets/models` -- Replace the FastDeploy Android SDK: Download or compile the latest FastDeploy Android SDK, unzip and place it in the `app/libs` directory; For detailed configuration, refer to - - [FastDeploy Java SDK in Android](../../../../../java/android/) - -- Steps to replace OCR models: - - Put your OCR model in `app/src/main/assets/models`; - - Modify the default value of the model path in `app/src/main/res/values/strings.xml`. For example, -```xml - -models -labels/ppocr_keys_v1.txt -``` - -## More Reference Documents -For more FastDeploy Java API documentes and how to access FastDeploy C++ API via JNI, refer to: -- [FastDeploy Java SDK in Android](../../../../../java/android/) -- [FastDeploy C++ SDK in Android](../../../../../docs/en/faq/use_cpp_sdk_on_android.md) diff --git a/examples/vision/ocr/PP-OCRv2/android/README_CN.md b/examples/vision/ocr/PP-OCRv2/android/README_CN.md deleted file mode 100644 index 4e501ae124..0000000000 --- a/examples/vision/ocr/PP-OCRv2/android/README_CN.md +++ /dev/null @@ -1,203 +0,0 @@ -[English](README.md) | 简体中文 -# OCR文字识别 Android Demo 使用文档 - -在 Android 上实现实时的OCR文字识别功能,此 Demo 有很好的的易用性和开放性,如在 Demo 中跑自己训练好的模型等。 - -## 环境准备 - -1. 在本地环境安装好 Android Studio 工具,详细安装方法请见[Android Stuido 官网](https://developer.android.com/studio)。 -2. 准备一部 Android 手机,并开启 USB 调试模式。开启方法: `手机设置 -> 查找开发者选项 -> 打开开发者选项和 USB 调试模式` - -## 部署步骤 - -1. OCR文字识别 Demo 位于 `fastdeploy/examples/vision/ocr/PP-OCRv2/android` 目录 -2. 用 Android Studio 打开 PP-OCRv2/android 工程 -3. 手机连接电脑,打开 USB 调试和文件传输模式,并在 Android Studio 上连接自己的手机设备(手机需要开启允许从 USB 安装软件权限) - -

-image -

- -> **注意:** ->> 如果您在导入项目、编译或者运行过程中遇到 NDK 配置错误的提示,请打开 ` File > Project Structure > SDK Location`,修改 `Andriod SDK location` 为您本机配置的 SDK 所在路径。 - -4. 点击 Run 按钮,自动编译 APP 并安装到手机。(该过程会自动下载预编译的 FastDeploy Android 库 以及 模型文件,需要联网) - 成功后效果如下,图一:APP 安装到手机;图二: APP 打开后的效果,会自动识别图片中的物体并标记;图三:APP设置选项,点击右上角的设置图片,可以设置不同选项进行体验。 - -| APP 图标 | APP 效果 | APP设置项 - | --- | --- | --- | -| ![app_pic](https://user-images.githubusercontent.com/14995488/203484427-83de2316-fd60-4baf-93b6-3755f9b5559d.jpg) | ![app_res](https://user-images.githubusercontent.com/14995488/203495616-af42a5b7-d3bc-4fce-8d5e-2ed88454f618.jpg) | ![app_setup](https://user-images.githubusercontent.com/14995488/203484436-57fdd041-7dcc-4e0e-b6cb-43e5ac1e729b.jpg) | - -### PP-OCRv2 Java API 说明 - -- 模型初始化 API: 模型初始化API包含两种方式,方式一是通过构造函数直接初始化;方式二是,通过调用init函数,在合适的程序节点进行初始化。 PP-OCR初始化参数说明如下: - - modelFile: String, paddle格式的模型文件路径,如 model.pdmodel - - paramFile: String, paddle格式的参数文件路径,如 model.pdiparams - - labelFile: String, 可选参数,表示label标签文件所在路径,用于可视化,如 ppocr_keys_v1.txt,每一行包含一个label - - option: RuntimeOption,可选参数,模型初始化option。如果不传入该参数则会使用默认的运行时选项。 - 与其他模型不同的是,PP-OCRv2 包含 DBDetector、Classifier和Recognizer等基础模型,以及pipeline类型。 -```java -// 构造函数: constructor w/o label file -public DBDetector(String modelFile, String paramsFile); -public DBDetector(String modelFile, String paramsFile, RuntimeOption option); -public Classifier(String modelFile, String paramsFile); -public Classifier(String modelFile, String paramsFile, RuntimeOption option); -public Recognizer(String modelFile, String paramsFile, String labelPath); -public Recognizer(String modelFile, String paramsFile, String labelPath, RuntimeOption option); -public PPOCRv2(); // 空构造函数,之后可以调用init初始化 -// Constructor w/o classifier -public PPOCRv2(DBDetector detModel, Recognizer recModel); -public PPOCRv2(DBDetector detModel, Classifier clsModel, Recognizer recModel); -``` -- 模型预测 
API:模型预测API包含直接预测的API以及带可视化功能的API。直接预测是指,不保存图片以及不渲染结果到Bitmap上,仅预测推理结果。预测并且可视化是指,预测结果以及可视化,并将可视化后的图片保存到指定的途径,以及将可视化结果渲染在Bitmap(目前支持ARGB8888格式的Bitmap), 后续可将该Bitmap在camera中进行显示。 -```java -// 直接预测:不保存图片以及不渲染结果到Bitmap上 -public OCRResult predict(Bitmap ARGB8888Bitmap); -// 预测并且可视化:预测结果以及可视化,并将可视化后的图片保存到指定的途径,以及将可视化结果渲染在Bitmap上 -public OCRResult predict(Bitmap ARGB8888Bitmap, String savedImagePath); -public OCRResult predict(Bitmap ARGB8888Bitmap, boolean rendering); // 只渲染 不保存图片 -``` -- 模型资源释放 API:调用 release() API 可以释放模型资源,返回true表示释放成功,false表示失败;调用 initialized() 可以判断模型是否初始化成功,true表示初始化成功,false表示失败。 -```java -public boolean release(); // 释放native资源 -public boolean initialized(); // 检查是否初始化成功 -``` - -- RuntimeOption设置说明 - -```java -public void enableLiteFp16(); // 开启fp16精度推理 -public void disableLiteFP16(); // 关闭fp16精度推理 -public void enableLiteInt8(); // 开启int8精度推理,针对量化模型 -public void disableLiteInt8(); // 关闭int8精度推理 -public void setCpuThreadNum(int threadNum); // 设置线程数 -public void setLitePowerMode(LitePowerMode mode); // 设置能耗模式 -public void setLitePowerMode(String modeStr); // 通过字符串形式设置能耗模式 -``` - -- 模型结果OCRResult说明 -```java -public class OCRResult { - public int[][] mBoxes; // 表示单张图片检测出来的所有目标框坐标,每个框以8个int数值依次表示框的4个坐标点,顺序为左下,右下,右上,左上 - public String[] mText; // 表示多个文本框内被识别出来的文本内容 - public float[] mRecScores; // 表示文本框内识别出来的文本的置信度 - public float[] mClsScores; // 表示文本框的分类结果的置信度 - public int[] mClsLabels; // 表示文本框的方向分类类别 - public boolean mInitialized = false; // 检测结果是否有效 -} -``` -其他参考:C++/Python对应的OCRResult说明: [api/vision_results/ocr_result.md](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/api/vision_results/ocr_result.md) - - -- 模型调用示例1:使用构造函数 -```java -import java.nio.ByteBuffer; -import android.graphics.Bitmap; -import android.opengl.GLES20; - -import com.baidu.paddle.fastdeploy.RuntimeOption; -import com.baidu.paddle.fastdeploy.LitePowerMode; -import com.baidu.paddle.fastdeploy.vision.OCRResult; -import com.baidu.paddle.fastdeploy.vision.ocr.Classifier; 
-import com.baidu.paddle.fastdeploy.vision.ocr.DBDetector; -import com.baidu.paddle.fastdeploy.vision.ocr.Recognizer; - -// 模型路径 -String detModelFile = "ch_PP-OCRv2_det_infer/inference.pdmodel"; -String detParamsFile = "ch_PP-OCRv2_det_infer/inference.pdiparams"; -String clsModelFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdmodel"; -String clsParamsFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdiparams"; -String recModelFile = "ch_PP-OCRv2_rec_infer/inference.pdmodel"; -String recParamsFile = "ch_PP-OCRv2_rec_infer/inference.pdiparams"; -String recLabelFilePath = "labels/ppocr_keys_v1.txt"; -// 设置RuntimeOption -RuntimeOption detOption = new RuntimeOption(); -RuntimeOption clsOption = new RuntimeOption(); -RuntimeOption recOption = new RuntimeOption(); -detOption.setCpuThreadNum(2); -clsOption.setCpuThreadNum(2); -recOption.setCpuThreadNum(2); -detOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -clsOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -recOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -detOption.enableLiteFp16(); -clsOption.enableLiteFp16(); -recOption.enableLiteFp16(); -// 初始化模型 -DBDetector detModel = new DBDetector(detModelFile, detParamsFile, detOption); -Classifier clsModel = new Classifier(clsModelFile, clsParamsFile, clsOption); -Recognizer recModel = new Recognizer(recModelFile, recParamsFile, recLabelFilePath, recOption); -PPOCRv2 model = new PPOCRv2(detModel, clsModel, recModel); - -// 读取图片: 以下仅为读取Bitmap的伪代码 -ByteBuffer pixelBuffer = ByteBuffer.allocate(width * height * 4); -GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer); -Bitmap ARGB8888ImageBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); -ARGB8888ImageBitmap.copyPixelsFromBuffer(pixelBuffer); - -// 模型推理 -OCRResult result = model.predict(ARGB8888ImageBitmap); - -// 释放模型资源 -model.release(); -``` - -- 模型调用示例2: 在合适的程序节点,手动调用init -```java -// import 同上 ... 
-import com.baidu.paddle.fastdeploy.RuntimeOption; -import com.baidu.paddle.fastdeploy.LitePowerMode; -import com.baidu.paddle.fastdeploy.vision.OCRResult; -import com.baidu.paddle.fastdeploy.vision.ocr.Classifier; -import com.baidu.paddle.fastdeploy.vision.ocr.DBDetector; -import com.baidu.paddle.fastdeploy.vision.ocr.Recognizer; -// 新建空模型 -PPOCRv2 model = new PPOCRv2(); -// 模型路径 -String detModelFile = "ch_PP-OCRv2_det_infer/inference.pdmodel"; -String detParamsFile = "ch_PP-OCRv2_det_infer/inference.pdiparams"; -String clsModelFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdmodel"; -String clsParamsFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdiparams"; -String recModelFile = "ch_PP-OCRv2_rec_infer/inference.pdmodel"; -String recParamsFile = "ch_PP-OCRv2_rec_infer/inference.pdiparams"; -String recLabelFilePath = "labels/ppocr_keys_v1.txt"; -// 设置RuntimeOption -RuntimeOption detOption = new RuntimeOption(); -RuntimeOption clsOption = new RuntimeOption(); -RuntimeOption recOption = new RuntimeOption(); -detOption.setCpuThreadNum(2); -clsOption.setCpuThreadNum(2); -recOption.setCpuThreadNum(2); -detOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -clsOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -recOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -detOption.enableLiteFp16(); -clsOption.enableLiteFp16(); -recOption.enableLiteFp16(); -// 使用init函数初始化 -DBDetector detModel = new DBDetector(detModelFile, detParamsFile, detOption); -Classifier clsModel = new Classifier(clsModelFile, clsParamsFile, clsOption); -Recognizer recModel = new Recognizer(recModelFile, recParamsFile, recLabelFilePath, recOption); -model.init(detModel, clsModel, recModel); -// Bitmap读取、模型预测、资源释放 同上 ... 
-``` -更详细的用法请参考 [OcrMainActivity](./app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java)中的用法 - -## 替换 FastDeploy SDK和模型 -替换FastDeploy预测库和模型的步骤非常简单。预测库所在的位置为 `app/libs/fastdeploy-android-sdk-xxx.aar`,其中 `xxx` 表示当前您使用的预测库版本号。模型所在的位置为,`app/src/main/assets/models`。 -- 替换FastDeploy Android SDK: 下载或编译最新的FastDeploy Android SDK,解压缩后放在 `app/libs` 目录下;详细配置文档可参考: - - [在 Android 中使用 FastDeploy Java SDK](../../../../../java/android/) - -- 替换OCR模型的步骤: - - 将您的OCR模型放在 `app/src/main/assets/models` 目录下; - - 修改 `app/src/main/res/values/strings.xml` 中模型路径的默认值,如: -```xml - -models -labels/ppocr_keys_v1.txt -``` - -## 更多参考文档 -如果您想知道更多的FastDeploy Java API文档以及如何通过JNI来接入FastDeploy C++ API感兴趣,可以参考以下内容: -- [在 Android 中使用 FastDeploy Java SDK](../../../../../java/android/) -- [在 Android 中使用 FastDeploy C++ SDK](../../../../../docs/cn/faq/use_cpp_sdk_on_android.md) diff --git a/examples/vision/ocr/PP-OCRv2/android/app/build.gradle b/examples/vision/ocr/PP-OCRv2/android/app/build.gradle deleted file mode 100644 index 6c2585f174..0000000000 --- a/examples/vision/ocr/PP-OCRv2/android/app/build.gradle +++ /dev/null @@ -1,125 +0,0 @@ -import java.security.MessageDigest - -apply plugin: 'com.android.application' - -android { - compileSdk 28 - - defaultConfig { - applicationId 'com.baidu.paddle.fastdeploy.app.examples' - minSdkVersion 15 - //noinspection ExpiredTargetSdkVersion - targetSdkVersion 28 - versionCode 1 - versionName "1.0" - testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" - } - - buildTypes { - release { - minifyEnabled false - proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' - } - } - -} - -dependencies { - implementation fileTree(include: ['*.aar'], dir: 'libs') - implementation 'com.android.support:appcompat-v7:28.0.0' - //noinspection GradleDependency - implementation 'com.android.support.constraint:constraint-layout:1.1.3' - implementation 'com.android.support:design:28.0.0' - 
implementation 'org.jetbrains:annotations:15.0' - //noinspection GradleDependency - testImplementation 'junit:junit:4.12' - androidTestImplementation 'com.android.support.test:runner:1.0.2' - androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2' -} - -def FD_MODEL = [ - [ - 'src' : 'https://bj.bcebos.com/paddlehub/fastdeploy/ch_PP-OCRv2_det_infer.tgz', - 'dest': 'src/main/assets/models' - ], - [ - 'src' : 'https://bj.bcebos.com/paddlehub/fastdeploy/ch_ppocr_mobile_v2.0_cls_infer.tgz', - 'dest': 'src/main/assets/models' - ], - [ - 'src' : 'https://bj.bcebos.com/paddlehub/fastdeploy/ch_PP-OCRv2_rec_infer.tgz', - 'dest': 'src/main/assets/models' - ], -] - -def FD_JAVA_SDK = [ - [ - 'src' : 'https://bj.bcebos.com/fastdeploy/test/fastdeploy-android-sdk-latest-dev.aar', - 'dest': 'libs' - ] -] - -task downloadAndExtractModels(type: DefaultTask) { - doFirst { - println "Downloading and extracting fastdeploy models ..." - } - doLast { - String cachePath = "cache" - if (!file("${cachePath}").exists()) { - mkdir "${cachePath}" - } - FD_MODEL.eachWithIndex { model, index -> - MessageDigest messageDigest = MessageDigest.getInstance('MD5') - messageDigest.update(model.src.bytes) - String[] modelPaths = model.src.split("/") - String modelName = modelPaths[modelPaths.length - 1] - // Download the target model if not exists - boolean copyFiles = !file("${model.dest}").exists() - if (!file("${cachePath}/${modelName}").exists()) { - println "Downloading ${model.src} -> ${cachePath}/${modelName}" - ant.get(src: model.src, dest: file("${cachePath}/${modelName}")) - copyFiles = true - } - if (copyFiles) { - println "Coping ${cachePath}/${modelName} -> ${model.dest}" - copy { - from tarTree("${cachePath}/${modelName}") - into "${model.dest}" - } - } - } - } -} - -task downloadAndExtractSDKs(type: DefaultTask) { - doFirst { - println "Downloading and extracting fastdeploy android java sdk ..." 
- } - doLast { - String cachePath = "cache" - if (!file("${cachePath}").exists()) { - mkdir "${cachePath}" - } - FD_JAVA_SDK.eachWithIndex { sdk, index -> - String[] sdkPaths = sdk.src.split("/") - String sdkName = sdkPaths[sdkPaths.length - 1] - // Download the target SDK if not exists - boolean copyFiles = !file("${sdk.dest}/${sdkName}").exists() - if (!file("${cachePath}/${sdkName}").exists()) { - println "Downloading ${sdk.src} -> ${cachePath}/${sdkName}" - ant.get(src: sdk.src, dest: file("${cachePath}/${sdkName}")) - copyFiles = true - } - if (copyFiles) { - println "Coping ${cachePath}/${sdkName} -> ${sdk.dest}/${sdkName}" - copy { - from "${cachePath}/${sdkName}" - into "${sdk.dest}" - } - } - } - } -} - -preBuild.dependsOn downloadAndExtractSDKs -preBuild.dependsOn downloadAndExtractModels \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java b/examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java deleted file mode 100644 index 4dc1885b3b..0000000000 --- a/examples/vision/ocr/PP-OCRv2/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java +++ /dev/null @@ -1,500 +0,0 @@ -package com.baidu.paddle.fastdeploy.app.examples.ocr; - -import static com.baidu.paddle.fastdeploy.app.ui.Utils.decodeBitmap; -import static com.baidu.paddle.fastdeploy.app.ui.Utils.getRealPathFromURI; - -import android.Manifest; -import android.annotation.SuppressLint; -import android.app.Activity; -import android.app.AlertDialog; -import android.content.DialogInterface; -import android.content.Intent; -import android.content.SharedPreferences; -import android.content.pm.PackageManager; -import android.graphics.Bitmap; -import android.net.Uri; -import android.os.Bundle; -import android.os.SystemClock; -import android.preference.PreferenceManager; -import 
android.support.annotation.NonNull; -import android.support.v4.app.ActivityCompat; -import android.support.v4.content.ContextCompat; -import android.view.View; -import android.view.ViewGroup; -import android.view.Window; -import android.view.WindowManager; -import android.widget.ImageButton; -import android.widget.ImageView; -import android.widget.SeekBar; -import android.widget.TextView; - -import com.baidu.paddle.fastdeploy.RuntimeOption; -import com.baidu.paddle.fastdeploy.app.examples.R; -import com.baidu.paddle.fastdeploy.app.ui.view.CameraSurfaceView; -import com.baidu.paddle.fastdeploy.app.ui.view.ResultListView; -import com.baidu.paddle.fastdeploy.app.ui.Utils; -import com.baidu.paddle.fastdeploy.app.ui.view.adapter.BaseResultAdapter; -import com.baidu.paddle.fastdeploy.app.ui.view.model.BaseResultModel; -import com.baidu.paddle.fastdeploy.pipeline.PPOCRv2; -import com.baidu.paddle.fastdeploy.vision.OCRResult; -import com.baidu.paddle.fastdeploy.vision.Visualize; -import com.baidu.paddle.fastdeploy.vision.ocr.Classifier; -import com.baidu.paddle.fastdeploy.vision.ocr.DBDetector; -import com.baidu.paddle.fastdeploy.vision.ocr.Recognizer; - -import java.math.BigDecimal; -import java.util.ArrayList; -import java.util.List; - -public class OcrMainActivity extends Activity implements View.OnClickListener, CameraSurfaceView.OnTextureChangedListener { - private static final String TAG = OcrMainActivity.class.getSimpleName(); - - CameraSurfaceView svPreview; - TextView tvStatus; - ImageButton btnSwitch; - ImageButton btnShutter; - ImageButton btnSettings; - ImageView realtimeToggleButton; - boolean isRealtimeStatusRunning = false; - ImageView backInPreview; - private ImageView albumSelectButton; - private View cameraPageView; - private ViewGroup resultPageView; - private ImageView resultImage; - private ImageView backInResult; - private SeekBar confidenceSeekbar; - private TextView seekbarText; - private float resultNum = 1.0f; - private ResultListView resultView; 
- private Bitmap picBitmap; - private Bitmap shutterBitmap; - private Bitmap originPicBitmap; - private Bitmap originShutterBitmap; - private boolean isShutterBitmapCopied = false; - - public static final int TYPE_UNKNOWN = -1; - public static final int BTN_SHUTTER = 0; - public static final int ALBUM_SELECT = 1; - public static final int REALTIME_DETECT = 2; - private static int TYPE = REALTIME_DETECT; - - private static final int REQUEST_PERMISSION_CODE_STORAGE = 101; - private static final int INTENT_CODE_PICK_IMAGE = 100; - private static final int TIME_SLEEP_INTERVAL = 50; // ms - - long timeElapsed = 0; - long frameCounter = 0; - - // Call 'init' and 'release' manually later - PPOCRv2 predictor = new PPOCRv2(); - - private String[] texts; - private float[] recScores; - private boolean initialized; - private List results = new ArrayList<>(); - - @Override - protected void onCreate(Bundle savedInstanceState) { - super.onCreate(savedInstanceState); - - // Fullscreen - requestWindowFeature(Window.FEATURE_NO_TITLE); - getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN); - - setContentView(R.layout.ocr_activity_main); - - // Clear all setting items to avoid app crashing due to the incorrect settings - initSettings(); - - // Check and request CAMERA and WRITE_EXTERNAL_STORAGE permissions - if (!checkAllPermissions()) { - requestAllPermissions(); - } - - // Init the camera preview and UI components - initView(); - } - - @SuppressLint("NonConstantResourceId") - @Override - public void onClick(View v) { - switch (v.getId()) { - case R.id.btn_switch: - svPreview.switchCamera(); - break; - case R.id.btn_shutter: - TYPE = BTN_SHUTTER; - shutterAndPauseCamera(); - resultView.setAdapter(null); - break; - case R.id.btn_settings: - startActivity(new Intent(OcrMainActivity.this, OcrSettingsActivity.class)); - break; - case R.id.realtime_toggle_btn: - toggleRealtimeStyle(); - break; - case R.id.back_in_preview: - 
finish(); - break; - case R.id.iv_select: - TYPE = ALBUM_SELECT; - // Judge whether authority has been granted. - if (ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { - // If this permission was requested before the application but the user refused the request, this method will return true. - ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_PERMISSION_CODE_STORAGE); - } else { - Intent intent = new Intent(Intent.ACTION_PICK); - intent.setType("image/*"); - startActivityForResult(intent, INTENT_CODE_PICK_IMAGE); - } - resultView.setAdapter(null); - break; - case R.id.back_in_result: - back(); - break; - } - } - - @Override - public void onBackPressed() { - super.onBackPressed(); - back(); - } - - private void back() { - resultPageView.setVisibility(View.GONE); - cameraPageView.setVisibility(View.VISIBLE); - TYPE = REALTIME_DETECT; - isShutterBitmapCopied = false; - svPreview.onResume(); - results.clear(); - if (texts != null) { - texts = null; - } - if (recScores != null) { - recScores = null; - } - } - - private void shutterAndPauseCamera() { - new Thread(new Runnable() { - @Override - public void run() { - try { - // Sleep some times to ensure picture has been correctly shut. - Thread.sleep(TIME_SLEEP_INTERVAL * 10); // 500ms - } catch (InterruptedException e) { - e.printStackTrace(); - } - runOnUiThread(new Runnable() { - @SuppressLint("SetTextI18n") - public void run() { - // These code will run in main thread. 
- svPreview.onPause(); - cameraPageView.setVisibility(View.GONE); - resultPageView.setVisibility(View.VISIBLE); - seekbarText.setText(resultNum + ""); - confidenceSeekbar.setProgress((int) (resultNum * 100)); - if (shutterBitmap != null && !shutterBitmap.isRecycled()) { - resultImage.setImageBitmap(shutterBitmap); - } else { - new AlertDialog.Builder(OcrMainActivity.this) - .setTitle("Empty Result!") - .setMessage("Current picture is empty, please shutting it again!") - .setCancelable(true) - .show(); - } - } - }); - - } - }).start(); - } - - private void copyBitmapFromCamera(Bitmap ARGB8888ImageBitmap) { - if (isShutterBitmapCopied || ARGB8888ImageBitmap == null) { - return; - } - if (!ARGB8888ImageBitmap.isRecycled()) { - synchronized (this) { - shutterBitmap = ARGB8888ImageBitmap.copy(Bitmap.Config.ARGB_8888, true); - originShutterBitmap = ARGB8888ImageBitmap.copy(Bitmap.Config.ARGB_8888, true); - } - SystemClock.sleep(TIME_SLEEP_INTERVAL); - isShutterBitmapCopied = true; - } - } - - - @Override - protected void onActivityResult(int requestCode, int resultCode, Intent data) { - super.onActivityResult(requestCode, resultCode, data); - if (requestCode == INTENT_CODE_PICK_IMAGE) { - if (resultCode == Activity.RESULT_OK) { - cameraPageView.setVisibility(View.GONE); - resultPageView.setVisibility(View.VISIBLE); - seekbarText.setText(resultNum + ""); - confidenceSeekbar.setProgress((int) (resultNum * 100)); - Uri uri = data.getData(); - String path = getRealPathFromURI(this, uri); - picBitmap = decodeBitmap(path, 720, 1280); - originPicBitmap = picBitmap.copy(Bitmap.Config.ARGB_8888, true); - resultImage.setImageBitmap(picBitmap); - } - } - } - - private void toggleRealtimeStyle() { - if (isRealtimeStatusRunning) { - isRealtimeStatusRunning = false; - realtimeToggleButton.setImageResource(R.drawable.realtime_stop_btn); - svPreview.setOnTextureChangedListener(this); - tvStatus.setVisibility(View.VISIBLE); - } else { - isRealtimeStatusRunning = true; - 
realtimeToggleButton.setImageResource(R.drawable.realtime_start_btn); - tvStatus.setVisibility(View.GONE); - isShutterBitmapCopied = false; - svPreview.setOnTextureChangedListener(new CameraSurfaceView.OnTextureChangedListener() { - @Override - public boolean onTextureChanged(Bitmap ARGB8888ImageBitmap) { - if (TYPE == BTN_SHUTTER) { - copyBitmapFromCamera(ARGB8888ImageBitmap); - } - return false; - } - }); - } - } - - @Override - public boolean onTextureChanged(Bitmap ARGB8888ImageBitmap) { - if (TYPE == BTN_SHUTTER) { - copyBitmapFromCamera(ARGB8888ImageBitmap); - return false; - } - - boolean modified = false; - - long tc = System.currentTimeMillis(); - OCRResult result = predictor.predict(ARGB8888ImageBitmap); - timeElapsed += (System.currentTimeMillis() - tc); - - Visualize.visOcr(ARGB8888ImageBitmap, result); - modified = result.initialized(); - - frameCounter++; - if (frameCounter >= 30) { - final int fps = (int) (1000 / (timeElapsed / 30)); - runOnUiThread(new Runnable() { - @SuppressLint("SetTextI18n") - public void run() { - tvStatus.setText(Integer.toString(fps) + "fps"); - } - }); - frameCounter = 0; - timeElapsed = 0; - } - return modified; - } - - @Override - protected void onResume() { - super.onResume(); - // Reload settings and re-initialize the predictor - checkAndUpdateSettings(); - // Open camera until the permissions have been granted - if (!checkAllPermissions()) { - svPreview.disableCamera(); - } else { - svPreview.enableCamera(); - } - svPreview.onResume(); - } - - @Override - protected void onPause() { - super.onPause(); - svPreview.onPause(); - } - - @Override - protected void onDestroy() { - if (predictor != null) { - predictor.release(); - } - super.onDestroy(); - } - - public void initView() { - TYPE = REALTIME_DETECT; - svPreview = (CameraSurfaceView) findViewById(R.id.sv_preview); - svPreview.setOnTextureChangedListener(this); - tvStatus = (TextView) findViewById(R.id.tv_status); - btnSwitch = (ImageButton) 
findViewById(R.id.btn_switch); - btnSwitch.setOnClickListener(this); - btnShutter = (ImageButton) findViewById(R.id.btn_shutter); - btnShutter.setOnClickListener(this); - btnSettings = (ImageButton) findViewById(R.id.btn_settings); - btnSettings.setOnClickListener(this); - realtimeToggleButton = findViewById(R.id.realtime_toggle_btn); - realtimeToggleButton.setOnClickListener(this); - backInPreview = findViewById(R.id.back_in_preview); - backInPreview.setOnClickListener(this); - albumSelectButton = findViewById(R.id.iv_select); - albumSelectButton.setOnClickListener(this); - cameraPageView = findViewById(R.id.camera_page); - resultPageView = findViewById(R.id.result_page); - resultImage = findViewById(R.id.result_image); - backInResult = findViewById(R.id.back_in_result); - backInResult.setOnClickListener(this); - confidenceSeekbar = findViewById(R.id.confidence_seekbar); - seekbarText = findViewById(R.id.seekbar_text); - resultView = findViewById(R.id.result_list_view); - - confidenceSeekbar.setMax(100); - confidenceSeekbar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() { - @Override - public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { - float resultConfidence = seekBar.getProgress() / 100f; - BigDecimal bd = new BigDecimal(resultConfidence); - resultNum = bd.setScale(1, BigDecimal.ROUND_HALF_UP).floatValue(); - seekbarText.setText(resultNum + ""); - confidenceSeekbar.setProgress((int) (resultNum * 100)); - results.clear(); - } - - @Override - public void onStartTrackingTouch(SeekBar seekBar) { - - } - - @Override - public void onStopTrackingTouch(SeekBar seekBar) { - runOnUiThread(new Runnable() { - @Override - public void run() { - if (TYPE == ALBUM_SELECT) { - SystemClock.sleep(TIME_SLEEP_INTERVAL * 10); - detail(picBitmap); - picBitmap = originPicBitmap.copy(Bitmap.Config.ARGB_8888, true); - } else { - SystemClock.sleep(TIME_SLEEP_INTERVAL * 10); - detail(shutterBitmap); - shutterBitmap = 
originShutterBitmap.copy(Bitmap.Config.ARGB_8888, true); - } - } - }); - } - }); - } - - private void detail(Bitmap bitmap) { - OCRResult result = predictor.predict(bitmap, true); - - texts = result.mText; - recScores = result.mRecScores; - - initialized = result.initialized(); - if (initialized) { - for (int i = 0; i < texts.length; i++) { - if (recScores[i] > resultNum) { - results.add(new BaseResultModel(i + 1, texts[i], recScores[i])); - } - } - } - BaseResultAdapter adapter = new BaseResultAdapter(getBaseContext(), R.layout.ocr_result_page_item, results); - resultView.setAdapter(adapter); - resultView.invalidate(); - - resultImage.setImageBitmap(bitmap); - resultNum = 1.0f; - } - - @SuppressLint("ApplySharedPref") - public void initSettings() { - SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this); - SharedPreferences.Editor editor = sharedPreferences.edit(); - editor.clear(); - editor.commit(); - OcrSettingsActivity.resetSettings(); - } - - public void checkAndUpdateSettings() { - if (OcrSettingsActivity.checkAndUpdateSettings(this)) { - String realModelDir = getCacheDir() + "/" + OcrSettingsActivity.modelDir; - String detModelName = "ch_PP-OCRv2_det_infer"; - // String detModelName = "ch_ppocr_mobile_v2.0_det_infer"; - String clsModelName = "ch_ppocr_mobile_v2.0_cls_infer"; - // String recModelName = "ch_ppocr_mobile_v2.0_rec_infer"; - String recModelName = "ch_PP-OCRv2_rec_infer"; - String realDetModelDir = realModelDir + "/" + detModelName; - String realClsModelDir = realModelDir + "/" + clsModelName; - String realRecModelDir = realModelDir + "/" + recModelName; - String srcDetModelDir = OcrSettingsActivity.modelDir + "/" + detModelName; - String srcClsModelDir = OcrSettingsActivity.modelDir + "/" + clsModelName; - String srcRecModelDir = OcrSettingsActivity.modelDir + "/" + recModelName; - Utils.copyDirectoryFromAssets(this, srcDetModelDir, realDetModelDir); - Utils.copyDirectoryFromAssets(this, srcClsModelDir, 
realClsModelDir); - Utils.copyDirectoryFromAssets(this, srcRecModelDir, realRecModelDir); - String realLabelPath = getCacheDir() + "/" + OcrSettingsActivity.labelPath; - Utils.copyFileFromAssets(this, OcrSettingsActivity.labelPath, realLabelPath); - - String detModelFile = realDetModelDir + "/" + "inference.pdmodel"; - String detParamsFile = realDetModelDir + "/" + "inference.pdiparams"; - String clsModelFile = realClsModelDir + "/" + "inference.pdmodel"; - String clsParamsFile = realClsModelDir + "/" + "inference.pdiparams"; - String recModelFile = realRecModelDir + "/" + "inference.pdmodel"; - String recParamsFile = realRecModelDir + "/" + "inference.pdiparams"; - String recLabelFilePath = realLabelPath; // ppocr_keys_v1.txt - RuntimeOption detOption = new RuntimeOption(); - RuntimeOption clsOption = new RuntimeOption(); - RuntimeOption recOption = new RuntimeOption(); - detOption.setCpuThreadNum(OcrSettingsActivity.cpuThreadNum); - clsOption.setCpuThreadNum(OcrSettingsActivity.cpuThreadNum); - recOption.setCpuThreadNum(OcrSettingsActivity.cpuThreadNum); - detOption.setLitePowerMode(OcrSettingsActivity.cpuPowerMode); - clsOption.setLitePowerMode(OcrSettingsActivity.cpuPowerMode); - recOption.setLitePowerMode(OcrSettingsActivity.cpuPowerMode); - if (Boolean.parseBoolean(OcrSettingsActivity.enableLiteFp16)) { - detOption.enableLiteFp16(); - clsOption.enableLiteFp16(); - recOption.enableLiteFp16(); - } - DBDetector detModel = new DBDetector(detModelFile, detParamsFile, detOption); - Classifier clsModel = new Classifier(clsModelFile, clsParamsFile, clsOption); - Recognizer recModel = new Recognizer(recModelFile, recParamsFile, recLabelFilePath, recOption); - predictor.init(detModel, clsModel, recModel); - - } - } - - @Override - public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, - @NonNull int[] grantResults) { - super.onRequestPermissionsResult(requestCode, permissions, grantResults); - if (grantResults[0] != 
PackageManager.PERMISSION_GRANTED || grantResults[1] != PackageManager.PERMISSION_GRANTED) { - new AlertDialog.Builder(OcrMainActivity.this) - .setTitle("Permission denied") - .setMessage("Click to force quit the app, then open Settings->Apps & notifications->Target " + - "App->Permissions to grant all of the permissions.") - .setCancelable(false) - .setPositiveButton("Exit", new DialogInterface.OnClickListener() { - @Override - public void onClick(DialogInterface dialog, int which) { - OcrMainActivity.this.finish(); - } - }).show(); - } - } - - private void requestAllPermissions() { - ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE, - Manifest.permission.CAMERA}, 0); - } - - private boolean checkAllPermissions() { - return ContextCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED - && ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) == PackageManager.PERMISSION_GRANTED; - } -} diff --git a/examples/vision/ocr/PP-OCRv2/android/local.properties b/examples/vision/ocr/PP-OCRv2/android/local.properties deleted file mode 100644 index 01bb38fb05..0000000000 --- a/examples/vision/ocr/PP-OCRv2/android/local.properties +++ /dev/null @@ -1,8 +0,0 @@ -## This file must *NOT* be checked into Version Control Systems, -# as it contains information specific to your local configuration. -# -# Location of the SDK. This is only used by Gradle. -# For customization when using a Version Control System, please read the -# header note. 
-#Tue Nov 22 15:46:21 CST 2022 -sdk.dir=D\:\\androidsdk diff --git a/examples/vision/ocr/PP-OCRv2/c/README.md b/examples/vision/ocr/PP-OCRv2/c/README.md deleted file mode 100755 index d8721ae51b..0000000000 --- a/examples/vision/ocr/PP-OCRv2/c/README.md +++ /dev/null @@ -1,251 +0,0 @@ -English | [简体中文](README_CN.md) -# PPOCRv2 C Deployment Example - -This directory provides `infer.c` to finish the deployment of PPOCRv2 on CPU/GPU. - -Before deployment, two steps require confirmation - -- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) -- 2. Download the precompiled deployment library and samples code according to your development environment. Refer to [FastDeploy Precompiled Library](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) - -Taking inference on Linux as an example, the compilation test can be completed by executing the following command in this directory. FastDeploy version 1.0.4 or above (x.x.x>=1.0.4) is required to support this model. - - -```bash -mkdir build -cd build -# Download the FastDeploy precompiled library. Users can choose your appropriate version in the `FastDeploy Precompiled Library` mentioned above -wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz -tar xvf fastdeploy-linux-x64-x.x.x.tgz -cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x -make -j - - -# Download model, image, and dictionary files -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar -tar -xvf ch_PP-OCRv2_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar -tar -xvf ch_PP-OCRv2_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# CPU inference -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 -# GPU inference -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -``` - -The above command works for Linux or MacOS. For SDK in Windows, refer to: -- [How to use FastDeploy C++ SDK in Windows](../../../../../docs/en/faq/use_sdk_on_windows.md) - -The visualized result after running is as follows - - - - - -## PPOCRv2 C Interface - -### RuntimeOption - -```c -FD_C_RuntimeOptionWrapper* FD_C_CreateRuntimeOptionWrapper() -``` - -> Create a RuntimeOption object, and return a pointer to manipulate it. -> -> **Return** -> -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Pointer to manipulate RuntimeOption object. - - -```c -void FD_C_RuntimeOptionWrapperUseCpu( - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper) -``` - -> Enable Cpu inference. -> -> **Params** -> -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Pointer to manipulate RuntimeOption object. - -```c -void FD_C_RuntimeOptionWrapperUseGpu( - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - int gpu_id) -``` -> Enable Gpu inference. 
-> -> **Params** -> -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Pointer to manipulate RuntimeOption object. - -> * **gpu_id**(int): gpu id - - -### Model - -```c - -FD_C_DBDetectorWrapper* FD_C_CreateDBDetectorWrapper( - const char* model_file, const char* params_file, - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - const FD_C_ModelFormat model_format -) - -``` - -> Create a DBDetector model object, and return a pointer to manipulate it. -> -> **Params** -> -> * **model_file**(const char*): Model file path -> * **params_file**(const char*): Parameter file path -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Backend inference configuration. None by default, which is the default configuration -> * **model_format**(FD_C_ModelFormat): Model format. -> -> **Return** -> * **fd_c_dbdetector_wrapper**(FD_C_DBDetectorWrapper*): Pointer to manipulate DBDetector object. - -```c -FD_C_ClassifierWrapper* FD_C_CreateClassifierWrapper( - const char* model_file, const char* params_file, - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - const FD_C_ModelFormat model_format -) -``` -> Create a Classifier model object, and return a pointer to manipulate it. -> -> **Params** -> -> * **model_file**(const char*): Model file path -> * **params_file**(const char*): Parameter file path -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Backend inference configuration. None by default, which is the default configuration -> * **model_format**(FD_C_ModelFormat): Model format. -> -> **Return** -> -> * **fd_c_classifier_wrapper**(FD_C_ClassifierWrapper*): Pointer to manipulate Classifier object. - -```c -FD_C_RecognizerWrapper* FD_C_CreateRecognizerWrapper( - const char* model_file, const char* params_file, const char* label_path, - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - const FD_C_ModelFormat model_format -) -``` -> Create a Recognizer model object, and return a pointer to manipulate it. 
-> -> **Params** -> -> * **model_file**(const char*): Model file path -> * **params_file**(const char*): Parameter file path -> * **label_path**(const char*): Label file path -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Backend inference configuration. None by default, which is the default configuration -> * **model_format**(FD_C_ModelFormat): Model format. -> -> **Return** -> * **fd_c_recognizer_wrapper**(FD_C_RecognizerWrapper*): Pointer to manipulate Recognizer object. - -```c -FD_C_PPOCRv2Wrapper* FD_C_CreatePPOCRv2Wrapper( - FD_C_DBDetectorWrapper* det_model, - FD_C_ClassifierWrapper* cls_model, - FD_C_RecognizerWrapper* rec_model -) -``` -> Create a PPOCRv2 model object, and return a pointer to manipulate it. -> -> **Params** -> -> * **det_model**(FD_C_DBDetectorWrapper*): DBDetector model -> * **cls_model**(FD_C_ClassifierWrapper*): Classifier model -> * **rec_model**(FD_C_RecognizerWrapper*): Recognizer model -> -> **Return** -> -> * **fd_c_ppocrv2_wrapper**(FD_C_PPOCRv2Wrapper*): Pointer to manipulate PPOCRv2 object. - - - -#### Read and write image - -```c -FD_C_Mat FD_C_Imread(const char* imgpath) -``` - -> Read an image, and return a pointer to cv::Mat. -> -> **Params** -> -> * **imgpath**(const char*): image path -> -> **Return** -> -> * **imgmat**(FD_C_Mat): pointer to cv::Mat object which holds the image. - - -```c -FD_C_Bool FD_C_Imwrite(const char* savepath, FD_C_Mat img); -``` - -> Write image to a file. -> -> **Params** -> -> * **savepath**(const char*): save path -> * **img**(FD_C_Mat): pointer to cv::Mat object -> -> **Return** -> -> * **result**(FD_C_Bool): bool to indicate success or failure - - - -#### Prediction - -```c -FD_C_Bool FD_C_PPOCRv2WrapperPredict( - FD_C_PPOCRv2Wrapper* fd_c_ppocrv2_wrapper, - FD_C_Mat img, - FD_C_OCRResult* result) -``` -> -> Predict an image, and generate result. -> -> **Params** -> * **fd_c_ppocrv2_wrapper**(FD_C_PPOCRv2Wrapper*): Pointer to manipulate PPOCRv2 object. 
-> * **img**(FD_C_Mat): pointer to cv::Mat object, which can be obained by FD_C_Imread interface -> * **result**(FD_C_OCRResult*): OCR prediction results, including the position of the detection box from the detection model, the classification of the direction from the classification model, and the recognition result from the recognition model. Refer to [Vision Model Prediction Results](../../../../../docs/api/vision_results/) for OCRResult - -#### Result - -```c -FD_C_Mat FD_C_VisOcr(FD_C_Mat im, FD_C_OCRResult* ocr_result) -``` -> -> Visualize OCR results and return visualization image. -> -> **Params** -> * **im**(FD_C_Mat): pointer to input image -> * **ocr_result**(FD_C_OCRResult*): pointer to C FD_C_OCRResult structure -> -> **Return** -> * **vis_im**(FD_C_Mat): pointer to visualization image. - - - - -## Other Documents - -- [PPOCR Model Description](../../) -- [PPOCRv2 Python Deployment](../python) -- [Model Prediction Results](../../../../../docs/api/vision_results/) -- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv2/c/README_CN.md b/examples/vision/ocr/PP-OCRv2/c/README_CN.md deleted file mode 100644 index e017a89414..0000000000 --- a/examples/vision/ocr/PP-OCRv2/c/README_CN.md +++ /dev/null @@ -1,251 +0,0 @@ -[English](README.md) | 简体中文 -# PPOCRv2 C部署示例 - -本目录下提供`infer.c`来调用C API快速完成PPOCRv2模型在CPU/GPU上部署的示例。 - -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) -- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) - -以Linux上推理为例,在本目录执行如下命令即可完成编译测试,支持此模型需保证FastDeploy版本1.0.4以上(x.x.x>=1.0.4) - -```bash -mkdir build -cd build -# 下载FastDeploy预编译库,用户可在上文提到的`FastDeploy预编译库`中自行选择合适的版本使用 -wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz -tar xvf fastdeploy-linux-x64-x.x.x.tgz -cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x -make -j - - -# 下载模型,图片和字典文件 -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar -tar -xvf ch_PP-OCRv2_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar -tar -xvf ch_PP-OCRv2_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# CPU推理 -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 -# GPU推理 -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -``` - -以上命令只适用于Linux或MacOS, Windows下SDK的使用方式请参考: -- [如何在Windows中使用FastDeploy C++ SDK](../../../../../docs/cn/faq/use_sdk_on_windows.md) - -如果用户使用华为昇腾NPU部署, 请参考以下方式在部署前初始化部署环境: -- [如何使用华为昇腾NPU部署](../../../../../docs/cn/faq/use_sdk_on_ascend.md) - -运行完成可视化结果如下图所示 - - - - -## PPOCRv2 C API接口 - -### 配置 - -```c -FD_C_RuntimeOptionWrapper* FD_C_CreateRuntimeOptionWrapper() -``` - -> 创建一个RuntimeOption的配置对象,并且返回操作它的指针。 -> -> **返回** -> -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): 指向RuntimeOption对象的指针 - - -```c -void FD_C_RuntimeOptionWrapperUseCpu( - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper) -``` - -> 开启CPU推理 -> -> **参数** -> -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): 指向RuntimeOption对象的指针 - -```c -void FD_C_RuntimeOptionWrapperUseGpu( - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - int gpu_id) -``` -> 开启GPU推理 -> -> **参数** -> -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): 指向RuntimeOption对象的指针 -> * **gpu_id**(int): 显卡号 - - -### 模型 - -```c - -FD_C_DBDetectorWrapper* FD_C_CreateDBDetectorWrapper( - 
const char* model_file, const char* params_file, - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - const FD_C_ModelFormat model_format -) - -``` - -> 创建一个DBDetector的模型,并且返回操作它的指针。 -> -> **参数** -> -> * **model_file**(const char*): 模型文件路径 -> * **params_file**(const char*): 参数文件路径 -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): 指向RuntimeOption的指针,表示后端推理配置 -> * **model_format**(FD_C_ModelFormat): 模型格式 -> -> **返回** -> * **fd_c_dbdetector_wrapper**(FD_C_DBDetectorWrapper*): 指向DBDetector模型对象的指针 - -```c -FD_C_ClassifierWrapper* FD_C_CreateClassifierWrapper( - const char* model_file, const char* params_file, - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - const FD_C_ModelFormat model_format -) -``` -> 创建一个Classifier的模型,并且返回操作它的指针。 -> -> **参数** -> -> * **model_file**(const char*): 模型文件路径 -> * **params_file**(const char*): 参数文件路径 -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): 指向RuntimeOption的指针,表示后端推理配置 -> * **model_format**(FD_C_ModelFormat): 模型格式 -> -> **返回** -> -> * **fd_c_classifier_wrapper**(FD_C_ClassifierWrapper*): 指向Classifier模型对象的指针 - -```c -FD_C_RecognizerWrapper* FD_C_CreateRecognizerWrapper( - const char* model_file, const char* params_file, const char* label_path, - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - const FD_C_ModelFormat model_format -) -``` -> 创建一个Recognizer的模型,并且返回操作它的指针。 -> -> **参数** -> -> * **model_file**(const char*): 模型文件路径 -> * **params_file**(const char*): 参数文件路径 -> * **label_path**(const char*): 标签文件路径 -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): 指向RuntimeOption的指针,表示后端推理配置 -> * **model_format**(FD_C_ModelFormat): 模型格式 -> -> **返回** -> * **fd_c_recognizer_wrapper**(FD_C_RecognizerWrapper*): 指向Recognizer模型对象的指针 - -```c -FD_C_PPOCRv2Wrapper* FD_C_CreatePPOCRv2Wrapper( - FD_C_DBDetectorWrapper* det_model, - FD_C_ClassifierWrapper* cls_model, - FD_C_RecognizerWrapper* rec_model -) -``` -> 创建一个PPOCRv2的模型,并且返回操作它的指针。 -> -> **参数** -> -> * 
**det_model**(FD_C_DBDetectorWrapper*): DBDetector模型 -> * **cls_model**(FD_C_ClassifierWrapper*): Classifier模型 -> * **rec_model**(FD_C_RecognizerWrapper*): Recognizer模型 -> -> **返回** -> -> * **fd_c_ppocrv2_wrapper**(FD_C_PPOCRv2Wrapper*): 指向PPOCRv2模型对象的指针 - - - -#### 读写图像 - -```c -FD_C_Mat FD_C_Imread(const char* imgpath) -``` - -> 读取一个图像,并且返回cv::Mat的指针。 -> -> **参数** -> -> * **imgpath**(const char*): 图像文件路径 -> -> **返回** -> -> * **imgmat**(FD_C_Mat): 指向图像数据cv::Mat的指针。 - - -```c -FD_C_Bool FD_C_Imwrite(const char* savepath, FD_C_Mat img); -``` - -> 将图像写入文件中。 -> -> **参数** -> -> * **savepath**(const char*): 保存图像的路径 -> * **img**(FD_C_Mat): 指向图像数据的指针 -> -> **返回** -> -> * **result**(FD_C_Bool): 表示操作是否成功 - - -#### Predict函数 - -```c -FD_C_Bool FD_C_PPOCRv2WrapperPredict( - FD_C_PPOCRv2Wrapper* fd_c_ppocrv2_wrapper, - FD_C_Mat img, - FD_C_OCRResult* result) -``` -> -> 模型预测接口,输入图像直接并生成结果。 -> -> **参数** -> * **fd_c_ppocrv2_wrapper**(FD_C_PPOCRv2Wrapper*): 指向PPOCRv2模型的指针 -> * **img**(FD_C_Mat): 输入图像的指针,指向cv::Mat对象,可以调用FD_C_Imread读取图像获取 -> * **result**(FD_C_OCRResult*): OCR预测结果,包括由检测模型输出的检测框位置,分类模型输出的方向分类,以及识别模型输出的识别结果, OCRResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) - - -#### Predict结果 - -```c -FD_C_Mat FD_C_VisOcr(FD_C_Mat im, FD_C_OCRResult* ocr_result) -``` -> -> 对结果进行可视化,返回可视化的图像。 -> -> **参数** -> * **im**(FD_C_Mat): 指向输入图像的指针 -> * **ocr_result**(FD_C_OCRResult*): 指向 FD_C_OCRResult结构的指针 -> -> **返回** -> * **vis_im**(FD_C_Mat): 指向可视化图像的指针 - - - - -## 其它文档 - -- [PPOCR 系列模型介绍](../../) -- [PPOCRv2 Python部署](../python) -- [模型预测结果说明](../../../../../docs/api/vision_results/) -- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv2/c/infer.c b/examples/vision/ocr/PP-OCRv2/c/infer.c deleted file mode 100644 index 7eddb7b4b9..0000000000 --- a/examples/vision/ocr/PP-OCRv2/c/infer.c +++ /dev/null @@ -1,250 +0,0 @@ -// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include -#include - -#include "fastdeploy_capi/vision.h" - -#ifdef WIN32 -const char sep = '\\'; -#else -const char sep = '/'; -#endif - -void CpuInfer(const char* det_model_dir, const char* cls_model_dir, - const char* rec_model_dir, const char* rec_label_file, - const char* image_file) { - char det_model_file[100]; - char det_params_file[100]; - - char cls_model_file[100]; - char cls_params_file[100]; - - char rec_model_file[100]; - char rec_params_file[100]; - - int max_size = 99; - snprintf(det_model_file, max_size, "%s%c%s", det_model_dir, sep, - "inference.pdmodel"); - snprintf(det_params_file, max_size, "%s%c%s", det_model_dir, sep, - "inference.pdiparams"); - - snprintf(cls_model_file, max_size, "%s%c%s", cls_model_dir, sep, - "inference.pdmodel"); - snprintf(cls_params_file, max_size, "%s%c%s", cls_model_dir, sep, - "inference.pdiparams"); - - snprintf(rec_model_file, max_size, "%s%c%s", rec_model_dir, sep, - "inference.pdmodel"); - snprintf(rec_params_file, max_size, "%s%c%s", rec_model_dir, sep, - "inference.pdiparams"); - - FD_C_RuntimeOptionWrapper* det_option = FD_C_CreateRuntimeOptionWrapper(); - FD_C_RuntimeOptionWrapper* cls_option = FD_C_CreateRuntimeOptionWrapper(); - FD_C_RuntimeOptionWrapper* rec_option = FD_C_CreateRuntimeOptionWrapper(); - FD_C_RuntimeOptionWrapperUseCpu(det_option); - FD_C_RuntimeOptionWrapperUseCpu(cls_option); - FD_C_RuntimeOptionWrapperUseCpu(rec_option); - - FD_C_DBDetectorWrapper* det_model = FD_C_CreateDBDetectorWrapper( - det_model_file, det_params_file, det_option, FD_C_ModelFormat_PADDLE); - FD_C_ClassifierWrapper* cls_model = FD_C_CreateClassifierWrapper( - cls_model_file, cls_params_file, cls_option, FD_C_ModelFormat_PADDLE); - FD_C_RecognizerWrapper* rec_model = FD_C_CreateRecognizerWrapper( - rec_model_file, rec_params_file, rec_label_file, rec_option, - FD_C_ModelFormat_PADDLE); - - FD_C_PPOCRv2Wrapper* ppocr_v2 = - FD_C_CreatePPOCRv2Wrapper(det_model, cls_model, rec_model); - if 
(!FD_C_PPOCRv2WrapperInitialized(ppocr_v2)) { - printf("Failed to initialize.\n"); - FD_C_DestroyRuntimeOptionWrapper(det_option); - FD_C_DestroyRuntimeOptionWrapper(cls_option); - FD_C_DestroyRuntimeOptionWrapper(rec_option); - FD_C_DestroyClassifierWrapper(cls_model); - FD_C_DestroyDBDetectorWrapper(det_model); - FD_C_DestroyRecognizerWrapper(rec_model); - FD_C_DestroyPPOCRv2Wrapper(ppocr_v2); - return; - } - - FD_C_Mat im = FD_C_Imread(image_file); - - FD_C_OCRResult* result = FD_C_CreateOCRResult(); - - if (!FD_C_PPOCRv2WrapperPredict(ppocr_v2, im, result)) { - printf("Failed to predict.\n"); - FD_C_DestroyRuntimeOptionWrapper(det_option); - FD_C_DestroyRuntimeOptionWrapper(cls_option); - FD_C_DestroyRuntimeOptionWrapper(rec_option); - FD_C_DestroyClassifierWrapper(cls_model); - FD_C_DestroyDBDetectorWrapper(det_model); - FD_C_DestroyRecognizerWrapper(rec_model); - FD_C_DestroyPPOCRv2Wrapper(ppocr_v2); - FD_C_DestroyMat(im); - free(result); - return; - } - - // print res - char res[2000]; - FD_C_OCRResultStr(result, res); - printf("%s", res); - FD_C_Mat vis_im = FD_C_VisOcr(im, result); - FD_C_Imwrite("vis_result.jpg", vis_im); - printf("Visualized result saved in ./vis_result.jpg\n"); - - FD_C_DestroyRuntimeOptionWrapper(det_option); - FD_C_DestroyRuntimeOptionWrapper(cls_option); - FD_C_DestroyRuntimeOptionWrapper(rec_option); - FD_C_DestroyClassifierWrapper(cls_model); - FD_C_DestroyDBDetectorWrapper(det_model); - FD_C_DestroyRecognizerWrapper(rec_model); - FD_C_DestroyPPOCRv2Wrapper(ppocr_v2); - FD_C_DestroyOCRResult(result); - FD_C_DestroyMat(im); - FD_C_DestroyMat(vis_im); -} - -void GpuInfer(const char* det_model_dir, const char* cls_model_dir, - const char* rec_model_dir, const char* rec_label_file, - const char* image_file) { - char det_model_file[100]; - char det_params_file[100]; - - char cls_model_file[100]; - char cls_params_file[100]; - - char rec_model_file[100]; - char rec_params_file[100]; - - int max_size = 99; - snprintf(det_model_file, 
max_size, "%s%c%s", det_model_dir, sep, - "inference.pdmodel"); - snprintf(det_params_file, max_size, "%s%c%s", det_model_dir, sep, - "inference.pdiparams"); - - snprintf(cls_model_file, max_size, "%s%c%s", cls_model_dir, sep, - "inference.pdmodel"); - snprintf(cls_params_file, max_size, "%s%c%s", cls_model_dir, sep, - "inference.pdiparams"); - - snprintf(rec_model_file, max_size, "%s%c%s", rec_model_dir, sep, - "inference.pdmodel"); - snprintf(rec_params_file, max_size, "%s%c%s", rec_model_dir, sep, - "inference.pdiparams"); - - FD_C_RuntimeOptionWrapper* det_option = FD_C_CreateRuntimeOptionWrapper(); - FD_C_RuntimeOptionWrapper* cls_option = FD_C_CreateRuntimeOptionWrapper(); - FD_C_RuntimeOptionWrapper* rec_option = FD_C_CreateRuntimeOptionWrapper(); - FD_C_RuntimeOptionWrapperUseGpu(det_option, 0); - FD_C_RuntimeOptionWrapperUseGpu(cls_option, 0); - FD_C_RuntimeOptionWrapperUseGpu(rec_option, 0); - - FD_C_DBDetectorWrapper* det_model = FD_C_CreateDBDetectorWrapper( - det_model_file, det_params_file, det_option, FD_C_ModelFormat_PADDLE); - FD_C_ClassifierWrapper* cls_model = FD_C_CreateClassifierWrapper( - cls_model_file, cls_params_file, cls_option, FD_C_ModelFormat_PADDLE); - FD_C_RecognizerWrapper* rec_model = FD_C_CreateRecognizerWrapper( - rec_model_file, rec_params_file, rec_label_file, rec_option, - FD_C_ModelFormat_PADDLE); - - FD_C_PPOCRv2Wrapper* ppocr_v2 = - FD_C_CreatePPOCRv2Wrapper(det_model, cls_model, rec_model); - if (!FD_C_PPOCRv2WrapperInitialized(ppocr_v2)) { - printf("Failed to initialize.\n"); - FD_C_DestroyRuntimeOptionWrapper(det_option); - FD_C_DestroyRuntimeOptionWrapper(cls_option); - FD_C_DestroyRuntimeOptionWrapper(rec_option); - FD_C_DestroyClassifierWrapper(cls_model); - FD_C_DestroyDBDetectorWrapper(det_model); - FD_C_DestroyRecognizerWrapper(rec_model); - FD_C_DestroyPPOCRv2Wrapper(ppocr_v2); - return; - } - - FD_C_Mat im = FD_C_Imread(image_file); - - FD_C_OCRResult* result = FD_C_CreateOCRResult(); - - if 
(!FD_C_PPOCRv2WrapperPredict(ppocr_v2, im, result)) { - printf("Failed to predict.\n"); - FD_C_DestroyRuntimeOptionWrapper(det_option); - FD_C_DestroyRuntimeOptionWrapper(cls_option); - FD_C_DestroyRuntimeOptionWrapper(rec_option); - FD_C_DestroyClassifierWrapper(cls_model); - FD_C_DestroyDBDetectorWrapper(det_model); - FD_C_DestroyRecognizerWrapper(rec_model); - FD_C_DestroyPPOCRv2Wrapper(ppocr_v2); - FD_C_DestroyMat(im); - free(result); - return; - } - - // print res - char res[2000]; - FD_C_OCRResultStr(result, res); - printf("%s", res); - FD_C_Mat vis_im = FD_C_VisOcr(im, result); - FD_C_Imwrite("vis_result.jpg", vis_im); - printf("Visualized result saved in ./vis_result.jpg\n"); - - FD_C_DestroyRuntimeOptionWrapper(det_option); - FD_C_DestroyRuntimeOptionWrapper(cls_option); - FD_C_DestroyRuntimeOptionWrapper(rec_option); - FD_C_DestroyClassifierWrapper(cls_model); - FD_C_DestroyDBDetectorWrapper(det_model); - FD_C_DestroyRecognizerWrapper(rec_model); - FD_C_DestroyPPOCRv2Wrapper(ppocr_v2); - FD_C_DestroyOCRResult(result); - FD_C_DestroyMat(im); - FD_C_DestroyMat(vis_im); -} -int main(int argc, char* argv[]) { - if (argc < 7) { - printf( - "Usage: infer_demo path/to/det_model path/to/cls_model " - "path/to/rec_model path/to/rec_label_file path/to/image " - "run_option, " - "e.g ./infer_demo ./ch_PP-OCRv2_det_infer " - "./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer " - "./ppocr_keys_v1.txt ./12.jpg 0\n"); - printf( - "The data type of run_option is int, 0: run with cpu; 1: run with gpu" - "\n"); - return -1; - } - - if (atoi(argv[6]) == 0) { - CpuInfer(argv[1], argv[2], argv[3], argv[4], argv[5]); - } else if (atoi(argv[6]) == 1) { - GpuInfer(argv[1], argv[2], argv[3], argv[4], argv[5]); - } - return 0; -} diff --git a/examples/vision/ocr/PP-OCRv2/cpp/README.md b/examples/vision/ocr/PP-OCRv2/cpp/README.md deleted file mode 100755 index 99748afba5..0000000000 --- a/examples/vision/ocr/PP-OCRv2/cpp/README.md +++ /dev/null @@ -1,157 +0,0 @@ -English | 
[简体中文](README_CN.md) -# PPOCRv2 C++ Deployment Example - -This directory provides examples that `infer.cc` fast finishes the deployment of PPOCRv2 on CPU/GPU and GPU accelerated by TensorRT. - -Two steps before deployment - -- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) -- 2. Download the precompiled deployment library and samples code according to your development environment. Refer to [FastDeploy Precompiled Library](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) - -Taking the CPU inference on Linux as an example, the compilation test can be completed by executing the following command in this directory. FastDeploy version 0.7.0 or above (x.x.x>=0.7.0) is required to support this model. - -``` -mkdir build -cd build -# Download the FastDeploy precompiled library. Users can choose your appropriate version in the `FastDeploy Precompiled Library` mentioned above -wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz -tar xvf fastdeploy-linux-x64-x.x.x.tgz -cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x -make -j - - -# Download model, image, and dictionary files -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar -tar -xvf ch_PP-OCRv2_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar -tar -xvf ch_PP-OCRv2_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# CPU inference -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 -# GPU inference -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -# TensorRT inference on GPU -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 2 -# Paddle-TRT inference on GPU -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 3 -# KunlunXin XPU inference -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 4 -``` - -The above command works for Linux or MacOS. 
For SDK in Windows, refer to: -- [How to use FastDeploy C++ SDK in Windows](../../../../../docs/en/faq/use_sdk_on_windows.md) - -The visualized result after running is as follows - - - - -## PPOCRv2 C++ Interface - -### PPOCRv2 Class - -``` -fastdeploy::pipeline::PPOCRv2(fastdeploy::vision::ocr::DBDetector* det_model, - fastdeploy::vision::ocr::Classifier* cls_model, - fastdeploy::vision::ocr::Recognizer* rec_model); -``` - -The initialization of PPOCRv2, consisting of detection, classification and recognition models - -**Parameter** - -> * **DBDetector**(model): Detection model in OCR -> * **Classifier**(model): Classification model in OCR -> * **Recognizer**(model): Recognition model in OCR - -``` -fastdeploy::pipeline::PPOCRv2(fastdeploy::vision::ocr::DBDetector* det_model, - fastdeploy::vision::ocr::Recognizer* rec_model); -``` -The initialization of PPOCRv2, consisting of detection and recognition models (No classifier) - -**Parameter** - -> * **DBDetector**(model): Detection model in OCROCR中的检测模型 -> * **Recognizer**(model): Recognition model in OCR - -#### Predict Function - -> ``` -> bool Predict(cv::Mat* img, fastdeploy::vision::OCRResult* result); -> bool Predict(const cv::Mat& img, fastdeploy::vision::OCRResult* result); -> ``` -> -> Model prediction interface. Input images and output OCR prediction results -> -> **Parameter** -> -> > * **img**: Input images in HWC or BGR format -> > * **result**: OCR prediction results, including the position of the detection box from the detection model, the classification of the direction from the classification model, and the recognition result from the recognition model. 
Refer to [Vision Model Prediction Results](../../../../../docs/api/vision_results/) for OCRResult - - -## DBDetector C++ Interface - -### DBDetector Class - -``` -fastdeploy::vision::ocr::DBDetector(const std::string& model_file, const std::string& params_file = "", - const RuntimeOption& custom_option = RuntimeOption(), - const ModelFormat& model_format = ModelFormat::PADDLE); -``` - -DBDetector model loading and initialization. The model is in paddle format. - -**Parameter** - -> * **model_file**(str): Model file path -> * **params_file**(str): Parameter file path. Merely passing an empty string when the model is in ONNX format -> * **runtime_option**(RuntimeOption): Backend inference configuration. None by default, which is the default configuration -> * **model_format**(ModelFormat): Model format. Paddle format by default - -### The same applies to Classifier Class - -### Recognizer Class -``` - Recognizer(const std::string& model_file, - const std::string& params_file = "", - const std::string& label_path = "", - const RuntimeOption& custom_option = RuntimeOption(), - const ModelFormat& model_format = ModelFormat::PADDLE); -``` -For the initialization of the Recognizer class, users should input the label file required by the recognition model in the label_path parameter. Other parameters are the same as the DBDetector class - -**Parameter** -> * **label_path**(str): The label path of the recognition model - - -### Class Member Variable -#### DBDetector Pre-processing Parameter -Users can modify the following pre-processing parameters to their needs, which affects the final inference and deployment results - -> > * **max_side_len**(int): The long side’s maximum size of the oriented view before detection. The long side will be resized to this size when exceeding the value. And the short side will be scaled in equal proportion. Default 960 -> > * **det_db_thresh**(double): The binarization threshold of the prediction image from DB models. 
Default 0.3 -> > * **det_db_box_thresh**(double): The threshold for the output box of DB models, below which the predicted box is discarded. Default 0.6 -> > * **det_db_unclip_ratio**(double): The expansion ratio of the DB model output box. Default 1.5 -> > * **det_db_score_mode**(string): The way to calculate the average score of the text box in DB post-processing. Default slow, which is identical to the calculation of the polygon area’s average score -> > * **use_dilation**(bool): Whether to expand the feature map from the detection. Default False - -#### Classifier Pre-processing Parameter -Users can modify the following pre-processing parameters to their needs, which affects the final inference and deployment results - -> > * **cls_thresh**(double): The input image will be flipped when the score output by the classification model exceeds this threshold. Default 0.9 - -## Other Documents - -- [PPOCR Model Description](../../) -- [PPOCRv2 Python Deployment](../python) -- [Model Prediction Results](../../../../../docs/api/vision_results/) -- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv2/cpp/README_CN.md b/examples/vision/ocr/PP-OCRv2/cpp/README_CN.md deleted file mode 100644 index ec8b0c16b3..0000000000 --- a/examples/vision/ocr/PP-OCRv2/cpp/README_CN.md +++ /dev/null @@ -1,162 +0,0 @@ -[English](README.md) | 简体中文 -# PPOCRv2 C++部署示例 - -本目录下提供`infer.cc`快速完成PPOCRv2在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 - -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) -- 2. 
根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) - -以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试,支持此模型需保证FastDeploy版本0.7.0以上(x.x.x>=0.7.0) - -``` -mkdir build -cd build -# 下载FastDeploy预编译库,用户可在上文提到的`FastDeploy预编译库`中自行选择合适的版本使用 -wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz -tar xvf fastdeploy-linux-x64-x.x.x.tgz -cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x -make -j - - -# 下载模型,图片和字典文件 -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar -tar -xvf ch_PP-OCRv2_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar -tar -xvf ch_PP-OCRv2_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# CPU推理 -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 -# GPU推理 -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -# GPU上TensorRT推理 -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 2 -# GPU上Paddle-TRT推理 -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 3 -# 昆仑芯XPU推理 -./infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 4 -# 华为昇腾推理, 需要使用静态shape的demo, 若用户需要连续地预测图片, 输入图片尺寸需要准备为统一尺寸 -./infer_static_shape_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -``` - -以上命令只适用于Linux或MacOS, 
Windows下SDK的使用方式请参考: -- [如何在Windows中使用FastDeploy C++ SDK](../../../../../docs/cn/faq/use_sdk_on_windows.md) - -如果用户使用华为昇腾NPU部署, 请参考以下方式在部署前初始化部署环境: -- [如何使用华为昇腾NPU部署](../../../../../docs/cn/faq/use_sdk_on_ascend.md) - -运行完成可视化结果如下图所示 - - - - -## PPOCRv2 C++接口 - -### PPOCRv2类 - -``` -fastdeploy::pipeline::PPOCRv2(fastdeploy::vision::ocr::DBDetector* det_model, - fastdeploy::vision::ocr::Classifier* cls_model, - fastdeploy::vision::ocr::Recognizer* rec_model); -``` - -PPOCRv2 的初始化,由检测,分类和识别模型串联构成 - -**参数** - -> * **DBDetector**(model): OCR中的检测模型 -> * **Classifier**(model): OCR中的分类模型 -> * **Recognizer**(model): OCR中的识别模型 - -``` -fastdeploy::pipeline::PPOCRv2(fastdeploy::vision::ocr::DBDetector* det_model, - fastdeploy::vision::ocr::Recognizer* rec_model); -``` -PPOCRv2 的初始化,由检测,识别模型串联构成(无分类器) - -**参数** - -> * **DBDetector**(model): OCR中的检测模型 -> * **Recognizer**(model): OCR中的识别模型 - -#### Predict函数 - -> ``` -> bool Predict(cv::Mat* img, fastdeploy::vision::OCRResult* result); -> bool Predict(const cv::Mat& img, fastdeploy::vision::OCRResult* result); -> ``` -> -> 模型预测接口,输入一张图片,返回OCR预测结果 -> -> **参数** -> -> > * **img**: 输入图像,注意需为HWC,BGR格式 -> > * **result**: OCR预测结果,包括由检测模型输出的检测框位置,分类模型输出的方向分类,以及识别模型输出的识别结果, OCRResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) - - -## DBDetector C++接口 - -### DBDetector类 - -``` -fastdeploy::vision::ocr::DBDetector(const std::string& model_file, const std::string& params_file = "", - const RuntimeOption& custom_option = RuntimeOption(), - const ModelFormat& model_format = ModelFormat::PADDLE); -``` - -DBDetector模型加载和初始化,其中模型为paddle模型格式。 - -**参数** - -> * **model_file**(str): 模型文件路径 -> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 -> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 -> * **model_format**(ModelFormat): 模型格式,默认为Paddle格式 - -### Classifier类与DBDetector类相同 - -### Recognizer类 -``` - Recognizer(const std::string& model_file, - const std::string& params_file = "", - const std::string& label_path = "", - 
const RuntimeOption& custom_option = RuntimeOption(), - const ModelFormat& model_format = ModelFormat::PADDLE); -``` -Recognizer类初始化时,需要在label_path参数中,输入识别模型所需的label文件,其他参数均与DBDetector类相同 - -**参数** -> * **label_path**(str): 识别模型的label文件路径 - - -### 类成员变量 -#### DBDetector预处理参数 -用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 - -> > * **max_side_len**(int): 检测算法前向时图片长边的最大尺寸,当长边超出这个值时会将长边resize到这个大小,短边等比例缩放,默认为960 -> > * **det_db_thresh**(double): DB模型输出预测图的二值化阈值,默认为0.3 -> > * **det_db_box_thresh**(double): DB模型输出框的阈值,低于此值的预测框会被丢弃,默认为0.6 -> > * **det_db_unclip_ratio**(double): DB模型输出框扩大的比例,默认为1.5 -> > * **det_db_score_mode**(string):DB后处理中计算文本框平均得分的方式,默认为slow,即求polygon区域的平均分数的方式 -> > * **use_dilation**(bool):是否对检测输出的feature map做膨胀处理,默认为Fasle - -#### Classifier预处理参数 -用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 - -> > * **cls_thresh**(double): 当分类模型输出的得分超过此阈值,输入的图片将被翻转,默认为0.9 - -## 其它文档 - -- [PPOCR 系列模型介绍](../../) -- [PPOCRv2 Python部署](../python) -- [模型预测结果说明](../../../../../docs/api/vision_results/) -- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv2/cpp/infer.cc b/examples/vision/ocr/PP-OCRv2/cpp/infer.cc deleted file mode 100755 index 72a7fcf7ea..0000000000 --- a/examples/vision/ocr/PP-OCRv2/cpp/infer.cc +++ /dev/null @@ -1,134 +0,0 @@ -// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#include "fastdeploy/vision.h" -#ifdef WIN32 -const char sep = '\\'; -#else -const char sep = '/'; -#endif - -void InitAndInfer(const std::string& det_model_dir, const std::string& cls_model_dir, const std::string& rec_model_dir, const std::string& rec_label_file, const std::string& image_file, const fastdeploy::RuntimeOption& option) { - auto det_model_file = det_model_dir + sep + "inference.pdmodel"; - auto det_params_file = det_model_dir + sep + "inference.pdiparams"; - - auto cls_model_file = cls_model_dir + sep + "inference.pdmodel"; - auto cls_params_file = cls_model_dir + sep + "inference.pdiparams"; - - auto rec_model_file = rec_model_dir + sep + "inference.pdmodel"; - auto rec_params_file = rec_model_dir + sep + "inference.pdiparams"; - - auto det_option = option; - auto cls_option = option; - auto rec_option = option; - - // The cls and rec model can inference a batch of images now. - // User could initialize the inference batch size and set them after create PPOCR model. - int cls_batch_size = 1; - int rec_batch_size = 6; - - // If use TRT backend, the dynamic shape will be set as follow. - // We recommend that users set the length and height of the detection model to a multiple of 32. - det_option.SetTrtInputShape("x", {1, 3, 64,64}, {1, 3, 640, 640}, - {1, 3, 960, 960}); - cls_option.SetTrtInputShape("x", {1, 3, 48, 10}, {cls_batch_size, 3, 48, 320}, {cls_batch_size, 3, 48, 1024}); - rec_option.SetTrtInputShape("x", {1, 3, 32, 10}, {rec_batch_size, 3, 32, 320}, - {rec_batch_size, 3, 32, 2304}); - - // Users could save TRT cache file to disk as follow. 
- // det_option.SetTrtCacheFile(det_model_dir + sep + "det_trt_cache.trt"); - // cls_option.SetTrtCacheFile(cls_model_dir + sep + "cls_trt_cache.trt"); - // rec_option.SetTrtCacheFile(rec_model_dir + sep + "rec_trt_cache.trt"); - - auto det_model = fastdeploy::vision::ocr::DBDetector(det_model_file, det_params_file, det_option); - auto cls_model = fastdeploy::vision::ocr::Classifier(cls_model_file, cls_params_file, cls_option); - auto rec_model = fastdeploy::vision::ocr::Recognizer(rec_model_file, rec_params_file, rec_label_file, rec_option); - - assert(det_model.Initialized()); - assert(cls_model.Initialized()); - assert(rec_model.Initialized()); - - // The classification model is optional, so the PP-OCR can also be connected in series as follows - // auto ppocr_v2 = fastdeploy::pipeline::PPOCRv2(&det_model, &rec_model); - auto ppocr_v2 = fastdeploy::pipeline::PPOCRv2(&det_model, &cls_model, &rec_model); - - // Set inference batch size for cls model and rec model, the value could be -1 and 1 to positive infinity. - // When inference batch size is set to -1, it means that the inference batch size - // of the cls and rec models will be the same as the number of boxes detected by the det model. - ppocr_v2.SetClsBatchSize(cls_batch_size); - ppocr_v2.SetRecBatchSize(rec_batch_size); - - if(!ppocr_v2.Initialized()){ - std::cerr << "Failed to initialize PP-OCR." << std::endl; - return; - } - - auto im = cv::imread(image_file); - auto im_bak = im.clone(); - - fastdeploy::vision::OCRResult result; - if (!ppocr_v2.Predict(&im, &result)) { - std::cerr << "Failed to predict." 
<< std::endl; - return; - } - - std::cout << result.Str() << std::endl; - - auto vis_im = fastdeploy::vision::VisOcr(im_bak, result); - cv::imwrite("vis_result.jpg", vis_im); - std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; -} - -int main(int argc, char* argv[]) { - if (argc < 7) { - std::cout << "Usage: infer_demo path/to/det_model path/to/cls_model " - "path/to/rec_model path/to/rec_label_file path/to/image " - "run_option, " - "e.g ./infer_demo ./ch_PP-OCRv2_det_infer " - "./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer " - "./ppocr_keys_v1.txt ./12.jpg 0" - << std::endl; - std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " - "with gpu; 2: run with gpu and use tensorrt backend; 3: run with gpu and use Paddle-TRT; 4: run with kunlunxin." - << std::endl; - return -1; - } - - fastdeploy::RuntimeOption option; - int flag = std::atoi(argv[6]); - - if (flag == 0) { - option.UseCpu(); - } else if (flag == 1) { - option.UseGpu(); - } else if (flag == 2) { - option.UseGpu(); - option.UseTrtBackend(); - } else if (flag == 3) { - option.UseGpu(); - option.UseTrtBackend(); - option.EnablePaddleTrtCollectShape(); - option.EnablePaddleToTrt(); - } else if (flag == 4) { - option.UseKunlunXin(); - } - - std::string det_model_dir = argv[1]; - std::string cls_model_dir = argv[2]; - std::string rec_model_dir = argv[3]; - std::string rec_label_file = argv[4]; - std::string test_image = argv[5]; - InitAndInfer(det_model_dir, cls_model_dir, rec_model_dir, rec_label_file, test_image, option); - return 0; -} diff --git a/examples/vision/ocr/PP-OCRv2/csharp/README.md b/examples/vision/ocr/PP-OCRv2/csharp/README.md deleted file mode 100755 index ef03a247d3..0000000000 --- a/examples/vision/ocr/PP-OCRv2/csharp/README.md +++ /dev/null @@ -1,153 +0,0 @@ -English | [简体中文](README_CN.md) -# PPOCRv2 C# Deployment Example - -This directory provides `infer.cs` to finish the deployment of PPOCRv2 on CPU/GPU. 
- -Before deployment, two steps require confirmation - -- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) -- 2. Download the precompiled deployment library and samples code according to your development environment. Refer to [FastDeploy Precompiled Library](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) - -Please follow below instructions to compile and test in Windows. FastDeploy version 1.0.4 or above (x.x.x>=1.0.4) is required to support this model. - -## 1. Download C# package management tool nuget client -> https://dist.nuget.org/win-x86-commandline/v6.4.0/nuget.exe - -Add nuget program into system variable **PATH** - -## 2. Download model and image for test -> https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar (Decompress it) -> https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -> https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar -> https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg -> https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -## 3. Compile example code - -Open `x64 Native Tools Command Prompt for VS 2019` command tool on Windows, cd to the demo path of ppyoloe and execute commands - -```shell -cd D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\ocr\PP-OCRv2\csharp - -mkdir build && cd build -cmake .. -G "Visual Studio 16 2019" -A x64 -DFASTDEPLOY_INSTALL_DIR=D:\Download\fastdeploy-win-x64-gpu-x.x.x -DCUDA_DIRECTORY="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.2" - -nuget restore -msbuild infer_demo.sln /m:4 /p:Configuration=Release /p:Platform=x64 -``` - -For more information about how to use FastDeploy SDK to compile a project with Visual Studio 2019. 
Please refer to -- [Using the FastDeploy C++ SDK on Windows Platform](../../../../../docs/en/faq/use_sdk_on_windows.md) - -## 4. Execute compiled program - -fastdeploy.dll and related dynamic libraries are required by the program. FastDeploy provide a script to copy all required dll to your program path. - -```shell -cd D:\Download\fastdeploy-win-x64-gpu-x.x.x - -fastdeploy_init.bat install %cd% D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\ocr\PP-OCRv2\csharp\build\Release -``` - -Then you can run your program and test the model with image -```shell -cd Release -# CPU inference -infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 -# GPU inference -infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -``` - -## PPOCRv2 C# Interface - -### Model Class - -```c# -fastdeploy.vision.ocr.DBDetector( - string model_file, - string params_file, - fastdeploy.RuntimeOption runtime_option = null, - fastdeploy.ModelFormat model_format = ModelFormat.PADDLE) -``` - -> DBDetector initialization - -> **Params** - ->> * **model_file**(str): Model file path ->> * **params_file**(str): Parameter file path ->> * **runtime_option**(RuntimeOption): Backend inference configuration. None by default, which is the default configuration ->> * **model_format**(ModelFormat): Model format. - -```c# -fastdeploy.vision.ocr.Classifier( - string model_file, - string params_file, - fastdeploy.RuntimeOption runtime_option = null, - fastdeploy.ModelFormat model_format = ModelFormat.PADDLE) -``` - -> Classifier initialization - -> **Params** - ->> * **model_file**(str): Model file path ->> * **params_file**(str): Parameter file path ->> * **runtime_option**(RuntimeOption): Backend inference configuration. None by default, which is the default configuration ->> * **model_format**(ModelFormat): Model format. 
- -```c# -fastdeploy.vision.ocr.Recognizer( - string model_file, - string params_file, - string label_path, - fastdeploy.RuntimeOption runtime_option = null, - fastdeploy.ModelFormat model_format = ModelFormat.PADDLE) -``` - -> Recognizer initialization - -> **Params** - ->> * **model_file**(str): Model file path ->> * **params_file**(str): Parameter file path ->> * **runtime_option**(RuntimeOption): Backend inference configuration. None by default, which is the default configuration ->> * **model_format**(ModelFormat): Model format. - -```c# -fastdeploy.pipeline.PPOCRv2Model( - DBDetector dbdetector, - Classifier classifier, - Recognizer recognizer) -``` - -> PPOCRv2Model initialization - -> **Params** - ->> * **det_model**(FD_C_DBDetectorWrapper*): DBDetector model ->> * **cls_model**(FD_C_ClassifierWrapper*): Classifier model ->> * **rec_model**(FD_C_RecognizerWrapper*): Recognizer model - -#### Predict Function - -```c# -fastdeploy.OCRResult Predict(OpenCvSharp.Mat im) -``` - -> Model prediction interface. Input images and output results directly. -> -> **Params** -> ->> * **im**(Mat): Input images in HWC or BGR format ->> -> **Return** -> ->> * **result**: OCR prediction results, including the position of the detection box from the detection model, the classification of the direction from the classification model, and the recognition result from the recognition model. 
Refer to [Vision Model Prediction Results](../../../../../docs/api/vision_results/) for OCRResult - -## Other Documents - -- [PPOCR Model Description](../../) -- [PPOCRv2 Python Deployment](../python) -- [Model Prediction Results](../../../../../docs/api/vision_results/) -- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv2/csharp/README_CN.md b/examples/vision/ocr/PP-OCRv2/csharp/README_CN.md deleted file mode 100644 index f7447a2dd5..0000000000 --- a/examples/vision/ocr/PP-OCRv2/csharp/README_CN.md +++ /dev/null @@ -1,153 +0,0 @@ -[English](README.md) | 简体中文 -# PPOCRv2 C#部署示例 - -本目录下提供`infer.cs`来调用C# API快速完成PPOCRv2模型在CPU/GPU上部署的示例。 - -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) -- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) - -在本目录执行如下命令即可在Windows完成编译测试,支持此模型需保证FastDeploy版本1.0.4以上(x.x.x>=1.0.4) - -## 1. 下载C#包管理程序nuget客户端 -> https://dist.nuget.org/win-x86-commandline/v6.4.0/nuget.exe - -下载完成后将该程序添加到环境变量**PATH**中 - -## 2. 下载模型文件和测试图片 -> https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar # (下载后解压缩) -> https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -> https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar -> https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg -> https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -## 3. 编译示例代码 - -本文档编译的示例代码可在解压的库中找到,编译工具依赖VS 2019的安装,**Windows打开x64 Native Tools Command Prompt for VS 2019命令工具**,通过如下命令开始编译 - -```shell -cd D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\ocr\PP-OCRv2\csharp - -mkdir build && cd build -cmake .. 
-G "Visual Studio 16 2019" -A x64 -DFASTDEPLOY_INSTALL_DIR=D:\Download\fastdeploy-win-x64-gpu-x.x.x -DCUDA_DIRECTORY="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.2" - -nuget restore -msbuild infer_demo.sln /m:4 /p:Configuration=Release /p:Platform=x64 -``` - -关于使用Visual Studio 2019创建sln工程,或者CMake工程等方式编译的更详细信息,可参考如下文档 -- [在 Windows 使用 FastDeploy C++ SDK](../../../../../docs/cn/faq/use_sdk_on_windows.md) -- [FastDeploy C++库在Windows上的多种使用方式](../../../../../docs/cn/faq/use_sdk_on_windows_build.md) - -## 4. 运行可执行程序 - -注意Windows上运行时,需要将FastDeploy依赖的库拷贝至可执行程序所在目录, 或者配置环境变量。FastDeploy提供了工具帮助我们快速将所有依赖库拷贝至可执行程序所在目录,通过如下命令将所有依赖的dll文件拷贝至可执行程序所在的目录(可能生成的可执行文件在Release下还有一层目录,这里假设生成的可执行文件在Release处) -```shell -cd D:\Download\fastdeploy-win-x64-gpu-x.x.x - -fastdeploy_init.bat install %cd% D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\ocr\PP-OCRv2\csharp\build\Release -``` - -将dll拷贝到当前路径后,准备好模型和图片,使用如下命令运行可执行程序即可 -```shell -cd Release -# CPU推理 -infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 -# GPU推理 -infer_demo ./ch_PP-OCRv2_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -``` - -## PPOCRv2 C#接口 - -### 模型 - -```c# -fastdeploy.vision.ocr.DBDetector( - string model_file, - string params_file, - fastdeploy.RuntimeOption runtime_option = null, - fastdeploy.ModelFormat model_format = ModelFormat.PADDLE) -``` - -> DBDetector模型加载和初始化。 - -> **参数** - ->> * **model_file**(str): 模型文件路径 ->> * **params_file**(str): 参数文件路径 ->> * **runtime_option**(RuntimeOption): 后端推理配置,默认为null,即采用默认配置 ->> * **model_format**(ModelFormat): 模型格式,默认为PADDLE格式 - -```c# -fastdeploy.vision.ocr.Classifier( - string model_file, - string params_file, - fastdeploy.RuntimeOption runtime_option = null, - fastdeploy.ModelFormat model_format = ModelFormat.PADDLE) -``` - -> Classifier模型加载和初始化。 - -> **参数** - ->> * **model_file**(str): 模型文件路径 ->> * **params_file**(str): 参数文件路径 ->> * 
**runtime_option**(RuntimeOption): 后端推理配置,默认为null,即采用默认配置 ->> * **model_format**(ModelFormat): 模型格式,默认为PADDLE格式 - -```c# -fastdeploy.vision.ocr.Recognizer( - string model_file, - string params_file, - string label_path, - fastdeploy.RuntimeOption runtime_option = null, - fastdeploy.ModelFormat model_format = ModelFormat.PADDLE) -``` - -> Recognizer模型加载和初始化。 - -> **参数** - ->> * **model_file**(str): 模型文件路径 ->> * **params_file**(str): 参数文件路径 ->> * **label_path**(str): 标签文件路径 ->> * **runtime_option**(RuntimeOption): 后端推理配置,默认为null,即采用默认配置 ->> * **model_format**(ModelFormat): 模型格式,默认为PADDLE格式 - -```c# -fastdeploy.pipeline.PPOCRv2Model( - DBDetector dbdetector, - Classifier classifier, - Recognizer recognizer) -``` - -> PPOCRv2Model模型加载和初始化。 - -> **参数** - ->> * **det_model**(FD_C_DBDetectorWrapper*): DBDetector模型 ->> * **cls_model**(FD_C_ClassifierWrapper*): Classifier模型 ->> * **rec_model**(FD_C_RecognizerWrapper*): Recognizer模型文件 - -#### Predict函数 - -```c# -fastdeploy.OCRResult Predict(OpenCvSharp.Mat im) -``` - -> 模型预测接口,输入图像直接输出结果。 -> -> **参数** -> ->> * **im**(Mat): 输入图像,注意需为HWC,BGR格式 ->> -> **返回值** -> ->> * **result**: OCR预测结果,包括由检测模型输出的检测框位置,分类模型输出的方向分类,以及识别模型输出的识别结果, OCRResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) - - -- [模型介绍](../../) -- [Python部署](../python) -- [视觉模型预测结果](../../../../../docs/api/vision_results/) -- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv2/csharp/infer.cs b/examples/vision/ocr/PP-OCRv2/csharp/infer.cs deleted file mode 100644 index cc54a5551d..0000000000 --- a/examples/vision/ocr/PP-OCRv2/csharp/infer.cs +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -using System; -using System.IO; -using System.Runtime.InteropServices; -using OpenCvSharp; -using fastdeploy; - -namespace Test -{ - public class TestPPOCRv2 - { - public static void Main(string[] args) - { - if (args.Length < 6) { - Console.WriteLine( - "Usage: infer_demo path/to/det_model path/to/cls_model " + - "path/to/rec_model path/to/rec_label_file path/to/image " + - "run_option, " + - "e.g ./infer_demo ./ch_PP-OCRv2_det_infer " + - "./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv2_rec_infer " + - "./ppocr_keys_v1.txt ./12.jpg 0" - ); - Console.WriteLine( "The data type of run_option is int, 0: run with cpu; 1: run with gpu"); - return; - } - string det_model_dir = args[0]; - string cls_model_dir = args[1]; - string rec_model_dir = args[2]; - string rec_label_file = args[3]; - string image_path = args[4]; - RuntimeOption runtimeoption = new RuntimeOption(); - int device_option = Int32.Parse(args[5]); - if(device_option==0){ - runtimeoption.UseCpu(); - }else{ - runtimeoption.UseGpu(); - } - string sep = "\\"; - string det_model_file = det_model_dir + sep + "inference.pdmodel"; - string det_params_file = det_model_dir + sep + "inference.pdiparams"; - - string cls_model_file = cls_model_dir + sep + "inference.pdmodel"; - string cls_params_file = cls_model_dir + sep + "inference.pdiparams"; - - string rec_model_file = rec_model_dir + sep + "inference.pdmodel"; - string rec_params_file = rec_model_dir + sep + "inference.pdiparams"; - - fastdeploy.vision.ocr.DBDetector dbdetector = new fastdeploy.vision.ocr.DBDetector(det_model_file, 
det_params_file, runtimeoption, ModelFormat.PADDLE); - fastdeploy.vision.ocr.Classifier classifier = new fastdeploy.vision.ocr.Classifier(cls_model_file, cls_params_file, runtimeoption, ModelFormat.PADDLE); - fastdeploy.vision.ocr.Recognizer recognizer = new fastdeploy.vision.ocr.Recognizer(rec_model_file, rec_params_file, rec_label_file, runtimeoption, ModelFormat.PADDLE); - fastdeploy.pipeline.PPOCRv2 model = new fastdeploy.pipeline.PPOCRv2(dbdetector, classifier, recognizer); - if(!model.Initialized()){ - Console.WriteLine("Failed to initialize.\n"); - } - Mat image = Cv2.ImRead(image_path); - fastdeploy.vision.OCRResult res = model.Predict(image); - Console.WriteLine(res.ToString()); - Mat res_img = fastdeploy.vision.Visualize.VisOcr(image, res); - Cv2.ImShow("result.png", res_img); - Cv2.ImWrite("result.png", res_img); - Cv2.WaitKey(0); - - } - - } -} \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv2/python/README.md b/examples/vision/ocr/PP-OCRv2/python/README.md deleted file mode 100755 index 84282ae0c8..0000000000 --- a/examples/vision/ocr/PP-OCRv2/python/README.md +++ /dev/null @@ -1,131 +0,0 @@ -English | [简体中文](README_CN.md) -# PPOCRv2 Python Deployment Example - -Two steps before deployment - -- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) -- 2. Install FastDeploy Python whl package. Refer to [FastDeploy Python Installation](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) - -This directory provides examples that `infer.py` fast finishes the deployment of PPOCRv2 on CPU/GPU and GPU accelerated by TensorRT. 
The script is as follows - -``` - -# Download model, image, and dictionary files -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar -tar -xvf ch_PP-OCRv2_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar -tar -xvf ch_PP-OCRv2_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - - -# Download the example code for deployment -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/ocr/PP-OCRv2/python/ - -# CPU inference -python infer.py --det_model ch_PP-OCRv2_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv2_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device cpu -# GPU inference -python infer.py --det_model ch_PP-OCRv2_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv2_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device gpu -# TensorRT inference on GPU -python infer.py --det_model ch_PP-OCRv2_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv2_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device gpu --backend trt -# KunlunXin XPU inference -python infer.py --det_model ch_PP-OCRv2_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv2_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device kunlunxin -``` - -The visualized result after running is as follows - - -## PPOCRv2 Python Interface - -``` -fd.vision.ocr.PPOCRv2(det_model=det_model, cls_model=cls_model, rec_model=rec_model) -``` -To initialize PPOCRv2, the input parameters correspond to detection model, classification model, and recognition model. Among them, cls_model is optional. 
It can be set to None if there is no demand - -**Parameter** - -> * **det_model**(model): Detection model in OCR -> * **cls_model**(model): Classification model in OCR -> * **rec_model**(model): Recognition model in OCR - -### predict function - -> ``` -> result = ppocr_v2.predict(im) -> ``` -> -> Model prediction interface. Input one image. -> -> **Parameter** -> -> > * **im**(np.ndarray): Input data in HWC or BGR format - -> **Return** -> -> > Return the `fastdeploy.vision.OCRResult` structure. Refer to [Vision Model Prediction Results](../../../../../docs/api/vision_results/) for its description. - - - -## DBDetector Python Interface - -### DBDetector Class - -``` -fastdeploy.vision.ocr.DBDetector(model_file, params_file, runtime_option=None, model_format=ModelFormat.PADDLE) -``` - -DBDetector model loading and initialization. The model is in paddle format. - -**Parameter** - -> * **model_file**(str): Model file path -> * **params_file**(str): Parameter file path. Merely passing an empty string when the model is in ONNX format -> * **runtime_option**(RuntimeOption): Backend inference configuration. None by default, which is the default configuration -> * **model_format**(ModelFormat): Model format. PADDLE format by default - -### The same applies to the Classifier class - -### Recognizer Class -``` -fastdeploy.vision.ocr.Recognizer(rec_model_file,rec_params_file,rec_label_file, - runtime_option=rec_runtime_option,model_format=ModelFormat.PADDLE) -``` -To initialize the Recognizer class, users need to input the label file path required by the recognition model in the rec_label_file parameter. 
Other parameters are the same as those of DBDetector class - -**Parameter** -> * **label_path**(str): The label path of the recognition model - - - -### Class Member Variable - -#### DBDetector Pre-processing Parameter -Users can modify the following pre-processing parameters to their needs, which affects the final inference and deployment results - -> > * **max_side_len**(int): The long side’s maximum size of the oriented view before detection. The long side will be resized to this size when exceeding the value. And the short side will be scaled in equal proportion. Default 960 -> > * **det_db_thresh**(double): The binarization threshold of the prediction image from DB models. Default 0.3 -> > * **det_db_box_thresh**(double): The threshold for the output box of DB models, below which the predicted box is discarded. Default 0.6 -> > * **det_db_unclip_ratio**(double): The expansion ratio of the DB model output box. Default 1.5 -> > * **det_db_score_mode**(string): The way to calculate the average score of the text box in DB post-processing. Default slow, which is identical to the calculation of the polygon area’s average score -> > * **use_dilation**(bool): Whether to expand the feature map from the detection. Default False - -#### Classifier Pre-processing Parameter -Users can modify the following pre-processing parameters to their needs, which affects the final inference and deployment results - -> > * **cls_thresh**(double): The input image will be flipped when the score output by the classification model exceeds this threshold. 
Default 0.9 - - - -## Other Documents - -- [PPOCR Model Description](../../) -- [PPOCRv2 C++ Deployment](../cpp) -- [Model Prediction Results](../../../../../docs/api/vision_results/) -- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv2/python/README_CN.md b/examples/vision/ocr/PP-OCRv2/python/README_CN.md deleted file mode 100644 index 9eea8ba5c8..0000000000 --- a/examples/vision/ocr/PP-OCRv2/python/README_CN.md +++ /dev/null @@ -1,133 +0,0 @@ -[English](README.md) | 简体中文 -# PPOCRv2 Python部署示例 - -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) -- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) - -本目录下提供`infer.py`快速完成PPOCRv2在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 - -``` - -# 下载模型,图片和字典文件 -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar -tar -xvf ch_PP-OCRv2_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar -tar -xvf ch_PP-OCRv2_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - - -#下载部署示例代码 -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/ocr/PP-OCRv2/python/ - -# CPU推理 -python infer.py --det_model ch_PP-OCRv2_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv2_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device cpu -# GPU推理 -python infer.py --det_model ch_PP-OCRv2_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv2_rec_infer --rec_label_file ppocr_keys_v1.txt --image 
12.jpg --device gpu -# GPU上使用TensorRT推理 -python infer.py --det_model ch_PP-OCRv2_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv2_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device gpu --backend trt -# 昆仑芯XPU推理 -python infer.py --det_model ch_PP-OCRv2_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv2_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device kunlunxin -# 华为昇腾推理,需要使用静态shape脚本, 若用户需要连续地预测图片, 输入图片尺寸需要准备为统一尺寸 -python infer_static_shape.py --det_model ch_PP-OCRv2_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv2_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device ascend -``` - -运行完成可视化结果如下图所示 - - -## PPOCRv2 Python接口 - -``` -fd.vision.ocr.PPOCRv2(det_model=det_model, cls_model=cls_model, rec_model=rec_model) -``` -PPOCRv2的初始化,输入的参数是检测模型,分类模型和识别模型,其中cls_model可选,如无需求,可设置为None - -**参数** - -> * **det_model**(model): OCR中的检测模型 -> * **cls_model**(model): OCR中的分类模型 -> * **rec_model**(model): OCR中的识别模型 - -### predict函数 - -> ``` -> result = ppocr_v2.predict(im) -> ``` -> -> 模型预测接口,输入是一张图片 -> -> **参数** -> -> > * **im**(np.ndarray): 输入数据,每张图片注意需为HWC,BGR格式 - -> **返回** -> -> > 返回`fastdeploy.vision.OCRResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) - - - -## DBDetector Python接口 - -### DBDetector类 - -``` -fastdeploy.vision.ocr.DBDetector(model_file, params_file, runtime_option=None, model_format=ModelFormat.PADDLE) -``` - -DBDetector模型加载和初始化,其中模型为paddle模型格式。 - -**参数** - -> * **model_file**(str): 模型文件路径 -> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 -> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 -> * **model_format**(ModelFormat): 模型格式,默认为PADDLE格式 - -### Classifier类与DBDetector类相同 - -### Recognizer类 -``` -fastdeploy.vision.ocr.Recognizer(rec_model_file,rec_params_file,rec_label_file, - runtime_option=rec_runtime_option,model_format=ModelFormat.PADDLE) -``` 
-Recognizer类初始化时,需要在rec_label_file参数中,输入识别模型所需的label文件路径,其他参数均与DBDetector类相同 - -**参数** -> * **label_path**(str): 识别模型的label文件路径 - - - -### 类成员变量 - -#### DBDetector预处理参数 -用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 - -> > * **max_side_len**(int): 检测算法前向时图片长边的最大尺寸,当长边超出这个值时会将长边resize到这个大小,短边等比例缩放,默认为960 -> > * **det_db_thresh**(double): DB模型输出预测图的二值化阈值,默认为0.3 -> > * **det_db_box_thresh**(double): DB模型输出框的阈值,低于此值的预测框会被丢弃,默认为0.6 -> > * **det_db_unclip_ratio**(double): DB模型输出框扩大的比例,默认为1.5 -> > * **det_db_score_mode**(string):DB后处理中计算文本框平均得分的方式,默认为slow,即求polygon区域的平均分数的方式 -> > * **use_dilation**(bool):是否对检测输出的feature map做膨胀处理,默认为Fasle - -#### Classifier预处理参数 -用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 - -> > * **cls_thresh**(double): 当分类模型输出的得分超过此阈值,输入的图片将被翻转,默认为0.9 - - - -## 其它文档 - -- [PPOCR 系列模型介绍](../../) -- [PPOCRv2 C++部署](../cpp) -- [模型预测结果说明](../../../../../docs/api/vision_results/) -- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv2/python/infer.py b/examples/vision/ocr/PP-OCRv2/python/infer.py deleted file mode 100755 index 6e8fe62b15..0000000000 --- a/examples/vision/ocr/PP-OCRv2/python/infer.py +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import fastdeploy as fd -import cv2 -import os - - -def parse_arguments(): - import argparse - import ast - parser = argparse.ArgumentParser() - parser.add_argument( - "--det_model", required=True, help="Path of Detection model of PPOCR.") - parser.add_argument( - "--cls_model", - required=True, - help="Path of Classification model of PPOCR.") - parser.add_argument( - "--rec_model", - required=True, - help="Path of Recognization model of PPOCR.") - parser.add_argument( - "--rec_label_file", - required=True, - help="Path of Recognization model of PPOCR.") - parser.add_argument( - "--image", type=str, required=True, help="Path of test image file.") - parser.add_argument( - "--device", - type=str, - default='cpu', - help="Type of inference device, support 'cpu', 'kunlunxin' or 'gpu'.") - parser.add_argument( - "--backend", - type=str, - default="default", - help="Type of inference backend, support ort/trt/paddle/openvino, default 'openvino' for cpu, 'tensorrt' for gpu" - ) - parser.add_argument( - "--device_id", - type=int, - default=0, - help="Define which GPU card used to run model.") - parser.add_argument( - "--cpu_thread_num", - type=int, - default=9, - help="Number of threads while inference on CPU.") - parser.add_argument( - "--cls_bs", - type=int, - default=1, - help="Classification model inference batch size.") - parser.add_argument( - "--rec_bs", - type=int, - default=6, - help="Recognition model inference batch size") - return parser.parse_args() - - -def build_option(args): - - det_option = fd.RuntimeOption() - cls_option = fd.RuntimeOption() - rec_option = fd.RuntimeOption() - - det_option.set_cpu_thread_num(args.cpu_thread_num) - cls_option.set_cpu_thread_num(args.cpu_thread_num) - rec_option.set_cpu_thread_num(args.cpu_thread_num) - - if args.device.lower() == "gpu": - det_option.use_gpu(args.device_id) - cls_option.use_gpu(args.device_id) - rec_option.use_gpu(args.device_id) - - if args.device.lower() == "kunlunxin": - det_option.use_kunlunxin() - 
cls_option.use_kunlunxin() - rec_option.use_kunlunxin() - - return det_option, cls_option, rec_option - - if args.backend.lower() == "trt": - assert args.device.lower( - ) == "gpu", "TensorRT backend require inference on device GPU." - det_option.use_trt_backend() - cls_option.use_trt_backend() - rec_option.use_trt_backend() - - # 设置trt input shape - # 如果用户想要自己改动检测模型的输入shape, 我们建议用户把检测模型的长和高设置为32的倍数. - det_option.set_trt_input_shape("x", [1, 3, 64, 64], [1, 3, 640, 640], - [1, 3, 960, 960]) - cls_option.set_trt_input_shape("x", [1, 3, 48, 10], - [args.cls_bs, 3, 48, 320], - [args.cls_bs, 3, 48, 1024]) - rec_option.set_trt_input_shape("x", [1, 3, 32, 10], - [args.rec_bs, 3, 32, 320], - [args.rec_bs, 3, 32, 2304]) - - # 用户可以把TRT引擎文件保存至本地 - det_option.set_trt_cache_file(args.det_model + "/det_trt_cache.trt") - cls_option.set_trt_cache_file(args.cls_model + "/cls_trt_cache.trt") - rec_option.set_trt_cache_file(args.rec_model + "/rec_trt_cache.trt") - - elif args.backend.lower() == "pptrt": - assert args.device.lower( - ) == "gpu", "Paddle-TensorRT backend require inference on device GPU." - det_option.use_trt_backend() - det_option.enable_paddle_trt_collect_shape() - det_option.enable_paddle_to_trt() - - cls_option.use_trt_backend() - cls_option.enable_paddle_trt_collect_shape() - cls_option.enable_paddle_to_trt() - - rec_option.use_trt_backend() - rec_option.enable_paddle_trt_collect_shape() - rec_option.enable_paddle_to_trt() - - # 设置trt input shape - # 如果用户想要自己改动检测模型的输入shape, 我们建议用户把检测模型的长和高设置为32的倍数. 
- det_option.set_trt_input_shape("x", [1, 3, 64, 64], [1, 3, 640, 640], - [1, 3, 960, 960]) - cls_option.set_trt_input_shape("x", [1, 3, 48, 10], - [args.cls_bs, 3, 48, 320], - [args.cls_bs, 3, 48, 1024]) - rec_option.set_trt_input_shape("x", [1, 3, 32, 10], - [args.rec_bs, 3, 32, 320], - [args.rec_bs, 3, 32, 2304]) - - # 用户可以把TRT引擎文件保存至本地 - det_option.set_trt_cache_file(args.det_model) - cls_option.set_trt_cache_file(args.cls_model) - rec_option.set_trt_cache_file(args.rec_model) - - elif args.backend.lower() == "ort": - det_option.use_ort_backend() - cls_option.use_ort_backend() - rec_option.use_ort_backend() - - elif args.backend.lower() == "paddle": - det_option.use_paddle_infer_backend() - cls_option.use_paddle_infer_backend() - rec_option.use_paddle_infer_backend() - - elif args.backend.lower() == "openvino": - assert args.device.lower( - ) == "cpu", "OpenVINO backend require inference on device CPU." - det_option.use_openvino_backend() - cls_option.use_openvino_backend() - rec_option.use_openvino_backend() - - return det_option, cls_option, rec_option - - -args = parse_arguments() - -# Detection模型, 检测文字框 -det_model_file = os.path.join(args.det_model, "inference.pdmodel") -det_params_file = os.path.join(args.det_model, "inference.pdiparams") -# Classification模型,方向分类,可选 -cls_model_file = os.path.join(args.cls_model, "inference.pdmodel") -cls_params_file = os.path.join(args.cls_model, "inference.pdiparams") -# Recognition模型,文字识别模型 -rec_model_file = os.path.join(args.rec_model, "inference.pdmodel") -rec_params_file = os.path.join(args.rec_model, "inference.pdiparams") -rec_label_file = args.rec_label_file - -# 对于三个模型,均采用同样的部署配置 -# 用户也可根据自己的需求,个性化配置 -det_option, cls_option, rec_option = build_option(args) - -det_model = fd.vision.ocr.DBDetector( - det_model_file, det_params_file, runtime_option=det_option) - -cls_model = fd.vision.ocr.Classifier( - cls_model_file, cls_params_file, runtime_option=cls_option) - -rec_model = fd.vision.ocr.Recognizer( - 
rec_model_file, rec_params_file, rec_label_file, runtime_option=rec_option) - -# 创建PP-OCR,串联3个模型,其中cls_model可选,如无需求,可设置为None -ppocr_v2 = fd.vision.ocr.PPOCRv2( - det_model=det_model, cls_model=cls_model, rec_model=rec_model) - -# 给cls和rec模型设置推理时的batch size -# 此值能为-1, 和1到正无穷 -# 当此值为-1时, cls和rec模型的batch size将默认和det模型检测出的框的数量相同 -ppocr_v2.cls_batch_size = args.cls_bs -ppocr_v2.rec_batch_size = args.rec_bs - -# 预测图片准备 -im = cv2.imread(args.image) - -#预测并打印结果 -result = ppocr_v2.predict(im) - -print(result) - -# 可视化结果 -vis_im = fd.vision.vis_ppocr(im, result) -cv2.imwrite("visualized_result.jpg", vis_im) -print("Visualized result save in ./visualized_result.jpg") diff --git a/examples/vision/ocr/PP-OCRv2/serving/README.md b/examples/vision/ocr/PP-OCRv2/serving/README.md deleted file mode 100644 index 2049564c9e..0000000000 --- a/examples/vision/ocr/PP-OCRv2/serving/README.md +++ /dev/null @@ -1,13 +0,0 @@ -English | [简体中文](README_CN.md) -# PP-OCRv2 Serving Deployment - -The serving deployment of PP-OCRv2 is identical to that of PP-OCRv3 except for `downloaded models` and `1 parameter for rec pre-processing`. Refer to [PP-OCRv3 serving deployment](../../PP-OCRv3/serving) - -## Download models -Change `v3` into `v2` in the download link. 
- -## Modify the rec pre-processing parameter -**Add the following code to line 109** in the file [model.py](../../PP-OCRv3/serving/models/det_postprocess/1/model.py#L109): -``` -self.rec_preprocessor.cls_image_shape[1] = 32 -``` diff --git a/examples/vision/ocr/PP-OCRv2/serving/README_CN.md b/examples/vision/ocr/PP-OCRv2/serving/README_CN.md deleted file mode 100644 index f83e8b0b4d..0000000000 --- a/examples/vision/ocr/PP-OCRv2/serving/README_CN.md +++ /dev/null @@ -1,13 +0,0 @@ -[English](README.md) | 简体中文 -# PP-OCRv2服务化部署示例 - -除了`下载的模型`和`rec前处理的1个参数`以外PP-OCRv2的服务化部署与PP-OCRv3服务化部署全部一样,请参考[PP-OCRv3服务化部署](../../PP-OCRv3/serving)。 - -## 下载模型 -将下载链接中的`v3`改为`v2`即可。 - -## 修改rec前处理参数 -在[model.py](../../PP-OCRv3/serving/models/det_postprocess/1/model.py#L109)文件**109行添加以下代码**: -``` -self.rec_preprocessor.cls_image_shape[1] = 32 -``` diff --git a/examples/vision/ocr/PP-OCRv3/android/.gitignore b/examples/vision/ocr/PP-OCRv3/android/.gitignore deleted file mode 100644 index f6eba672f0..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/.gitignore +++ /dev/null @@ -1,20 +0,0 @@ -.DS_Store -.idea -.gradle -.cxx -cache -build -app/cache -app/libs/fastdeploy* -app/.cxx -app/build -app/src/main/assets/models/* -app/.gradle -app/.idea -fastdeploy/cache -fastdeploy/libs/fastdeploy* -fastdeploy/.cxx -fastdeploy/build -fastdeploy/src/main/assets/models/* -fastdeploy/.gradle -fastdeploy/.idea diff --git a/examples/vision/ocr/PP-OCRv3/android/README.md b/examples/vision/ocr/PP-OCRv3/android/README.md deleted file mode 100644 index f770c73916..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/README.md +++ /dev/null @@ -1,222 +0,0 @@ -English | [简体中文](README_CN.md) -# OCR Text Recognition Android Demo Tutorial - -Real-time OCR text recognition on Android. This demo is easy to use for everyone. For example, you can run your own trained model in the demo. - -## Prepare the Environment - -1. Install Android Studio in your local environment. 
Refer to [Android Studio Official Website](https://developer.android.com/studio) for detailed tutorial. -2. Prepare an Android phone and turn on the USB debug mode. Opening: `Settings -> Find developer options -> Open developer options and USB debug mode` - -## Deployment steps - -1. The OCR text recognition Demo is located in the `fastdeploy/examples/vision/ocr/PP-OCRv3/android` -2. Open PP-OCRv2/android project with Android Studio -3. Connect the phone to the computer, turn on USB debug mode and file transfer mode, and connect your phone to Android Studio (allow the phone to install software from USB) - -

-image -

- -> **Attention:** ->> If you encounter an NDK configuration error during import, compilation or running, open ` File > Project Structure > SDK Location` and change the path of SDK configured by the `Andriod SDK location` - -4. Click the Run button to automatically compile the APP and install it to the phone. (The process will automatically download the pre-compiled FastDeploy Android library and model files. Internet is required). -The final effect is as follows. Figure 1: Install the APP on the phone; Figure 2: The effect after opening the APP. It will automatically recognize and mark the objects in the image; Figure 3: APP setting option. Click setting in the upper right corner and modify your options. - -| APP Icon | APP Effect | APP Settings - | --- | --- | --- | -| ![app_pic](https://user-images.githubusercontent.com/14995488/203484427-83de2316-fd60-4baf-93b6-3755f9b5559d.jpg) | ![app_res](https://user-images.githubusercontent.com/14995488/203495616-af42a5b7-d3bc-4fce-8d5e-2ed88454f618.jpg) | ![app_setup](https://user-images.githubusercontent.com/14995488/203484436-57fdd041-7dcc-4e0e-b6cb-43e5ac1e729b.jpg) | - -### PP-OCRv3 Java API Description - -- Model initialized API: The initialized API contains two ways: Firstly, initialize directly through the constructor. Secondly, initialize at the appropriate program node by calling the init function. PP-OCR initialization parameters are as follows: - - modelFile: String. Model file path in paddle format, such as model.pdmodel - - paramFile: String. Parameter file path in paddle format, such as model.pdiparams - - labelFile: String. This optional parameter indicates the path of the label file and is used for visualization. such as ppocr_keys_v1.txt, each line containing one label - - option: RuntimeOption. Optional parameter for model initialization. Default runtime options if the parameter is not passed. 
Different from other models, PP-OCRv3 contains base models such as DBDetector, Classifier, Recognizer and the pipeline type. -```java -// Constructor: constructor w/o label file -public DBDetector(String modelFile, String paramsFile); -public DBDetector(String modelFile, String paramsFile, RuntimeOption option); -public Classifier(String modelFile, String paramsFile); -public Classifier(String modelFile, String paramsFile, RuntimeOption option); -public Recognizer(String modelFile, String paramsFile, String labelPath); -public Recognizer(String modelFile, String paramsFile, String labelPath, RuntimeOption option); -public PPOCRv3(); // An empty constructor, which can be initialized by calling init -// Constructor w/o classifier -public PPOCRv3(DBDetector detModel, Recognizer recModel); -public PPOCRv3(DBDetector detModel, Classifier clsModel, Recognizer recModel); -``` -- Model Prediction API: The Model Prediction API contains an API for direct prediction and an API for visualization. In direct prediction, we do not save the image and render the result on Bitmap. Instead, we merely predict the inference result. For prediction and visualization, the results are both predicted and visualized, the visualized images are saved to the specified path, and the visualized results are rendered in Bitmap (Now Bitmap in ARGB8888 format is supported). Afterward, the Bitmap can be displayed on the camera. -```java -// Direct prediction: No image saving and no result rendering to Bitmap -public OCRResult predict(Bitmap ARGB8888Bitmap); -// Prediction and visualization: Predict and visualize the results, save the visualized image to the specified path, and render the visualized results on Bitmap -public OCRResult predict(Bitmap ARGB8888Bitmap, String savedImagePath); -public OCRResult predict(Bitmap ARGB8888Bitmap, boolean rendering); // Render without saving images -``` -- Model resource release API: Call release() API to release model resources. 
Return true for successful release and false for failure; call initialized() to determine whether the model was initialized successfully, with true indicating successful initialization and false indicating failure. -```java -public boolean release(); // Release native resources -public boolean initialized(); // Check if initialization was successful -``` - -- RuntimeOption settings - -```java -public void enableLiteFp16(); // Enable fp16 accuracy inference -public void disableLiteFP16(); // Disable fp16 accuracy inference -public void enableLiteInt8(); // Enable int8 accuracy inference for quantification models -public void disableLiteInt8(); // Disable int8 accuracy inference -public void setCpuThreadNum(int threadNum); // Set thread numbers -public void setLitePowerMode(LitePowerMode mode); // Set power mode -public void setLitePowerMode(String modeStr); // Set power mode through character string -``` - -- Model OCRResult -```java -public class OCRResult { - public int[][] mBoxes; // The coordinates of all target boxes in a single image. 
8 int values represent the 4 coordinate points of the box in the order of bottom left, bottom right, top right and top left - public String[] mText; // Recognized text in multiple text boxes - public float[] mRecScores; // Confidence of the recognized text in the box - public float[] mClsScores; // Confidence of the classification result of the text box - public int[] mClsLabels; // Directional classification of the text box - public boolean mInitialized = false; // Whether the result is valid or not -} -``` -Refer to [api/vision_results/ocr_result.md](https://github.com/PaddlePaddle/FastDeploy/blob/develop/docs/api/vision_results/ocr_result.md) for C++/Python OCRResult - - -- Model Calling Example 1: Using Constructor -```java -import java.nio.ByteBuffer; -import android.graphics.Bitmap; -import android.opengl.GLES20; - -import com.baidu.paddle.fastdeploy.RuntimeOption; -import com.baidu.paddle.fastdeploy.LitePowerMode; -import com.baidu.paddle.fastdeploy.vision.OCRResult; -import com.baidu.paddle.fastdeploy.vision.ocr.Classifier; -import com.baidu.paddle.fastdeploy.vision.ocr.DBDetector; -import com.baidu.paddle.fastdeploy.vision.ocr.Recognizer; - -// Model path -String detModelFile = "ch_PP-OCRv3_det_infer/inference.pdmodel"; -String detParamsFile = "ch_PP-OCRv3_det_infer/inference.pdiparams"; -String clsModelFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdmodel"; -String clsParamsFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdiparams"; -String recModelFile = "ch_PP-OCRv3_rec_infer/inference.pdmodel"; -String recParamsFile = "ch_PP-OCRv3_rec_infer/inference.pdiparams"; -String recLabelFilePath = "labels/ppocr_keys_v1.txt"; -// Set the RuntimeOption -RuntimeOption detOption = new RuntimeOption(); -RuntimeOption clsOption = new RuntimeOption(); -RuntimeOption recOption = new RuntimeOption(); -detOption.setCpuThreadNum(2); -clsOption.setCpuThreadNum(2); -recOption.setCpuThreadNum(2); -detOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); 
-clsOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -recOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -detOption.enableLiteFp16(); -clsOption.enableLiteFp16(); -recOption.enableLiteFp16(); -// Initialize the model -DBDetector detModel = new DBDetector(detModelFile, detParamsFile, detOption); -Classifier clsModel = new Classifier(clsModelFile, clsParamsFile, clsOption); -Recognizer recModel = new Recognizer(recModelFile, recParamsFile, recLabelFilePath, recOption); -PPOCRv3 model = new PPOCRv3(detModel,clsModel,recModel); - -// Read the image: The following is merely the pseudo code to read the Bitmap -ByteBuffer pixelBuffer = ByteBuffer.allocate(width * height * 4); -GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer); -Bitmap ARGB8888ImageBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888); -ARGB8888ImageBitmap.copyPixelsFromBuffer(pixelBuffer); - -// Model Inference -OCRResult result = model.predict(ARGB8888ImageBitmap); - -// Release model resources -model.release(); -``` - -- Model calling example 2: Manually call init at the appropriate program node -```java -// import is as above... 
-import com.baidu.paddle.fastdeploy.RuntimeOption; -import com.baidu.paddle.fastdeploy.LitePowerMode; -import com.baidu.paddle.fastdeploy.vision.OCRResult; -import com.baidu.paddle.fastdeploy.vision.ocr.Classifier; -import com.baidu.paddle.fastdeploy.vision.ocr.DBDetector; -import com.baidu.paddle.fastdeploy.vision.ocr.Recognizer; -// Create an empty model -PPOCRv3 model = new PPOCRv3(); -// Model path -String detModelFile = "ch_PP-OCRv3_det_infer/inference.pdmodel"; -String detParamsFile = "ch_PP-OCRv3_det_infer/inference.pdiparams"; -String clsModelFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdmodel"; -String clsParamsFile = "ch_ppocr_mobile_v2.0_cls_infer/inference.pdiparams"; -String recModelFile = "ch_PP-OCRv3_rec_infer/inference.pdmodel"; -String recParamsFile = "ch_PP-OCRv3_rec_infer/inference.pdiparams"; -String recLabelFilePath = "labels/ppocr_keys_v1.txt"; -// Set the RuntimeOption -RuntimeOption detOption = new RuntimeOption(); -RuntimeOption clsOption = new RuntimeOption(); -RuntimeOption recOption = new RuntimeOption(); -detOption.setCpuThreadNum(2); -clsOption.setCpuThreadNum(2); -recOption.setCpuThreadNum(2); -detOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -clsOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -recOption.setLitePowerMode(LitePowerMode.LITE_POWER_HIGH); -detOption.enableLiteFp16(); -clsOption.enableLiteFp16(); -recOption.enableLiteFp16(); -// Use init function for initialization -DBDetector detModel = new DBDetector(detModelFile, detParamsFile, detOption); -Classifier clsModel = new Classifier(clsModelFile, clsParamsFile, clsOption); -Recognizer recModel = new Recognizer(recModelFile, recParamsFile, recLabelFilePath, recOption); -model.init(detModel, clsModel, recModel); -// Bitmap reading, model prediction, and resource release are as above ... 
-``` -Refer to [OcrMainActivity](./app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java) for more details - -## Replace FastDeploy SDK and Models -It’s simple to replace the FastDeploy prediction library and models. The prediction library is located at `app/libs/fastdeploy-android-sdk-xxx.aar`, where `xxx` represents the version of your prediction library. The models are located at `app/src/main/assets/models`. -- Replace the FastDeploy Android SDK: Download or compile the latest FastDeploy Android SDK, unzip and place it in the `app/libs`; For detailed configuration, refer to - - [FastDeploy Java SDK in Android](../../../../../java/android/) - -- Steps to replace OCR models: - - Put your OCR model in `app/src/main/assets/models`; - - Modify the default value of the model path in `app/src/main/res/values/strings.xml`. For example, -```xml - -models -labels/ppocr_keys_v1.txt -``` -## Use quantification models -If you're using quantification models, set Int8 accuracy inference using the interface enableLiteInt8() of RuntimeOption. 
-```java -String detModelFile = "ch_ppocrv3_plate_det_quant/inference.pdmodel"; -String detParamsFile = "ch_ppocrv3_plate_det_quant/inference.pdiparams"; -String recModelFile = "ch_ppocrv3_plate_rec_distillation_quant/inference.pdmodel"; -String recParamsFile = "ch_ppocrv3_plate_rec_distillation_quant/inference.pdiparams"; -String recLabelFilePath = "ppocr_keys_v1.txt"; // ppocr_keys_v1.txt -RuntimeOption detOption = new RuntimeOption(); -RuntimeOption recOption = new RuntimeOption(); -// Use Int8 accuracy for inference -detOption.enableLiteInt8(); -recOption.enableLiteInt8(); -// Initialize PP-OCRv3 Pipeline -PPOCRv3 predictor = new PPOCRv3(); -DBDetector detModel = new DBDetector(detModelFile, detParamsFile, detOption); -Recognizer recModel = new Recognizer(recModelFile, recParamsFile, recLabelFilePath, recOption); -predictor.init(detModel, recModel); -``` -Refer to [OcrMainActivity.java](./app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrMainActivity.java) for use-pattern in APP. - -## More Reference Documents -For more FastDeploy Java API documentes and how to access FastDeploy C++ API via JNI, refer to: -- [FastDeploy Java SDK in Android](../../../../../java/android/) -- [FastDeploy C++ SDK in Android](../../../../../docs/en/faq/use_cpp_sdk_on_android.md) diff --git a/examples/vision/ocr/PP-OCRv3/android/app/proguard-rules.pro b/examples/vision/ocr/PP-OCRv3/android/app/proguard-rules.pro deleted file mode 100644 index 481bb43481..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/proguard-rules.pro +++ /dev/null @@ -1,21 +0,0 @@ -# Add project specific ProGuard rules here. -# You can control the set of applied configuration files using the -# proguardFiles setting in build.gradle. 
-# -# For more details, see -# http://developer.android.com/guide/developing/tools/proguard.html - -# If your project uses WebView with JS, uncomment the following -# and specify the fully qualified class name to the JavaScript interface -# class: -#-keepclassmembers class fqcn.of.javascript.interface.for.webview { -# public *; -#} - -# Uncomment this to preserve the line number information for -# debugging stack traces. -#-keepattributes SourceFile,LineNumberTable - -# If you keep the line number information, uncomment this to -# hide the original source file name. -#-renamesourcefileattribute SourceFile \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java b/examples/vision/ocr/PP-OCRv3/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java deleted file mode 100644 index 0efacb7909..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/androidTest/java/com/baidu/paddle/fastdeploy/ExampleInstrumentedTest.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.baidu.paddle.fastdeploy; - -import android.content.Context; - -import androidx.test.platform.app.InstrumentationRegistry; -import androidx.test.ext.junit.runners.AndroidJUnit4; - -import org.junit.Test; -import org.junit.runner.RunWith; - -import static org.junit.Assert.*; - -/** - * Instrumented test, which will execute on an Android device. - * - * @see Testing documentation - */ -@RunWith(AndroidJUnit4.class) -public class ExampleInstrumentedTest { - @Test - public void useAppContext() { - // Context of the app under test. 
- Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext(); - assertEquals("com.baidu.paddle.fastdeploy", appContext.getPackageName()); - } -} \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/AndroidManifest.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/AndroidManifest.xml deleted file mode 100644 index 8493c0379f..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/AndroidManifest.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/assets/labels/ppocr_keys_v1.txt b/examples/vision/ocr/PP-OCRv3/android/app/src/main/assets/labels/ppocr_keys_v1.txt deleted file mode 100644 index b75af21303..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/assets/labels/ppocr_keys_v1.txt +++ /dev/null @@ -1,6623 +0,0 @@ -' -疗 -绚 -诚 -娇 -溜 -题 -贿 -者 -廖 -更 -纳 -加 -奉 -公 -一 -就 -汴 -计 -与 -路 -房 -原 -妇 -2 -0 -8 -- -7 -其 -> -: -] -, -, -骑 -刈 -全 -消 -昏 -傈 -安 -久 -钟 -嗅 -不 -影 -处 -驽 -蜿 -资 -关 -椤 -地 -瘸 -专 -问 -忖 -票 -嫉 -炎 -韵 -要 -月 -田 -节 -陂 -鄙 -捌 -备 -拳 -伺 -眼 -网 -盎 -大 -傍 -心 -东 -愉 -汇 -蹿 -科 -每 -业 -里 -航 -晏 -字 -平 -录 -先 -1 -3 -彤 -鲶 -产 -稍 -督 -腴 -有 -象 -岳 -注 -绍 -在 -泺 -文 -定 -核 -名 -水 -过 -理 -让 -偷 -率 -等 -这 -发 -” -为 -含 -肥 -酉 -相 -鄱 -七 -编 -猥 -锛 -日 -镀 -蒂 -掰 -倒 -辆 -栾 -栗 -综 -涩 -州 -雌 -滑 -馀 -了 -机 -块 -司 -宰 -甙 -兴 -矽 -抚 -保 -用 -沧 -秩 -如 -收 -息 -滥 -页 -疑 -埠 -! -! 
-姥 -异 -橹 -钇 -向 -下 -跄 -的 -椴 -沫 -国 -绥 -獠 -报 -开 -民 -蜇 -何 -分 -凇 -长 -讥 -藏 -掏 -施 -羽 -中 -讲 -派 -嘟 -人 -提 -浼 -间 -世 -而 -古 -多 -倪 -唇 -饯 -控 -庚 -首 -赛 -蜓 -味 -断 -制 -觉 -技 -替 -艰 -溢 -潮 -夕 -钺 -外 -摘 -枋 -动 -双 -单 -啮 -户 -枇 -确 -锦 -曜 -杜 -或 -能 -效 -霜 -盒 -然 -侗 -电 -晁 -放 -步 -鹃 -新 -杖 -蜂 -吒 -濂 -瞬 -评 -总 -隍 -对 -独 -合 -也 -是 -府 -青 -天 -诲 -墙 -组 -滴 -级 -邀 -帘 -示 -已 -时 -骸 -仄 -泅 -和 -遨 -店 -雇 -疫 -持 -巍 -踮 -境 -只 -亨 -目 -鉴 -崤 -闲 -体 -泄 -杂 -作 -般 -轰 -化 -解 -迂 -诿 -蛭 -璀 -腾 -告 -版 -服 -省 -师 -小 -规 -程 -线 -海 -办 -引 -二 -桧 -牌 -砺 -洄 -裴 -修 -图 -痫 -胡 -许 -犊 -事 -郛 -基 -柴 -呼 -食 -研 -奶 -律 -蛋 -因 -葆 -察 -戏 -褒 -戒 -再 -李 -骁 -工 -貂 -油 -鹅 -章 -啄 -休 -场 -给 -睡 -纷 -豆 -器 -捎 -说 -敏 -学 -会 -浒 -设 -诊 -格 -廓 -查 -来 -霓 -室 -溆 -¢ -诡 -寥 -焕 -舜 -柒 -狐 -回 -戟 -砾 -厄 -实 -翩 -尿 -五 -入 -径 -惭 -喹 -股 -宇 -篝 -| -; -美 -期 -云 -九 -祺 -扮 -靠 -锝 -槌 -系 -企 -酰 -阊 -暂 -蚕 -忻 -豁 -本 -羹 -执 -条 -钦 -H -獒 -限 -进 -季 -楦 -于 -芘 -玖 -铋 -茯 -未 -答 -粘 -括 -样 -精 -欠 -矢 -甥 -帷 -嵩 -扣 -令 -仔 -风 -皈 -行 -支 -部 -蓉 -刮 -站 -蜡 -救 -钊 -汗 -松 -嫌 -成 -可 -. -鹤 -院 -从 -交 -政 -怕 -活 -调 -球 -局 -验 -髌 -第 -韫 -谗 -串 -到 -圆 -年 -米 -/ -* -友 -忿 -检 -区 -看 -自 -敢 -刃 -个 -兹 -弄 -流 -留 -同 -没 -齿 -星 -聆 -轼 -湖 -什 -三 -建 -蛔 -儿 -椋 -汕 -震 -颧 -鲤 -跟 -力 -情 -璺 -铨 -陪 -务 -指 -族 -训 -滦 -鄣 -濮 -扒 -商 -箱 -十 -召 -慷 -辗 -所 -莞 -管 -护 -臭 -横 -硒 -嗓 -接 -侦 -六 -露 -党 -馋 -驾 -剖 -高 -侬 -妪 -幂 -猗 -绺 -骐 -央 -酐 -孝 -筝 -课 -徇 -缰 -门 -男 -西 -项 -句 -谙 -瞒 -秃 -篇 -教 -碲 -罚 -声 -呐 -景 -前 -富 -嘴 -鳌 -稀 -免 -朋 -啬 -睐 -去 -赈 -鱼 -住 -肩 -愕 -速 -旁 -波 -厅 -健 -茼 -厥 -鲟 -谅 -投 -攸 -炔 -数 -方 -击 -呋 -谈 -绩 -别 -愫 -僚 -躬 -鹧 -胪 -炳 -招 -喇 -膨 -泵 -蹦 -毛 -结 -5 -4 -谱 -识 -陕 -粽 -婚 -拟 -构 -且 -搜 -任 -潘 -比 -郢 -妨 -醪 -陀 -桔 -碘 -扎 -选 -哈 -骷 -楷 -亿 -明 -缆 -脯 -监 -睫 -逻 -婵 -共 -赴 -淝 -凡 -惦 -及 -达 -揖 -谩 -澹 -减 -焰 -蛹 -番 -祁 -柏 -员 -禄 -怡 -峤 -龙 -白 -叽 -生 -闯 -起 -细 -装 -谕 -竟 -聚 -钙 -上 -导 -渊 -按 -艾 -辘 -挡 -耒 -盹 -饪 -臀 -记 -邮 -蕙 -受 -各 -医 -搂 -普 -滇 -朗 -茸 -带 -翻 -酚 -( -光 -堤 -墟 -蔷 -万 -幻 -〓 -瑙 -辈 -昧 -盏 -亘 -蛀 -吉 -铰 -请 -子 -假 -闻 -税 -井 -诩 -哨 -嫂 -好 -面 -琐 -校 -馊 -鬣 -缂 -营 -访 -炖 -占 -农 -缀 -否 -经 -钚 -棵 -趟 -张 -亟 -吏 -茶 -谨 -捻 -论 -迸 -堂 -玉 -信 -吧 -瞠 -乡 -姬 -寺 -咬 -溏 -苄 -皿 -意 -赉 -宝 -尔 -钰 -艺 -特 -唳 -踉 -都 -荣 -倚 -登 -荐 -丧 -奇 -涵 -批 -炭 -近 -符 -傩 -感 -道 -着 -菊 -虹 -仲 -众 -懈 -濯 -颞 -眺 -南 -释 -北 -缝 -标 -既 -茗 -整 -撼 -迤 -贲 -挎 -耱 -拒 -某 -妍 -卫 
-哇 -英 -矶 -藩 -治 -他 -元 -领 -膜 -遮 -穗 -蛾 -飞 -荒 -棺 -劫 -么 -市 -火 -温 -拈 -棚 -洼 -转 -果 -奕 -卸 -迪 -伸 -泳 -斗 -邡 -侄 -涨 -屯 -萋 -胭 -氡 -崮 -枞 -惧 -冒 -彩 -斜 -手 -豚 -随 -旭 -淑 -妞 -形 -菌 -吲 -沱 -争 -驯 -歹 -挟 -兆 -柱 -传 -至 -包 -内 -响 -临 -红 -功 -弩 -衡 -寂 -禁 -老 -棍 -耆 -渍 -织 -害 -氵 -渑 -布 -载 -靥 -嗬 -虽 -苹 -咨 -娄 -库 -雉 -榜 -帜 -嘲 -套 -瑚 -亲 -簸 -欧 -边 -6 -腿 -旮 -抛 -吹 -瞳 -得 -镓 -梗 -厨 -继 -漾 -愣 -憨 -士 -策 -窑 -抑 -躯 -襟 -脏 -参 -贸 -言 -干 -绸 -鳄 -穷 -藜 -音 -折 -详 -) -举 -悍 -甸 -癌 -黎 -谴 -死 -罩 -迁 -寒 -驷 -袖 -媒 -蒋 -掘 -模 -纠 -恣 -观 -祖 -蛆 -碍 -位 -稿 -主 -澧 -跌 -筏 -京 -锏 -帝 -贴 -证 -糠 -才 -黄 -鲸 -略 -炯 -饱 -四 -出 -园 -犀 -牧 -容 -汉 -杆 -浈 -汰 -瑷 -造 -虫 -瘩 -怪 -驴 -济 -应 -花 -沣 -谔 -夙 -旅 -价 -矿 -以 -考 -s -u -呦 -晒 -巡 -茅 -准 -肟 -瓴 -詹 -仟 -褂 -译 -桌 -混 -宁 -怦 -郑 -抿 -些 -余 -鄂 -饴 -攒 -珑 -群 -阖 -岔 -琨 -藓 -预 -环 -洮 -岌 -宀 -杲 -瀵 -最 -常 -囡 -周 -踊 -女 -鼓 -袭 -喉 -简 -范 -薯 -遐 -疏 -粱 -黜 -禧 -法 -箔 -斤 -遥 -汝 -奥 -直 -贞 -撑 -置 -绱 -集 -她 -馅 -逗 -钧 -橱 -魉 -[ -恙 -躁 -唤 -9 -旺 -膘 -待 -脾 -惫 -购 -吗 -依 -盲 -度 -瘿 -蠖 -俾 -之 -镗 -拇 -鲵 -厝 -簧 -续 -款 -展 -啃 -表 -剔 -品 -钻 -腭 -损 -清 -锶 -统 -涌 -寸 -滨 -贪 -链 -吠 -冈 -伎 -迥 -咏 -吁 -览 -防 -迅 -失 -汾 -阔 -逵 -绀 -蔑 -列 -川 -凭 -努 -熨 -揪 -利 -俱 -绉 -抢 -鸨 -我 -即 -责 -膦 -易 -毓 -鹊 -刹 -玷 -岿 -空 -嘞 -绊 -排 -术 -估 -锷 -违 -们 -苟 -铜 -播 -肘 -件 -烫 -审 -鲂 -广 -像 -铌 -惰 -铟 -巳 -胍 -鲍 -康 -憧 -色 -恢 -想 -拷 -尤 -疳 -知 -S -Y -F -D -A -峄 -裕 -帮 -握 -搔 -氐 -氘 -难 -墒 -沮 -雨 -叁 -缥 -悴 -藐 -湫 -娟 -苑 -稠 -颛 -簇 -后 -阕 -闭 -蕤 -缚 -怎 -佞 -码 -嘤 -蔡 -痊 -舱 -螯 -帕 -赫 -昵 -升 -烬 -岫 -、 -疵 -蜻 -髁 -蕨 -隶 -烛 -械 -丑 -盂 -梁 -强 -鲛 -由 -拘 -揉 -劭 -龟 -撤 -钩 -呕 -孛 -费 -妻 -漂 -求 -阑 -崖 -秤 -甘 -通 -深 -补 -赃 -坎 -床 -啪 -承 -吼 -量 -暇 -钼 -烨 -阂 -擎 -脱 -逮 -称 -P -神 -属 -矗 -华 -届 -狍 -葑 -汹 -育 -患 -窒 -蛰 -佼 -静 -槎 -运 -鳗 -庆 -逝 -曼 -疱 -克 -代 -官 -此 -麸 -耧 -蚌 -晟 -例 -础 -榛 -副 -测 -唰 -缢 -迹 -灬 -霁 -身 -岁 -赭 -扛 -又 -菡 -乜 -雾 -板 -读 -陷 -徉 -贯 -郁 -虑 -变 -钓 -菜 -圾 -现 -琢 -式 -乐 -维 -渔 -浜 -左 -吾 -脑 -钡 -警 -T -啵 -拴 -偌 -漱 -湿 -硕 -止 -骼 -魄 -积 -燥 -联 -踢 -玛 -则 -窿 -见 -振 -畿 -送 -班 -钽 -您 -赵 -刨 -印 -讨 -踝 -籍 -谡 -舌 -崧 -汽 -蔽 -沪 -酥 -绒 -怖 -财 -帖 -肱 -私 -莎 -勋 -羔 -霸 -励 -哼 -帐 -将 -帅 -渠 -纪 -婴 -娩 -岭 -厘 -滕 -吻 -伤 -坝 -冠 -戊 -隆 -瘁 -介 -涧 -物 -黍 -并 -姗 -奢 -蹑 -掣 -垸 -锴 -命 -箍 -捉 -病 -辖 -琰 -眭 -迩 -艘 -绌 -繁 -寅 -若 -毋 -思 -诉 -类 -诈 -燮 -轲 -酮 -狂 -重 -反 -职 -筱 -县 -委 -磕 -绣 -奖 -晋 -濉 -志 -徽 -肠 -呈 -獐 -坻 -口 -片 -碰 
-几 -村 -柿 -劳 -料 -获 -亩 -惕 -晕 -厌 -号 -罢 -池 -正 -鏖 -煨 -家 -棕 -复 -尝 -懋 -蜥 -锅 -岛 -扰 -队 -坠 -瘾 -钬 -@ -卧 -疣 -镇 -譬 -冰 -彷 -频 -黯 -据 -垄 -采 -八 -缪 -瘫 -型 -熹 -砰 -楠 -襁 -箐 -但 -嘶 -绳 -啤 -拍 -盥 -穆 -傲 -洗 -盯 -塘 -怔 -筛 -丿 -台 -恒 -喂 -葛 -永 -¥ -烟 -酒 -桦 -书 -砂 -蚝 -缉 -态 -瀚 -袄 -圳 -轻 -蛛 -超 -榧 -遛 -姒 -奘 -铮 -右 -荽 -望 -偻 -卡 -丶 -氰 -附 -做 -革 -索 -戚 -坨 -桷 -唁 -垅 -榻 -岐 -偎 -坛 -莨 -山 -殊 -微 -骇 -陈 -爨 -推 -嗝 -驹 -澡 -藁 -呤 -卤 -嘻 -糅 -逛 -侵 -郓 -酌 -德 -摇 -※ -鬃 -被 -慨 -殡 -羸 -昌 -泡 -戛 -鞋 -河 -宪 -沿 -玲 -鲨 -翅 -哽 -源 -铅 -语 -照 -邯 -址 -荃 -佬 -顺 -鸳 -町 -霭 -睾 -瓢 -夸 -椁 -晓 -酿 -痈 -咔 -侏 -券 -噎 -湍 -签 -嚷 -离 -午 -尚 -社 -锤 -背 -孟 -使 -浪 -缦 -潍 -鞅 -军 -姹 -驶 -笑 -鳟 -鲁 -》 -孽 -钜 -绿 -洱 -礴 -焯 -椰 -颖 -囔 -乌 -孔 -巴 -互 -性 -椽 -哞 -聘 -昨 -早 -暮 -胶 -炀 -隧 -低 -彗 -昝 -铁 -呓 -氽 -藉 -喔 -癖 -瑗 -姨 -权 -胱 -韦 -堑 -蜜 -酋 -楝 -砝 -毁 -靓 -歙 -锲 -究 -屋 -喳 -骨 -辨 -碑 -武 -鸠 -宫 -辜 -烊 -适 -坡 -殃 -培 -佩 -供 -走 -蜈 -迟 -翼 -况 -姣 -凛 -浔 -吃 -飘 -债 -犟 -金 -促 -苛 -崇 -坂 -莳 -畔 -绂 -兵 -蠕 -斋 -根 -砍 -亢 -欢 -恬 -崔 -剁 -餐 -榫 -快 -扶 -‖ -濒 -缠 -鳜 -当 -彭 -驭 -浦 -篮 -昀 -锆 -秸 -钳 -弋 -娣 -瞑 -夷 -龛 -苫 -拱 -致 -% -嵊 -障 -隐 -弑 -初 -娓 -抉 -汩 -累 -蓖 -" -唬 -助 -苓 -昙 -押 -毙 -破 -城 -郧 -逢 -嚏 -獭 -瞻 -溱 -婿 -赊 -跨 -恼 -璧 -萃 -姻 -貉 -灵 -炉 -密 -氛 -陶 -砸 -谬 -衔 -点 -琛 -沛 -枳 -层 -岱 -诺 -脍 -榈 -埂 -征 -冷 -裁 -打 -蹴 -素 -瘘 -逞 -蛐 -聊 -激 -腱 -萘 -踵 -飒 -蓟 -吆 -取 -咙 -簋 -涓 -矩 -曝 -挺 -揣 -座 -你 -史 -舵 -焱 -尘 -苏 -笈 -脚 -溉 -榨 -诵 -樊 -邓 -焊 -义 -庶 -儋 -蟋 -蒲 -赦 -呷 -杞 -诠 -豪 -还 -试 -颓 -茉 -太 -除 -紫 -逃 -痴 -草 -充 -鳕 -珉 -祗 -墨 -渭 -烩 -蘸 -慕 -璇 -镶 -穴 -嵘 -恶 -骂 -险 -绋 -幕 -碉 -肺 -戳 -刘 -潞 -秣 -纾 -潜 -銮 -洛 -须 -罘 -销 -瘪 -汞 -兮 -屉 -r -林 -厕 -质 -探 -划 -狸 -殚 -善 -煊 -烹 -〒 -锈 -逯 -宸 -辍 -泱 -柚 -袍 -远 -蹋 -嶙 -绝 -峥 -娥 -缍 -雀 -徵 -认 -镱 -谷 -= -贩 -勉 -撩 -鄯 -斐 -洋 -非 -祚 -泾 -诒 -饿 -撬 -威 -晷 -搭 -芍 -锥 -笺 -蓦 -候 -琊 -档 -礁 -沼 -卵 -荠 -忑 -朝 -凹 -瑞 -头 -仪 -弧 -孵 -畏 -铆 -突 -衲 -车 -浩 -气 -茂 -悖 -厢 -枕 -酝 -戴 -湾 -邹 -飚 -攘 -锂 -写 -宵 -翁 -岷 -无 -喜 -丈 -挑 -嗟 -绛 -殉 -议 -槽 -具 -醇 -淞 -笃 -郴 -阅 -饼 -底 -壕 -砚 -弈 -询 -缕 -庹 -翟 -零 -筷 -暨 -舟 -闺 -甯 -撞 -麂 -茌 -蔼 -很 -珲 -捕 -棠 -角 -阉 -媛 -娲 -诽 -剿 -尉 -爵 -睬 -韩 -诰 -匣 -危 -糍 -镯 -立 -浏 -阳 -少 -盆 -舔 -擘 -匪 -申 -尬 -铣 -旯 -抖 -赘 -瓯 -居 -ˇ -哮 -游 -锭 -茏 -歌 -坏 -甚 -秒 -舞 -沙 -仗 -劲 -潺 -阿 -燧 -郭 -嗖 -霏 -忠 -材 -奂 -耐 -跺 -砀 -输 -岖 -媳 -氟 -极 -摆 -灿 -今 -扔 -腻 -枝 -奎 -药 -熄 -吨 -话 -q -额 -慑 -嘌 -协 -喀 -壳 -埭 -视 -著 
-於 -愧 -陲 -翌 -峁 -颅 -佛 -腹 -聋 -侯 -咎 -叟 -秀 -颇 -存 -较 -罪 -哄 -岗 -扫 -栏 -钾 -羌 -己 -璨 -枭 -霉 -煌 -涸 -衿 -键 -镝 -益 -岢 -奏 -连 -夯 -睿 -冥 -均 -糖 -狞 -蹊 -稻 -爸 -刿 -胥 -煜 -丽 -肿 -璃 -掸 -跚 -灾 -垂 -樾 -濑 -乎 -莲 -窄 -犹 -撮 -战 -馄 -软 -络 -显 -鸢 -胸 -宾 -妲 -恕 -埔 -蝌 -份 -遇 -巧 -瞟 -粒 -恰 -剥 -桡 -博 -讯 -凯 -堇 -阶 -滤 -卖 -斌 -骚 -彬 -兑 -磺 -樱 -舷 -两 -娱 -福 -仃 -差 -找 -桁 -÷ -净 -把 -阴 -污 -戬 -雷 -碓 -蕲 -楚 -罡 -焖 -抽 -妫 -咒 -仑 -闱 -尽 -邑 -菁 -爱 -贷 -沥 -鞑 -牡 -嗉 -崴 -骤 -塌 -嗦 -订 -拮 -滓 -捡 -锻 -次 -坪 -杩 -臃 -箬 -融 -珂 -鹗 -宗 -枚 -降 -鸬 -妯 -阄 -堰 -盐 -毅 -必 -杨 -崃 -俺 -甬 -状 -莘 -货 -耸 -菱 -腼 -铸 -唏 -痤 -孚 -澳 -懒 -溅 -翘 -疙 -杷 -淼 -缙 -骰 -喊 -悉 -砻 -坷 -艇 -赁 -界 -谤 -纣 -宴 -晃 -茹 -归 -饭 -梢 -铡 -街 -抄 -肼 -鬟 -苯 -颂 -撷 -戈 -炒 -咆 -茭 -瘙 -负 -仰 -客 -琉 -铢 -封 -卑 -珥 -椿 -镧 -窨 -鬲 -寿 -御 -袤 -铃 -萎 -砖 -餮 -脒 -裳 -肪 -孕 -嫣 -馗 -嵇 -恳 -氯 -江 -石 -褶 -冢 -祸 -阻 -狈 -羞 -银 -靳 -透 -咳 -叼 -敷 -芷 -啥 -它 -瓤 -兰 -痘 -懊 -逑 -肌 -往 -捺 -坊 -甩 -呻 -〃 -沦 -忘 -膻 -祟 -菅 -剧 -崆 -智 -坯 -臧 -霍 -墅 -攻 -眯 -倘 -拢 -骠 -铐 -庭 -岙 -瓠 -′ -缺 -泥 -迢 -捶 -? -? -郏 -喙 -掷 -沌 -纯 -秘 -种 -听 -绘 -固 -螨 -团 -香 -盗 -妒 -埚 -蓝 -拖 -旱 -荞 -铀 -血 -遏 -汲 -辰 -叩 -拽 -幅 -硬 -惶 -桀 -漠 -措 -泼 -唑 -齐 -肾 -念 -酱 -虚 -屁 -耶 -旗 -砦 -闵 -婉 -馆 -拭 -绅 -韧 -忏 -窝 -醋 -葺 -顾 -辞 -倜 -堆 -辋 -逆 -玟 -贱 -疾 -董 -惘 -倌 -锕 -淘 -嘀 -莽 -俭 -笏 -绑 -鲷 -杈 -择 -蟀 -粥 -嗯 -驰 -逾 -案 -谪 -褓 -胫 -哩 -昕 -颚 -鲢 -绠 -躺 -鹄 -崂 -儒 -俨 -丝 -尕 -泌 -啊 -萸 -彰 -幺 -吟 -骄 -苣 -弦 -脊 -瑰 -〈 -诛 -镁 -析 -闪 -剪 -侧 -哟 -框 -螃 -守 -嬗 -燕 -狭 -铈 -缮 -概 -迳 -痧 -鲲 -俯 -售 -笼 -痣 -扉 -挖 -满 -咋 -援 -邱 -扇 -歪 -便 -玑 -绦 -峡 -蛇 -叨 -〖 -泽 -胃 -斓 -喋 -怂 -坟 -猪 -该 -蚬 -炕 -弥 -赞 -棣 -晔 -娠 -挲 -狡 -创 -疖 -铕 -镭 -稷 -挫 -弭 -啾 -翔 -粉 -履 -苘 -哦 -楼 -秕 -铂 -土 -锣 -瘟 -挣 -栉 -习 -享 -桢 -袅 -磨 -桂 -谦 -延 -坚 -蔚 -噗 -署 -谟 -猬 -钎 -恐 -嬉 -雒 -倦 -衅 -亏 -璩 -睹 -刻 -殿 -王 -算 -雕 -麻 -丘 -柯 -骆 -丸 -塍 -谚 -添 -鲈 -垓 -桎 -蚯 -芥 -予 -飕 -镦 -谌 -窗 -醚 -菀 -亮 -搪 -莺 -蒿 -羁 -足 -J -真 -轶 -悬 -衷 -靛 -翊 -掩 -哒 -炅 -掐 -冼 -妮 -l -谐 -稚 -荆 -擒 -犯 -陵 -虏 -浓 -崽 -刍 -陌 -傻 -孜 -千 -靖 -演 -矜 -钕 -煽 -杰 -酗 -渗 -伞 -栋 -俗 -泫 -戍 -罕 -沾 -疽 -灏 -煦 -芬 -磴 -叱 -阱 -榉 -湃 -蜀 -叉 -醒 -彪 -租 -郡 -篷 -屎 -良 -垢 -隗 -弱 -陨 -峪 -砷 -掴 -颁 -胎 -雯 -绵 -贬 -沐 -撵 -隘 -篙 -暖 -曹 -陡 -栓 -填 -臼 -彦 -瓶 -琪 -潼 -哪 -鸡 -摩 -啦 -俟 -锋 -域 -耻 -蔫 -疯 -纹 -撇 -毒 -绶 -痛 -酯 -忍 -爪 -赳 -歆 -嘹 -辕 -烈 -册 -朴 -钱 -吮 -毯 -癜 -娃 -谀 -邵 -厮 -炽 -璞 -邃 -丐 -追 -词 -瓒 -忆 -轧 -芫 -谯 -喷 -弟 -半 -冕 
-裙 -掖 -墉 -绮 -寝 -苔 -势 -顷 -褥 -切 -衮 -君 -佳 -嫒 -蚩 -霞 -佚 -洙 -逊 -镖 -暹 -唛 -& -殒 -顶 -碗 -獗 -轭 -铺 -蛊 -废 -恹 -汨 -崩 -珍 -那 -杵 -曲 -纺 -夏 -薰 -傀 -闳 -淬 -姘 -舀 -拧 -卷 -楂 -恍 -讪 -厩 -寮 -篪 -赓 -乘 -灭 -盅 -鞣 -沟 -慎 -挂 -饺 -鼾 -杳 -树 -缨 -丛 -絮 -娌 -臻 -嗳 -篡 -侩 -述 -衰 -矛 -圈 -蚜 -匕 -筹 -匿 -濞 -晨 -叶 -骋 -郝 -挚 -蚴 -滞 -增 -侍 -描 -瓣 -吖 -嫦 -蟒 -匾 -圣 -赌 -毡 -癞 -恺 -百 -曳 -需 -篓 -肮 -庖 -帏 -卿 -驿 -遗 -蹬 -鬓 -骡 -歉 -芎 -胳 -屐 -禽 -烦 -晌 -寄 -媾 -狄 -翡 -苒 -船 -廉 -终 -痞 -殇 -々 -畦 -饶 -改 -拆 -悻 -萄 -£ -瓿 -乃 -訾 -桅 -匮 -溧 -拥 -纱 -铍 -骗 -蕃 -龋 -缬 -父 -佐 -疚 -栎 -醍 -掳 -蓄 -x -惆 -颜 -鲆 -榆 -〔 -猎 -敌 -暴 -谥 -鲫 -贾 -罗 -玻 -缄 -扦 -芪 -癣 -落 -徒 -臾 -恿 -猩 -托 -邴 -肄 -牵 -春 -陛 -耀 -刊 -拓 -蓓 -邳 -堕 -寇 -枉 -淌 -啡 -湄 -兽 -酷 -萼 -碚 -濠 -萤 -夹 -旬 -戮 -梭 -琥 -椭 -昔 -勺 -蜊 -绐 -晚 -孺 -僵 -宣 -摄 -冽 -旨 -萌 -忙 -蚤 -眉 -噼 -蟑 -付 -契 -瓜 -悼 -颡 -壁 -曾 -窕 -颢 -澎 -仿 -俑 -浑 -嵌 -浣 -乍 -碌 -褪 -乱 -蔟 -隙 -玩 -剐 -葫 -箫 -纲 -围 -伐 -决 -伙 -漩 -瑟 -刑 -肓 -镳 -缓 -蹭 -氨 -皓 -典 -畲 -坍 -铑 -檐 -塑 -洞 -倬 -储 -胴 -淳 -戾 -吐 -灼 -惺 -妙 -毕 -珐 -缈 -虱 -盖 -羰 -鸿 -磅 -谓 -髅 -娴 -苴 -唷 -蚣 -霹 -抨 -贤 -唠 -犬 -誓 -逍 -庠 -逼 -麓 -籼 -釉 -呜 -碧 -秧 -氩 -摔 -霄 -穸 -纨 -辟 -妈 -映 -完 -牛 -缴 -嗷 -炊 -恩 -荔 -茆 -掉 -紊 -慌 -莓 -羟 -阙 -萁 -磐 -另 -蕹 -辱 -鳐 -湮 -吡 -吩 -唐 -睦 -垠 -舒 -圜 -冗 -瞿 -溺 -芾 -囱 -匠 -僳 -汐 -菩 -饬 -漓 -黑 -霰 -浸 -濡 -窥 -毂 -蒡 -兢 -驻 -鹉 -芮 -诙 -迫 -雳 -厂 -忐 -臆 -猴 -鸣 -蚪 -栈 -箕 -羡 -渐 -莆 -捍 -眈 -哓 -趴 -蹼 -埕 -嚣 -骛 -宏 -淄 -斑 -噜 -严 -瑛 -垃 -椎 -诱 -压 -庾 -绞 -焘 -廿 -抡 -迄 -棘 -夫 -纬 -锹 -眨 -瞌 -侠 -脐 -竞 -瀑 -孳 -骧 -遁 -姜 -颦 -荪 -滚 -萦 -伪 -逸 -粳 -爬 -锁 -矣 -役 -趣 -洒 -颔 -诏 -逐 -奸 -甭 -惠 -攀 -蹄 -泛 -尼 -拼 -阮 -鹰 -亚 -颈 -惑 -勒 -〉 -际 -肛 -爷 -刚 -钨 -丰 -养 -冶 -鲽 -辉 -蔻 -画 -覆 -皴 -妊 -麦 -返 -醉 -皂 -擀 -〗 -酶 -凑 -粹 -悟 -诀 -硖 -港 -卜 -z -杀 -涕 -± -舍 -铠 -抵 -弛 -段 -敝 -镐 -奠 -拂 -轴 -跛 -袱 -e -t -沉 -菇 -俎 -薪 -峦 -秭 -蟹 -历 -盟 -菠 -寡 -液 -肢 -喻 -染 -裱 -悱 -抱 -氙 -赤 -捅 -猛 -跑 -氮 -谣 -仁 -尺 -辊 -窍 -烙 -衍 -架 -擦 -倏 -璐 -瑁 -币 -楞 -胖 -夔 -趸 -邛 -惴 -饕 -虔 -蝎 -§ -哉 -贝 -宽 -辫 -炮 -扩 -饲 -籽 -魏 -菟 -锰 -伍 -猝 -末 -琳 -哚 -蛎 -邂 -呀 -姿 -鄞 -却 -歧 -仙 -恸 -椐 -森 -牒 -寤 -袒 -婆 -虢 -雅 -钉 -朵 -贼 -欲 -苞 -寰 -故 -龚 -坭 -嘘 -咫 -礼 -硷 -兀 -睢 -汶 -’ -铲 -烧 -绕 -诃 -浃 -钿 -哺 -柜 -讼 -颊 -璁 -腔 -洽 -咐 -脲 -簌 -筠 -镣 -玮 -鞠 -谁 -兼 -姆 -挥 -梯 -蝴 -谘 -漕 -刷 -躏 -宦 -弼 -b -垌 -劈 -麟 -莉 -揭 -笙 -渎 -仕 -嗤 -仓 -配 -怏 -抬 -错 -泯 -镊 -孰 -猿 -邪 -仍 -秋 -鼬 -壹 -歇 -吵 -炼 -< -尧 -射 -柬 -廷 -胧 -霾 -凳 
-隋 -肚 -浮 -梦 -祥 -株 -堵 -退 -L -鹫 -跎 -凶 -毽 -荟 -炫 -栩 -玳 -甜 -沂 -鹿 -顽 -伯 -爹 -赔 -蛴 -徐 -匡 -欣 -狰 -缸 -雹 -蟆 -疤 -默 -沤 -啜 -痂 -衣 -禅 -w -i -h -辽 -葳 -黝 -钗 -停 -沽 -棒 -馨 -颌 -肉 -吴 -硫 -悯 -劾 -娈 -马 -啧 -吊 -悌 -镑 -峭 -帆 -瀣 -涉 -咸 -疸 -滋 -泣 -翦 -拙 -癸 -钥 -蜒 -+ -尾 -庄 -凝 -泉 -婢 -渴 -谊 -乞 -陆 -锉 -糊 -鸦 -淮 -I -B -N -晦 -弗 -乔 -庥 -葡 -尻 -席 -橡 -傣 -渣 -拿 -惩 -麋 -斛 -缃 -矮 -蛏 -岘 -鸽 -姐 -膏 -催 -奔 -镒 -喱 -蠡 -摧 -钯 -胤 -柠 -拐 -璋 -鸥 -卢 -荡 -倾 -^ -_ -珀 -逄 -萧 -塾 -掇 -贮 -笆 -聂 -圃 -冲 -嵬 -M -滔 -笕 -值 -炙 -偶 -蜱 -搐 -梆 -汪 -蔬 -腑 -鸯 -蹇 -敞 -绯 -仨 -祯 -谆 -梧 -糗 -鑫 -啸 -豺 -囹 -猾 -巢 -柄 -瀛 -筑 -踌 -沭 -暗 -苁 -鱿 -蹉 -脂 -蘖 -牢 -热 -木 -吸 -溃 -宠 -序 -泞 -偿 -拜 -檩 -厚 -朐 -毗 -螳 -吞 -媚 -朽 -担 -蝗 -橘 -畴 -祈 -糟 -盱 -隼 -郜 -惜 -珠 -裨 -铵 -焙 -琚 -唯 -咚 -噪 -骊 -丫 -滢 -勤 -棉 -呸 -咣 -淀 -隔 -蕾 -窈 -饨 -挨 -煅 -短 -匙 -粕 -镜 -赣 -撕 -墩 -酬 -馁 -豌 -颐 -抗 -酣 -氓 -佑 -搁 -哭 -递 -耷 -涡 -桃 -贻 -碣 -截 -瘦 -昭 -镌 -蔓 -氚 -甲 -猕 -蕴 -蓬 -散 -拾 -纛 -狼 -猷 -铎 -埋 -旖 -矾 -讳 -囊 -糜 -迈 -粟 -蚂 -紧 -鲳 -瘢 -栽 -稼 -羊 -锄 -斟 -睁 -桥 -瓮 -蹙 -祉 -醺 -鼻 -昱 -剃 -跳 -篱 -跷 -蒜 -翎 -宅 -晖 -嗑 -壑 -峻 -癫 -屏 -狠 -陋 -袜 -途 -憎 -祀 -莹 -滟 -佶 -溥 -臣 -约 -盛 -峰 -磁 -慵 -婪 -拦 -莅 -朕 -鹦 -粲 -裤 -哎 -疡 -嫖 -琵 -窟 -堪 -谛 -嘉 -儡 -鳝 -斩 -郾 -驸 -酊 -妄 -胜 -贺 -徙 -傅 -噌 -钢 -栅 -庇 -恋 -匝 -巯 -邈 -尸 -锚 -粗 -佟 -蛟 -薹 -纵 -蚊 -郅 -绢 -锐 -苗 -俞 -篆 -淆 -膀 -鲜 -煎 -诶 -秽 -寻 -涮 -刺 -怀 -噶 -巨 -褰 -魅 -灶 -灌 -桉 -藕 -谜 -舸 -薄 -搀 -恽 -借 -牯 -痉 -渥 -愿 -亓 -耘 -杠 -柩 -锔 -蚶 -钣 -珈 -喘 -蹒 -幽 -赐 -稗 -晤 -莱 -泔 -扯 -肯 -菪 -裆 -腩 -豉 -疆 -骜 -腐 -倭 -珏 -唔 -粮 -亡 -润 -慰 -伽 -橄 -玄 -誉 -醐 -胆 -龊 -粼 -塬 -陇 -彼 -削 -嗣 -绾 -芽 -妗 -垭 -瘴 -爽 -薏 -寨 -龈 -泠 -弹 -赢 -漪 -猫 -嘧 -涂 -恤 -圭 -茧 -烽 -屑 -痕 -巾 -赖 -荸 -凰 -腮 -畈 -亵 -蹲 -偃 -苇 -澜 -艮 -换 -骺 -烘 -苕 -梓 -颉 -肇 -哗 -悄 -氤 -涠 -葬 -屠 -鹭 -植 -竺 -佯 -诣 -鲇 -瘀 -鲅 -邦 -移 -滁 -冯 -耕 -癔 -戌 -茬 -沁 -巩 -悠 -湘 -洪 -痹 -锟 -循 -谋 -腕 -鳃 -钠 -捞 -焉 -迎 -碱 -伫 -急 -榷 -奈 -邝 -卯 -辄 -皲 -卟 -醛 -畹 -忧 -稳 -雄 -昼 -缩 -阈 -睑 -扌 -耗 -曦 -涅 -捏 -瞧 -邕 -淖 -漉 -铝 -耦 -禹 -湛 -喽 -莼 -琅 -诸 -苎 -纂 -硅 -始 -嗨 -傥 -燃 -臂 -赅 -嘈 -呆 -贵 -屹 -壮 -肋 -亍 -蚀 -卅 -豹 -腆 -邬 -迭 -浊 -} -童 -螂 -捐 -圩 -勐 -触 -寞 -汊 -壤 -荫 -膺 -渌 -芳 -懿 -遴 -螈 -泰 -蓼 -蛤 -茜 -舅 -枫 -朔 -膝 -眙 -避 -梅 -判 -鹜 -璜 -牍 -缅 -垫 -藻 -黔 -侥 -惚 -懂 -踩 -腰 -腈 -札 -丞 -唾 -慈 -顿 -摹 -荻 -琬 -~ -斧 -沈 -滂 -胁 -胀 -幄 -莜 -Z -匀 -鄄 -掌 -绰 -茎 -焚 -赋 -萱 -谑 -汁 -铒 -瞎 -夺 -蜗 -野 -娆 -冀 -弯 -篁 -懵 -灞 -隽 -芡 -脘 -俐 -辩 -芯 
-掺 -喏 -膈 -蝈 -觐 -悚 -踹 -蔗 -熠 -鼠 -呵 -抓 -橼 -峨 -畜 -缔 -禾 -崭 -弃 -熊 -摒 -凸 -拗 -穹 -蒙 -抒 -祛 -劝 -闫 -扳 -阵 -醌 -踪 -喵 -侣 -搬 -仅 -荧 -赎 -蝾 -琦 -买 -婧 -瞄 -寓 -皎 -冻 -赝 -箩 -莫 -瞰 -郊 -笫 -姝 -筒 -枪 -遣 -煸 -袋 -舆 -痱 -涛 -母 -〇 -启 -践 -耙 -绲 -盘 -遂 -昊 -搞 -槿 -诬 -纰 -泓 -惨 -檬 -亻 -越 -C -o -憩 -熵 -祷 -钒 -暧 -塔 -阗 -胰 -咄 -娶 -魔 -琶 -钞 -邻 -扬 -杉 -殴 -咽 -弓 -〆 -髻 -】 -吭 -揽 -霆 -拄 -殖 -脆 -彻 -岩 -芝 -勃 -辣 -剌 -钝 -嘎 -甄 -佘 -皖 -伦 -授 -徕 -憔 -挪 -皇 -庞 -稔 -芜 -踏 -溴 -兖 -卒 -擢 -饥 -鳞 -煲 -‰ -账 -颗 -叻 -斯 -捧 -鳍 -琮 -讹 -蛙 -纽 -谭 -酸 -兔 -莒 -睇 -伟 -觑 -羲 -嗜 -宜 -褐 -旎 -辛 -卦 -诘 -筋 -鎏 -溪 -挛 -熔 -阜 -晰 -鳅 -丢 -奚 -灸 -呱 -献 -陉 -黛 -鸪 -甾 -萨 -疮 -拯 -洲 -疹 -辑 -叙 -恻 -谒 -允 -柔 -烂 -氏 -逅 -漆 -拎 -惋 -扈 -湟 -纭 -啕 -掬 -擞 -哥 -忽 -涤 -鸵 -靡 -郗 -瓷 -扁 -廊 -怨 -雏 -钮 -敦 -E -懦 -憋 -汀 -拚 -啉 -腌 -岸 -f -痼 -瞅 -尊 -咀 -眩 -飙 -忌 -仝 -迦 -熬 -毫 -胯 -篑 -茄 -腺 -凄 -舛 -碴 -锵 -诧 -羯 -後 -漏 -汤 -宓 -仞 -蚁 -壶 -谰 -皑 -铄 -棰 -罔 -辅 -晶 -苦 -牟 -闽 -\ -烃 -饮 -聿 -丙 -蛳 -朱 -煤 -涔 -鳖 -犁 -罐 -荼 -砒 -淦 -妤 -黏 -戎 -孑 -婕 -瑾 -戢 -钵 -枣 -捋 -砥 -衩 -狙 -桠 -稣 -阎 -肃 -梏 -诫 -孪 -昶 -婊 -衫 -嗔 -侃 -塞 -蜃 -樵 -峒 -貌 -屿 -欺 -缫 -阐 -栖 -诟 -珞 -荭 -吝 -萍 -嗽 -恂 -啻 -蜴 -磬 -峋 -俸 -豫 -谎 -徊 -镍 -韬 -魇 -晴 -U -囟 -猜 -蛮 -坐 -囿 -伴 -亭 -肝 -佗 -蝠 -妃 -胞 -滩 -榴 -氖 -垩 -苋 -砣 -扪 -馏 -姓 -轩 -厉 -夥 -侈 -禀 -垒 -岑 -赏 -钛 -辐 -痔 -披 -纸 -碳 -“ -坞 -蠓 -挤 -荥 -沅 -悔 -铧 -帼 -蒌 -蝇 -a -p -y -n -g -哀 -浆 -瑶 -凿 -桶 -馈 -皮 -奴 -苜 -佤 -伶 -晗 -铱 -炬 -优 -弊 -氢 -恃 -甫 -攥 -端 -锌 -灰 -稹 -炝 -曙 -邋 -亥 -眶 -碾 -拉 -萝 -绔 -捷 -浍 -腋 -姑 -菖 -凌 -涞 -麽 -锢 -桨 -潢 -绎 -镰 -殆 -锑 -渝 -铬 -困 -绽 -觎 -匈 -糙 -暑 -裹 -鸟 -盔 -肽 -迷 -綦 -『 -亳 -佝 -俘 -钴 -觇 -骥 -仆 -疝 -跪 -婶 -郯 -瀹 -唉 -脖 -踞 -针 -晾 -忒 -扼 -瞩 -叛 -椒 -疟 -嗡 -邗 -肆 -跆 -玫 -忡 -捣 -咧 -唆 -艄 -蘑 -潦 -笛 -阚 -沸 -泻 -掊 -菽 -贫 -斥 -髂 -孢 -镂 -赂 -麝 -鸾 -屡 -衬 -苷 -恪 -叠 -希 -粤 -爻 -喝 -茫 -惬 -郸 -绻 -庸 -撅 -碟 -宄 -妹 -膛 -叮 -饵 -崛 -嗲 -椅 -冤 -搅 -咕 -敛 -尹 -垦 -闷 -蝉 -霎 -勰 -败 -蓑 -泸 -肤 -鹌 -幌 -焦 -浠 -鞍 -刁 -舰 -乙 -竿 -裔 -。 -茵 -函 -伊 -兄 -丨 -娜 -匍 -謇 -莪 -宥 -似 -蝽 -翳 -酪 -翠 -粑 -薇 -祢 -骏 -赠 -叫 -Q -噤 -噻 -竖 -芗 -莠 -潭 -俊 -羿 -耜 -O -郫 -趁 -嗪 -囚 -蹶 -芒 -洁 -笋 -鹑 -敲 -硝 -啶 -堡 -渲 -揩 -』 -携 -宿 -遒 -颍 -扭 -棱 -割 -萜 -蔸 -葵 -琴 -捂 -饰 -衙 -耿 -掠 -募 -岂 -窖 -涟 -蔺 -瘤 -柞 -瞪 -怜 -匹 -距 -楔 -炜 -哆 -秦 -缎 -幼 -茁 -绪 -痨 -恨 -楸 -娅 -瓦 -桩 -雪 -嬴 -伏 -榔 -妥 -铿 -拌 -眠 -雍 -缇 -‘ -卓 -搓 -哌 -觞 -噩 -屈 -哧 -髓 -咦 -巅 -娑 -侑 -淫 -膳 -祝 -勾 -姊 -莴 -胄 -疃 
-薛 -蜷 -胛 -巷 -芙 -芋 -熙 -闰 -勿 -窃 -狱 -剩 -钏 -幢 -陟 -铛 -慧 -靴 -耍 -k -浙 -浇 -飨 -惟 -绗 -祜 -澈 -啼 -咪 -磷 -摞 -诅 -郦 -抹 -跃 -壬 -吕 -肖 -琏 -颤 -尴 -剡 -抠 -凋 -赚 -泊 -津 -宕 -殷 -倔 -氲 -漫 -邺 -涎 -怠 -$ -垮 -荬 -遵 -俏 -叹 -噢 -饽 -蜘 -孙 -筵 -疼 -鞭 -羧 -牦 -箭 -潴 -c -眸 -祭 -髯 -啖 -坳 -愁 -芩 -驮 -倡 -巽 -穰 -沃 -胚 -怒 -凤 -槛 -剂 -趵 -嫁 -v -邢 -灯 -鄢 -桐 -睽 -檗 -锯 -槟 -婷 -嵋 -圻 -诗 -蕈 -颠 -遭 -痢 -芸 -怯 -馥 -竭 -锗 -徜 -恭 -遍 -籁 -剑 -嘱 -苡 -龄 -僧 -桑 -潸 -弘 -澶 -楹 -悲 -讫 -愤 -腥 -悸 -谍 -椹 -呢 -桓 -葭 -攫 -阀 -翰 -躲 -敖 -柑 -郎 -笨 -橇 -呃 -魁 -燎 -脓 -葩 -磋 -垛 -玺 -狮 -沓 -砜 -蕊 -锺 -罹 -蕉 -翱 -虐 -闾 -巫 -旦 -茱 -嬷 -枯 -鹏 -贡 -芹 -汛 -矫 -绁 -拣 -禺 -佃 -讣 -舫 -惯 -乳 -趋 -疲 -挽 -岚 -虾 -衾 -蠹 -蹂 -飓 -氦 -铖 -孩 -稞 -瑜 -壅 -掀 -勘 -妓 -畅 -髋 -W -庐 -牲 -蓿 -榕 -练 -垣 -唱 -邸 -菲 -昆 -婺 -穿 -绡 -麒 -蚱 -掂 -愚 -泷 -涪 -漳 -妩 -娉 -榄 -讷 -觅 -旧 -藤 -煮 -呛 -柳 -腓 -叭 -庵 -烷 -阡 -罂 -蜕 -擂 -猖 -咿 -媲 -脉 -【 -沏 -貅 -黠 -熏 -哲 -烁 -坦 -酵 -兜 -× -潇 -撒 -剽 -珩 -圹 -乾 -摸 -樟 -帽 -嗒 -襄 -魂 -轿 -憬 -锡 -〕 -喃 -皆 -咖 -隅 -脸 -残 -泮 -袂 -鹂 -珊 -囤 -捆 -咤 -误 -徨 -闹 -淙 -芊 -淋 -怆 -囗 -拨 -梳 -渤 -R -G -绨 -蚓 -婀 -幡 -狩 -麾 -谢 -唢 -裸 -旌 -伉 -纶 -裂 -驳 -砼 -咛 -澄 -樨 -蹈 -宙 -澍 -倍 -貔 -操 -勇 -蟠 -摈 -砧 -虬 -够 -缁 -悦 -藿 -撸 -艹 -摁 -淹 -豇 -虎 -榭 -ˉ -吱 -d -° -喧 -荀 -踱 -侮 -奋 -偕 -饷 -犍 -惮 -坑 -璎 -徘 -宛 -妆 -袈 -倩 -窦 -昂 -荏 -乖 -K -怅 -撰 -鳙 -牙 -袁 -酞 -X -痿 -琼 -闸 -雁 -趾 -荚 -虻 -涝 -《 -杏 -韭 -偈 -烤 -绫 -鞘 -卉 -症 -遢 -蓥 -诋 -杭 -荨 -匆 -竣 -簪 -辙 -敕 -虞 -丹 -缭 -咩 -黟 -m -淤 -瑕 -咂 -铉 -硼 -茨 -嶂 -痒 -畸 -敬 -涿 -粪 -窘 -熟 -叔 -嫔 -盾 -忱 -裘 -憾 -梵 -赡 -珙 -咯 -娘 -庙 -溯 -胺 -葱 -痪 -摊 -荷 -卞 -乒 -髦 -寐 -铭 -坩 -胗 -枷 -爆 -溟 -嚼 -羚 -砬 -轨 -惊 -挠 -罄 -竽 -菏 -氧 -浅 -楣 -盼 -枢 -炸 -阆 -杯 -谏 -噬 -淇 -渺 -俪 -秆 -墓 -泪 -跻 -砌 -痰 -垡 -渡 -耽 -釜 -讶 -鳎 -煞 -呗 -韶 -舶 -绷 -鹳 -缜 -旷 -铊 -皱 -龌 -檀 -霖 -奄 -槐 -艳 -蝶 -旋 -哝 -赶 -骞 -蚧 -腊 -盈 -丁 -` -蜚 -矸 -蝙 -睨 -嚓 -僻 -鬼 -醴 -夜 -彝 -磊 -笔 -拔 -栀 -糕 -厦 -邰 -纫 -逭 -纤 -眦 -膊 -馍 -躇 -烯 -蘼 -冬 -诤 -暄 -骶 -哑 -瘠 -」 -臊 -丕 -愈 -咱 -螺 -擅 -跋 -搏 -硪 -谄 -笠 -淡 -嘿 -骅 -谧 -鼎 -皋 -姚 -歼 -蠢 -驼 -耳 -胬 -挝 -涯 -狗 -蒽 -孓 -犷 -凉 -芦 -箴 -铤 -孤 -嘛 -坤 -V -茴 -朦 -挞 -尖 -橙 -诞 -搴 -碇 -洵 -浚 -帚 -蜍 -漯 -柘 -嚎 -讽 -芭 -荤 -咻 -祠 -秉 -跖 -埃 -吓 -糯 -眷 -馒 -惹 -娼 -鲑 -嫩 -讴 -轮 -瞥 -靶 -褚 -乏 -缤 -宋 -帧 -删 -驱 -碎 -扑 -俩 -俄 -偏 -涣 -竹 -噱 -皙 -佰 -渚 -唧 -斡 -# -镉 -刀 -崎 -筐 -佣 -夭 -贰 -肴 -峙 -哔 -艿 -匐 -牺 -镛 -缘 -仡 -嫡 -劣 -枸 -堀 -梨 -簿 -鸭 -蒸 -亦 -稽 -浴 -{ -衢 -束 -槲 -j -阁 -揍 
-疥 -棋 -潋 -聪 -窜 -乓 -睛 -插 -冉 -阪 -苍 -搽 -「 -蟾 -螟 -幸 -仇 -樽 -撂 -慢 -跤 -幔 -俚 -淅 -覃 -觊 -溶 -妖 -帛 -侨 -曰 -妾 -泗 -· -: -瀘 -風 -Ë -( -) -∶ -紅 -紗 -瑭 -雲 -頭 -鶏 -財 -許 -• -¥ -樂 -焗 -麗 -— -; -滙 -東 -榮 -繪 -興 -… -門 -業 -π -楊 -國 -顧 -é -盤 -寳 -Λ -龍 -鳳 -島 -誌 -緣 -結 -銭 -萬 -勝 -祎 -璟 -優 -歡 -臨 -時 -購 -= -★ -藍 -昇 -鐵 -觀 -勅 -農 -聲 -畫 -兿 -術 -發 -劉 -記 -專 -耑 -園 -書 -壴 -種 -Ο -● -褀 -號 -銀 -匯 -敟 -锘 -葉 -橪 -廣 -進 -蒄 -鑽 -阝 -祙 -貢 -鍋 -豊 -夬 -喆 -團 -閣 -開 -燁 -賓 -館 -酡 -沔 -順 -+ -硚 -劵 -饸 -陽 -車 -湓 -復 -萊 -氣 -軒 -華 -堃 -迮 -纟 -戶 -馬 -學 -裡 -電 -嶽 -獨 -マ -シ -サ -ジ -燘 -袪 -環 -❤ -臺 -灣 -専 -賣 -孖 -聖 -攝 -線 -▪ -α -傢 -俬 -夢 -達 -莊 -喬 -貝 -薩 -劍 -羅 -壓 -棛 -饦 -尃 -璈 -囍 -醫 -G -I -A -# -N -鷄 -髙 -嬰 -啓 -約 -隹 -潔 -賴 -藝 -~ -寶 -籣 -麺 -  -嶺 -√ -義 -網 -峩 -長 -∧ -魚 -機 -構 -② -鳯 -偉 -L -B -㙟 -畵 -鴿 -' -詩 -溝 -嚞 -屌 -藔 -佧 -玥 -蘭 -織 -1 -3 -9 -0 -7 -點 -砭 -鴨 -鋪 -銘 -廳 -弍 -‧ -創 -湯 -坶 -℃ -卩 -骝 -& -烜 -荘 -當 -潤 -扞 -係 -懷 -碶 -钅 -蚨 -讠 -☆ -叢 -爲 -埗 -涫 -塗 -→ -楽 -現 -鯨 -愛 -瑪 -鈺 -忄 -悶 -藥 -飾 -樓 -視 -孬 -ㆍ -燚 -苪 -師 -① -丼 -锽 -│ -韓 -標 -è -兒 -閏 -匋 -張 -漢 -Ü -髪 -會 -閑 -檔 -習 -裝 -の -峯 -菘 -輝 -И -雞 -釣 -億 -浐 -K -O -R -8 -H -E -P -T -W -D -S -C -M -F -姌 -饹 -» -晞 -廰 -ä -嵯 -鷹 -負 -飲 -絲 -冚 -楗 -澤 -綫 -區 -❋ -← -質 -靑 -揚 -③ -滬 -統 -産 -協 -﹑ -乸 -畐 -經 -運 -際 -洺 -岽 -為 -粵 -諾 -崋 -豐 -碁 -ɔ -V -2 -6 -齋 -誠 -訂 -´ -勑 -雙 -陳 -無 -í -泩 -媄 -夌 -刂 -i -c -t -o -r -a -嘢 -耄 -燴 -暃 -壽 -媽 -靈 -抻 -體 -唻 -É -冮 -甹 -鎮 -錦 -ʌ -蜛 -蠄 -尓 -駕 -戀 -飬 -逹 -倫 -貴 -極 -Я -Й -寬 -磚 -嶪 -郎 -職 -| -間 -n -d -剎 -伈 -課 -飛 -橋 -瘊 -№ -譜 -骓 -圗 -滘 -縣 -粿 -咅 -養 -濤 -彳 -® -% -Ⅱ -啰 -㴪 -見 -矞 -薬 -糁 -邨 -鲮 -顔 -罱 -З -選 -話 -贏 -氪 -俵 -競 -瑩 -繡 -枱 -β -綉 -á -獅 -爾 -™ -麵 -戋 -淩 -徳 -個 -劇 -場 -務 -簡 -寵 -h -實 -膠 -轱 -圖 -築 -嘣 -樹 -㸃 -營 -耵 -孫 -饃 -鄺 -飯 -麯 -遠 -輸 -坫 -孃 -乚 -閃 -鏢 -㎡ -題 -廠 -關 -↑ -爺 -將 -軍 -連 -篦 -覌 -參 -箸 -- -窠 -棽 -寕 -夀 -爰 -歐 -呙 -閥 -頡 -熱 -雎 -垟 -裟 -凬 -勁 -帑 -馕 -夆 -疌 -枼 -馮 -貨 -蒤 -樸 -彧 -旸 -靜 -龢 -暢 -㐱 -鳥 -珺 -鏡 -灡 -爭 -堷 -廚 -Ó -騰 -診 -┅ -蘇 -褔 -凱 -頂 -豕 -亞 -帥 -嘬 -⊥ -仺 -桖 -複 -饣 -絡 -穂 -顏 -棟 -納 -▏ -濟 -親 -設 -計 -攵 -埌 -烺 -ò -頤 -燦 -蓮 -撻 -節 -講 -濱 -濃 -娽 -洳 -朿 -燈 -鈴 -護 -膚 -铔 -過 -補 -Z -U -5 -4 -坋 -闿 -䖝 -餘 -缐 -铞 -貿 -铪 -桼 -趙 -鍊 -[ -㐂 -垚 -菓 -揸 -捲 -鐘 -滏 -𣇉 -爍 -輪 -燜 -鴻 -鮮 -動 -鹞 -鷗 -丄 -慶 -鉌 -翥 -飮 -腸 -⇋ -漁 -覺 -來 -熘 -昴 -翏 -鲱 -圧 
-鄉 -萭 -頔 -爐 -嫚 -г -貭 -類 -聯 -幛 -輕 -訓 -鑒 -夋 -锨 -芃 -珣 -䝉 -扙 -嵐 -銷 -處 -ㄱ -語 -誘 -苝 -歸 -儀 -燒 -楿 -內 -粢 -葒 -奧 -麥 -礻 -滿 -蠔 -穵 -瞭 -態 -鱬 -榞 -硂 -鄭 -黃 -煙 -祐 -奓 -逺 -* -瑄 -獲 -聞 -薦 -讀 -這 -樣 -決 -問 -啟 -們 -執 -説 -轉 -單 -隨 -唘 -帶 -倉 -庫 -還 -贈 -尙 -皺 -■ -餅 -產 -○ -∈ -報 -狀 -楓 -賠 -琯 -嗮 -禮 -` -傳 -> -≤ -嗞 -Φ -≥ -換 -咭 -∣ -↓ -曬 -ε -応 -寫 -″ -終 -様 -純 -費 -療 -聨 -凍 -壐 -郵 -ü -黒 -∫ -製 -塊 -調 -軽 -確 -撃 -級 -馴 -Ⅲ -涇 -繹 -數 -碼 -證 -狒 -処 -劑 -< -晧 -賀 -衆 -] -櫥 -兩 -陰 -絶 -對 -鯉 -憶 -◎ -p -e -Y -蕒 -煖 -頓 -測 -試 -鼽 -僑 -碩 -妝 -帯 -≈ -鐡 -舖 -權 -喫 -倆 -ˋ -該 -悅 -ā -俫 -. -f -s -b -m -k -g -u -j -貼 -淨 -濕 -針 -適 -備 -l -/ -給 -謢 -強 -觸 -衛 -與 -⊙ -$ -緯 -變 -⑴ -⑵ -⑶ -㎏ -殺 -∩ -幚 -─ -價 -▲ -離 -ú -ó -飄 -烏 -関 -閟 -﹝ -﹞ -邏 -輯 -鍵 -驗 -訣 -導 -歷 -屆 -層 -▼ -儱 -錄 -熳 -ē -艦 -吋 -錶 -辧 -飼 -顯 -④ -禦 -販 -気 -対 -枰 -閩 -紀 -幹 -瞓 -貊 -淚 -△ -眞 -墊 -Ω -獻 -褲 -縫 -緑 -亜 -鉅 -餠 -{ -} -◆ -蘆 -薈 -█ -◇ -溫 -彈 -晳 -粧 -犸 -穩 -訊 -崬 -凖 -熥 -П -舊 -條 -紋 -圍 -Ⅳ -筆 -尷 -難 -雜 -錯 -綁 -識 -頰 -鎖 -艶 -□ -殁 -殼 -⑧ -├ -▕ -鵬 -ǐ -ō -ǒ -糝 -綱 -▎ -μ -盜 -饅 -醬 -籤 -蓋 -釀 -鹽 -據 -à -ɡ -辦 -◥ -彐 -┌ -婦 -獸 -鲩 -伱 -ī -蒟 -蒻 -齊 -袆 -腦 -寧 -凈 -妳 -煥 -詢 -偽 -謹 -啫 -鯽 -騷 -鱸 -損 -傷 -鎻 -髮 -買 -冏 -儥 -両 -﹢ -∞ -載 -喰 -z -羙 -悵 -燙 -曉 -員 -組 -徹 -艷 -痠 -鋼 -鼙 -縮 -細 -嚒 -爯 -≠ -維 -" -鱻 -壇 -厍 -帰 -浥 -犇 -薡 -軎 -² -應 -醜 -刪 -緻 -鶴 -賜 -噁 -軌 -尨 -镔 -鷺 -槗 -彌 -葚 -濛 -請 -溇 -緹 -賢 -訪 -獴 -瑅 -資 -縤 -陣 -蕟 -栢 -韻 -祼 -恁 -伢 -謝 -劃 -涑 -總 -衖 -踺 -砋 -凉 -籃 -駿 -苼 -瘋 -昽 -紡 -驊 -腎 -﹗ -響 -杋 -剛 -嚴 -禪 -歓 -槍 -傘 -檸 -檫 -炣 -勢 -鏜 -鎢 -銑 -尐 -減 -奪 -惡 -θ -僮 -婭 -臘 -ū -ì -殻 -鉄 -∑ -蛲 -焼 -緖 -續 -紹 -懮 diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrSettingsActivity.java b/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrSettingsActivity.java deleted file mode 100644 index 6f8c45ff4f..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/examples/ocr/OcrSettingsActivity.java +++ /dev/null @@ -1,198 +0,0 @@ -package com.baidu.paddle.fastdeploy.app.examples.ocr; - -import android.annotation.SuppressLint; -import android.content.Context; 
-import android.content.SharedPreferences; -import android.os.Bundle; -import android.preference.EditTextPreference; -import android.preference.ListPreference; -import android.preference.PreferenceManager; -import android.support.v7.app.ActionBar; - -import com.baidu.paddle.fastdeploy.app.examples.R; -import com.baidu.paddle.fastdeploy.app.ui.Utils; -import com.baidu.paddle.fastdeploy.app.ui.view.AppCompatPreferenceActivity; - -import java.util.ArrayList; -import java.util.List; - -public class OcrSettingsActivity extends AppCompatPreferenceActivity implements - SharedPreferences.OnSharedPreferenceChangeListener { - private static final String TAG = OcrSettingsActivity.class.getSimpleName(); - - static public int selectedModelIdx = -1; - static public String modelDir = ""; - static public String labelPath = ""; - static public int cpuThreadNum = 2; - static public String cpuPowerMode = ""; - static public float scoreThreshold = 0.4f; - static public String enableLiteFp16 = "true"; - - ListPreference lpChoosePreInstalledModel = null; - EditTextPreference etModelDir = null; - EditTextPreference etLabelPath = null; - ListPreference lpCPUThreadNum = null; - ListPreference lpCPUPowerMode = null; - EditTextPreference etScoreThreshold = null; - ListPreference lpEnableLiteFp16 = null; - - List preInstalledModelDirs = null; - List preInstalledLabelPaths = null; - List preInstalledCPUThreadNums = null; - List preInstalledCPUPowerModes = null; - List preInstalledScoreThresholds = null; - List preInstalledEnableLiteFp16s = null; - - @Override - public void onCreate(Bundle savedInstanceState) { - super.onCreate(savedInstanceState); - addPreferencesFromResource(R.xml.ocr_settings); - ActionBar supportActionBar = getSupportActionBar(); - if (supportActionBar != null) { - supportActionBar.setDisplayHomeAsUpEnabled(true); - } - - // Initialize pre-installed models - preInstalledModelDirs = new ArrayList(); - preInstalledLabelPaths = new ArrayList(); - preInstalledCPUThreadNums = 
new ArrayList(); - preInstalledCPUPowerModes = new ArrayList(); - preInstalledScoreThresholds = new ArrayList(); - preInstalledEnableLiteFp16s = new ArrayList(); - preInstalledModelDirs.add(getString(R.string.OCR_MODEL_DIR_DEFAULT)); - preInstalledLabelPaths.add(getString(R.string.OCR_REC_LABEL_DEFAULT)); - preInstalledCPUThreadNums.add(getString(R.string.CPU_THREAD_NUM_DEFAULT)); - preInstalledCPUPowerModes.add(getString(R.string.CPU_POWER_MODE_DEFAULT)); - preInstalledScoreThresholds.add(getString(R.string.SCORE_THRESHOLD_DEFAULT)); - preInstalledEnableLiteFp16s.add(getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT)); - - // Setup UI components - lpChoosePreInstalledModel = - (ListPreference) findPreference(getString(R.string.CHOOSE_PRE_INSTALLED_MODEL_KEY)); - String[] preInstalledModelNames = new String[preInstalledModelDirs.size()]; - for (int i = 0; i < preInstalledModelDirs.size(); i++) { - preInstalledModelNames[i] = preInstalledModelDirs.get(i).substring(preInstalledModelDirs.get(i).lastIndexOf("/") + 1); - } - lpChoosePreInstalledModel.setEntries(preInstalledModelNames); - lpChoosePreInstalledModel.setEntryValues(preInstalledModelDirs.toArray(new String[preInstalledModelDirs.size()])); - lpCPUThreadNum = (ListPreference) findPreference(getString(R.string.CPU_THREAD_NUM_KEY)); - lpCPUPowerMode = (ListPreference) findPreference(getString(R.string.CPU_POWER_MODE_KEY)); - etModelDir = (EditTextPreference) findPreference(getString(R.string.MODEL_DIR_KEY)); - etModelDir.setTitle("Model dir (SDCard: " + Utils.getSDCardDirectory() + ")"); - etLabelPath = (EditTextPreference) findPreference(getString(R.string.LABEL_PATH_KEY)); - etLabelPath.setTitle("Label path (SDCard: " + Utils.getSDCardDirectory() + ")"); - etScoreThreshold = (EditTextPreference) findPreference(getString(R.string.SCORE_THRESHOLD_KEY)); - lpEnableLiteFp16 = (ListPreference) findPreference(getString(R.string.ENABLE_LITE_FP16_MODE_KEY)); - } - - @SuppressLint("ApplySharedPref") - private void 
reloadSettingsAndUpdateUI() { - SharedPreferences sharedPreferences = getPreferenceScreen().getSharedPreferences(); - - String selected_model_dir = sharedPreferences.getString(getString(R.string.CHOOSE_PRE_INSTALLED_MODEL_KEY), - getString(R.string.OCR_MODEL_DIR_DEFAULT)); - int selected_model_idx = lpChoosePreInstalledModel.findIndexOfValue(selected_model_dir); - if (selected_model_idx >= 0 && selected_model_idx < preInstalledModelDirs.size() && selected_model_idx != selectedModelIdx) { - SharedPreferences.Editor editor = sharedPreferences.edit(); - editor.putString(getString(R.string.MODEL_DIR_KEY), preInstalledModelDirs.get(selected_model_idx)); - editor.putString(getString(R.string.LABEL_PATH_KEY), preInstalledLabelPaths.get(selected_model_idx)); - editor.putString(getString(R.string.CPU_THREAD_NUM_KEY), preInstalledCPUThreadNums.get(selected_model_idx)); - editor.putString(getString(R.string.CPU_POWER_MODE_KEY), preInstalledCPUPowerModes.get(selected_model_idx)); - editor.putString(getString(R.string.SCORE_THRESHOLD_KEY), preInstalledScoreThresholds.get(selected_model_idx)); - editor.putString(getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT), preInstalledEnableLiteFp16s.get(selected_model_idx)); - editor.commit(); - lpChoosePreInstalledModel.setSummary(selected_model_dir); - selectedModelIdx = selected_model_idx; - } - - String model_dir = sharedPreferences.getString(getString(R.string.MODEL_DIR_KEY), - getString(R.string.OCR_MODEL_DIR_DEFAULT)); - String label_path = sharedPreferences.getString(getString(R.string.LABEL_PATH_KEY), - getString(R.string.OCR_REC_LABEL_DEFAULT)); - String cpu_thread_num = sharedPreferences.getString(getString(R.string.CPU_THREAD_NUM_KEY), - getString(R.string.CPU_THREAD_NUM_DEFAULT)); - String cpu_power_mode = sharedPreferences.getString(getString(R.string.CPU_POWER_MODE_KEY), - getString(R.string.CPU_POWER_MODE_DEFAULT)); - String score_threshold = sharedPreferences.getString(getString(R.string.SCORE_THRESHOLD_KEY), - 
getString(R.string.SCORE_THRESHOLD_DEFAULT)); - String enable_lite_fp16 = sharedPreferences.getString(getString(R.string.ENABLE_LITE_FP16_MODE_KEY), - getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT)); - - etModelDir.setSummary(model_dir); - etLabelPath.setSummary(label_path); - lpCPUThreadNum.setValue(cpu_thread_num); - lpCPUThreadNum.setSummary(cpu_thread_num); - lpCPUPowerMode.setValue(cpu_power_mode); - lpCPUPowerMode.setSummary(cpu_power_mode); - etScoreThreshold.setSummary(score_threshold); - etScoreThreshold.setText(score_threshold); - lpEnableLiteFp16.setValue(enable_lite_fp16); - lpEnableLiteFp16.setSummary(enable_lite_fp16); - - } - - static boolean checkAndUpdateSettings(Context ctx) { - boolean settingsChanged = false; - SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(ctx); - - String model_dir = sharedPreferences.getString(ctx.getString(R.string.MODEL_DIR_KEY), - ctx.getString(R.string.OCR_MODEL_DIR_DEFAULT)); - settingsChanged |= !modelDir.equalsIgnoreCase(model_dir); - modelDir = model_dir; - - String label_path = sharedPreferences.getString(ctx.getString(R.string.LABEL_PATH_KEY), - ctx.getString(R.string.OCR_REC_LABEL_DEFAULT)); - settingsChanged |= !labelPath.equalsIgnoreCase(label_path); - labelPath = label_path; - - String cpu_thread_num = sharedPreferences.getString(ctx.getString(R.string.CPU_THREAD_NUM_KEY), - ctx.getString(R.string.CPU_THREAD_NUM_DEFAULT)); - settingsChanged |= cpuThreadNum != Integer.parseInt(cpu_thread_num); - cpuThreadNum = Integer.parseInt(cpu_thread_num); - - String cpu_power_mode = sharedPreferences.getString(ctx.getString(R.string.CPU_POWER_MODE_KEY), - ctx.getString(R.string.CPU_POWER_MODE_DEFAULT)); - settingsChanged |= !cpuPowerMode.equalsIgnoreCase(cpu_power_mode); - cpuPowerMode = cpu_power_mode; - - String score_threshold = sharedPreferences.getString(ctx.getString(R.string.SCORE_THRESHOLD_KEY), - ctx.getString(R.string.SCORE_THRESHOLD_DEFAULT)); - settingsChanged |= 
scoreThreshold != Float.parseFloat(score_threshold); - scoreThreshold = Float.parseFloat(score_threshold); - - String enable_lite_fp16 = sharedPreferences.getString(ctx.getString(R.string.ENABLE_LITE_FP16_MODE_KEY), - ctx.getString(R.string.ENABLE_LITE_FP16_MODE_DEFAULT)); - settingsChanged |= !enableLiteFp16.equalsIgnoreCase(enable_lite_fp16); - enableLiteFp16 = enable_lite_fp16; - - return settingsChanged; - } - - static void resetSettings() { - selectedModelIdx = -1; - modelDir = ""; - labelPath = ""; - cpuThreadNum = 2; - cpuPowerMode = ""; - scoreThreshold = 0.4f; - enableLiteFp16 = "true"; - } - - @Override - protected void onResume() { - super.onResume(); - getPreferenceScreen().getSharedPreferences().registerOnSharedPreferenceChangeListener(this); - reloadSettingsAndUpdateUI(); - } - - @Override - protected void onPause() { - super.onPause(); - getPreferenceScreen().getSharedPreferences().unregisterOnSharedPreferenceChangeListener(this); - } - - @Override - public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { - reloadSettingsAndUpdateUI(); - } -} diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java b/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java deleted file mode 100644 index eabeb74f46..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/Utils.java +++ /dev/null @@ -1,313 +0,0 @@ -package com.baidu.paddle.fastdeploy.app.ui; - -import android.content.Context; -import android.content.res.Resources; -import android.database.Cursor; -import android.graphics.Bitmap; -import android.graphics.BitmapFactory; -import android.hardware.Camera; -import android.net.Uri; -import android.opengl.GLES20; -import android.os.Environment; -import android.provider.MediaStore; -import android.util.Log; -import android.view.Surface; -import android.view.WindowManager; - 
-import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.List; - -public class Utils { - private static final String TAG = Utils.class.getSimpleName(); - - public static void RecursiveCreateDirectories(String fileDir) { - String[] fileDirs = fileDir.split("\\/"); - String topPath = ""; - for (int i = 0; i < fileDirs.length; i++) { - topPath += "/" + fileDirs[i]; - File file = new File(topPath); - if (file.exists()) { - continue; - } else { - file.mkdir(); - } - } - } - - public static void copyFileFromAssets(Context appCtx, String srcPath, String dstPath) { - if (srcPath.isEmpty() || dstPath.isEmpty()) { - return; - } - String dstDir = dstPath.substring(0, dstPath.lastIndexOf('/')); - if (dstDir.length() > 0) { - RecursiveCreateDirectories(dstDir); - } - InputStream is = null; - OutputStream os = null; - try { - is = new BufferedInputStream(appCtx.getAssets().open(srcPath)); - os = new BufferedOutputStream(new FileOutputStream(new File(dstPath))); - byte[] buffer = new byte[1024]; - int length = 0; - while ((length = is.read(buffer)) != -1) { - os.write(buffer, 0, length); - } - } catch (FileNotFoundException e) { - e.printStackTrace(); - } catch (IOException e) { - e.printStackTrace(); - } finally { - try { - os.close(); - is.close(); - } catch (IOException e) { - e.printStackTrace(); - } - } - } - - public static void copyDirectoryFromAssets(Context appCtx, String srcDir, String dstDir) { - if (srcDir.isEmpty() || dstDir.isEmpty()) { - return; - } - try { - if (!new File(dstDir).exists()) { - new File(dstDir).mkdirs(); - } - for (String fileName : appCtx.getAssets().list(srcDir)) { - String srcSubPath = srcDir + 
File.separator + fileName; - String dstSubPath = dstDir + File.separator + fileName; - if (new File(srcSubPath).isDirectory()) { - copyDirectoryFromAssets(appCtx, srcSubPath, dstSubPath); - } else { - copyFileFromAssets(appCtx, srcSubPath, dstSubPath); - } - } - } catch (Exception e) { - e.printStackTrace(); - } - } - - public static float[] parseFloatsFromString(String string, String delimiter) { - String[] pieces = string.trim().toLowerCase().split(delimiter); - float[] floats = new float[pieces.length]; - for (int i = 0; i < pieces.length; i++) { - floats[i] = Float.parseFloat(pieces[i].trim()); - } - return floats; - } - - public static long[] parseLongsFromString(String string, String delimiter) { - String[] pieces = string.trim().toLowerCase().split(delimiter); - long[] longs = new long[pieces.length]; - for (int i = 0; i < pieces.length; i++) { - longs[i] = Long.parseLong(pieces[i].trim()); - } - return longs; - } - - public static String getSDCardDirectory() { - return Environment.getExternalStorageDirectory().getAbsolutePath(); - } - - public static String getDCIMDirectory() { - return Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM).getAbsolutePath(); - } - - public static Camera.Size getOptimalPreviewSize(List sizes, int w, int h) { - final double ASPECT_TOLERANCE = 0.3; - double targetRatio = (double) w / h; - if (sizes == null) return null; - - Camera.Size optimalSize = null; - double minDiff = Double.MAX_VALUE; - - int targetHeight = h; - - // Try to find an size match aspect ratio and size - for (Camera.Size size : sizes) { - double ratio = (double) size.width / size.height; - if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue; - if (Math.abs(size.height - targetHeight) < minDiff) { - optimalSize = size; - minDiff = Math.abs(size.height - targetHeight); - } - } - - // Cannot find the one match the aspect ratio, ignore the requirement - if (optimalSize == null) { - minDiff = Double.MAX_VALUE; - for (Camera.Size 
size : sizes) { - if (Math.abs(size.height - targetHeight) < minDiff) { - optimalSize = size; - minDiff = Math.abs(size.height - targetHeight); - } - } - } - return optimalSize; - } - - public static int getScreenWidth() { - return Resources.getSystem().getDisplayMetrics().widthPixels; - } - - public static int getScreenHeight() { - return Resources.getSystem().getDisplayMetrics().heightPixels; - } - - public static int getCameraDisplayOrientation(Context context, int cameraId) { - Camera.CameraInfo info = new Camera.CameraInfo(); - Camera.getCameraInfo(cameraId, info); - WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); - int rotation = wm.getDefaultDisplay().getRotation(); - int degrees = 0; - switch (rotation) { - case Surface.ROTATION_0: - degrees = 0; - break; - case Surface.ROTATION_90: - degrees = 90; - break; - case Surface.ROTATION_180: - degrees = 180; - break; - case Surface.ROTATION_270: - degrees = 270; - break; - } - int result; - if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { - result = (info.orientation + degrees) % 360; - result = (360 - result) % 360; // compensate the mirror - } else { - // back-facing - result = (info.orientation - degrees + 360) % 360; - } - return result; - } - - public static int createShaderProgram(String vss, String fss) { - int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER); - GLES20.glShaderSource(vshader, vss); - GLES20.glCompileShader(vshader); - int[] status = new int[1]; - GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, status, 0); - if (status[0] == 0) { - Log.e(TAG, GLES20.glGetShaderInfoLog(vshader)); - GLES20.glDeleteShader(vshader); - vshader = 0; - return 0; - } - - int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER); - GLES20.glShaderSource(fshader, fss); - GLES20.glCompileShader(fshader); - GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, status, 0); - if (status[0] == 0) { - Log.e(TAG, GLES20.glGetShaderInfoLog(fshader)); - 
GLES20.glDeleteShader(vshader); - GLES20.glDeleteShader(fshader); - fshader = 0; - return 0; - } - - int program = GLES20.glCreateProgram(); - GLES20.glAttachShader(program, vshader); - GLES20.glAttachShader(program, fshader); - GLES20.glLinkProgram(program); - GLES20.glDeleteShader(vshader); - GLES20.glDeleteShader(fshader); - GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0); - if (status[0] == 0) { - Log.e(TAG, GLES20.glGetProgramInfoLog(program)); - program = 0; - return 0; - } - GLES20.glValidateProgram(program); - GLES20.glGetProgramiv(program, GLES20.GL_VALIDATE_STATUS, status, 0); - if (status[0] == 0) { - Log.e(TAG, GLES20.glGetProgramInfoLog(program)); - GLES20.glDeleteProgram(program); - program = 0; - return 0; - } - - return program; - } - - public static boolean isSupportedNPU() { - String hardware = android.os.Build.HARDWARE; - return hardware.equalsIgnoreCase("kirin810") || hardware.equalsIgnoreCase("kirin990"); - } - - public static Bitmap decodeBitmap(String path, int displayWidth, int displayHeight) { - BitmapFactory.Options op = new BitmapFactory.Options(); - op.inJustDecodeBounds = true;// Only the width and height information of Bitmap is read, not the pixels. - Bitmap bmp = BitmapFactory.decodeFile(path, op); // Get size information. - int wRatio = (int) Math.ceil(op.outWidth / (float) displayWidth);// Get Scale Size. - int hRatio = (int) Math.ceil(op.outHeight / (float) displayHeight); - // If the specified size is exceeded, reduce the corresponding scale. - if (wRatio > 1 && hRatio > 1) { - if (wRatio > hRatio) { - // If it is too wide, we will reduce the width to the required size. Note that the height will become smaller. - op.inSampleSize = wRatio; - } else { - op.inSampleSize = hRatio; - } - } - op.inJustDecodeBounds = false; - bmp = BitmapFactory.decodeFile(path, op); - // Create a Bitmap with a given width and height from the original Bitmap. 
- return Bitmap.createScaledBitmap(bmp, displayWidth, displayHeight, true); - } - - public static String getRealPathFromURI(Context context, Uri contentURI) { - String result; - Cursor cursor = null; - try { - cursor = context.getContentResolver().query(contentURI, null, null, null, null); - } catch (Throwable e) { - e.printStackTrace(); - } - if (cursor == null) { - result = contentURI.getPath(); - } else { - cursor.moveToFirst(); - int idx = cursor.getColumnIndex(MediaStore.Images.ImageColumns.DATA); - result = cursor.getString(idx); - cursor.close(); - } - return result; - } - - public static List readTxt(String txtPath) { - File file = new File(txtPath); - if (file.isFile() && file.exists()) { - try { - FileInputStream fileInputStream = new FileInputStream(file); - InputStreamReader inputStreamReader = new InputStreamReader(fileInputStream); - BufferedReader bufferedReader = new BufferedReader(inputStreamReader); - String text; - List labels = new ArrayList<>(); - while ((text = bufferedReader.readLine()) != null) { - labels.add(text); - } - return labels; - } catch (Exception e) { - e.printStackTrace(); - } - } - return null; - } -} diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java b/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java deleted file mode 100644 index 099219fa9f..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/layout/ActionBarLayout.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.baidu.paddle.fastdeploy.app.ui.layout; - -import android.content.Context; -import android.graphics.Color; -import android.support.annotation.Nullable; -import android.util.AttributeSet; -import android.widget.RelativeLayout; - - -public class ActionBarLayout extends RelativeLayout { - private int layoutHeight = 150; - - public ActionBarLayout(Context context) { - 
super(context); - } - - public ActionBarLayout(Context context, @Nullable AttributeSet attrs) { - super(context, attrs); - } - - public ActionBarLayout(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { - super(context, attrs, defStyleAttr); - } - - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - super.onMeasure(widthMeasureSpec, heightMeasureSpec); - int width = MeasureSpec.getSize(widthMeasureSpec); - setMeasuredDimension(width, layoutHeight); - setBackgroundColor(Color.BLACK); - setAlpha(0.9f); - } -} \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/AppCompatPreferenceActivity.java b/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/AppCompatPreferenceActivity.java deleted file mode 100644 index c1a952dcff..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/AppCompatPreferenceActivity.java +++ /dev/null @@ -1,111 +0,0 @@ -package com.baidu.paddle.fastdeploy.app.ui.view; - -import android.content.res.Configuration; -import android.os.Bundle; -import android.preference.PreferenceActivity; -import android.support.annotation.LayoutRes; -import android.support.annotation.Nullable; -import android.support.v7.app.ActionBar; -import android.support.v7.app.AppCompatDelegate; -import android.support.v7.widget.Toolbar; -import android.view.MenuInflater; -import android.view.View; -import android.view.ViewGroup; - -/** - * A {@link PreferenceActivity} which implements and proxies the necessary calls - * to be used with AppCompat. - *

- * This technique can be used with an {@link android.app.Activity} class, not just - * {@link PreferenceActivity}. - */ -public abstract class AppCompatPreferenceActivity extends PreferenceActivity { - private AppCompatDelegate mDelegate; - - @Override - protected void onCreate(Bundle savedInstanceState) { - getDelegate().installViewFactory(); - getDelegate().onCreate(savedInstanceState); - super.onCreate(savedInstanceState); - } - - @Override - protected void onPostCreate(Bundle savedInstanceState) { - super.onPostCreate(savedInstanceState); - getDelegate().onPostCreate(savedInstanceState); - } - - public ActionBar getSupportActionBar() { - return getDelegate().getSupportActionBar(); - } - - public void setSupportActionBar(@Nullable Toolbar toolbar) { - getDelegate().setSupportActionBar(toolbar); - } - - @Override - public MenuInflater getMenuInflater() { - return getDelegate().getMenuInflater(); - } - - @Override - public void setContentView(@LayoutRes int layoutResID) { - getDelegate().setContentView(layoutResID); - } - - @Override - public void setContentView(View view) { - getDelegate().setContentView(view); - } - - @Override - public void setContentView(View view, ViewGroup.LayoutParams params) { - getDelegate().setContentView(view, params); - } - - @Override - public void addContentView(View view, ViewGroup.LayoutParams params) { - getDelegate().addContentView(view, params); - } - - @Override - protected void onPostResume() { - super.onPostResume(); - getDelegate().onPostResume(); - } - - @Override - protected void onTitleChanged(CharSequence title, int color) { - super.onTitleChanged(title, color); - getDelegate().setTitle(title); - } - - @Override - public void onConfigurationChanged(Configuration newConfig) { - super.onConfigurationChanged(newConfig); - getDelegate().onConfigurationChanged(newConfig); - } - - @Override - protected void onStop() { - super.onStop(); - getDelegate().onStop(); - } - - @Override - protected void onDestroy() { - 
super.onDestroy(); - getDelegate().onDestroy(); - } - - public void invalidateOptionsMenu() { - getDelegate().invalidateOptionsMenu(); - } - - private AppCompatDelegate getDelegate() { - if (mDelegate == null) { - mDelegate = AppCompatDelegate.create(this, null); - } - return mDelegate; - } -} diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java b/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java deleted file mode 100644 index e90874c627..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/CameraSurfaceView.java +++ /dev/null @@ -1,353 +0,0 @@ -package com.baidu.paddle.fastdeploy.app.ui.view; - -import android.content.Context; -import android.graphics.Bitmap; -import android.graphics.SurfaceTexture; -import android.hardware.Camera; -import android.hardware.Camera.CameraInfo; -import android.hardware.Camera.Size; -import android.opengl.GLES11Ext; -import android.opengl.GLES20; -import android.opengl.GLSurfaceView; -import android.opengl.GLSurfaceView.Renderer; -import android.opengl.GLUtils; -import android.opengl.Matrix; -import android.os.SystemClock; -import android.util.AttributeSet; -import android.util.Log; - -import com.baidu.paddle.fastdeploy.app.ui.Utils; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.nio.FloatBuffer; -import java.util.List; - -import javax.microedition.khronos.egl.EGLConfig; -import javax.microedition.khronos.opengles.GL10; - -public class CameraSurfaceView extends GLSurfaceView implements Renderer, - SurfaceTexture.OnFrameAvailableListener { - private static final String TAG = CameraSurfaceView.class.getSimpleName(); - - public static int EXPECTED_PREVIEW_WIDTH = 1280; // 1920 - public static int EXPECTED_PREVIEW_HEIGHT = 720; // 960 - - protected int numberOfCameras; - protected int 
selectedCameraId; - protected boolean disableCamera = false; - protected Camera camera; - - protected Context context; - protected SurfaceTexture surfaceTexture; - protected int surfaceWidth = 0; - protected int surfaceHeight = 0; - protected int textureWidth = 0; - protected int textureHeight = 0; - - protected Bitmap ARGB8888ImageBitmap; - protected boolean bitmapReleaseMode = true; - - // In order to manipulate the camera preview data and render the modified one - // to the screen, three textures are created and the data flow is shown as following: - // previewdata->camTextureId->fboTexureId->drawTexureId->framebuffer - protected int[] fbo = {0}; - protected int[] camTextureId = {0}; - protected int[] fboTexureId = {0}; - protected int[] drawTexureId = {0}; - - private final String vss = "" - + "attribute vec2 vPosition;\n" - + "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n" - + "void main() {\n" + " texCoord = vTexCoord;\n" - + " gl_Position = vec4 (vPosition.x, vPosition.y, 0.0, 1.0);\n" - + "}"; - - private final String fssCam2FBO = "" - + "#extension GL_OES_EGL_image_external : require\n" - + "precision mediump float;\n" - + "uniform samplerExternalOES sTexture;\n" - + "varying vec2 texCoord;\n" - + "void main() {\n" - + " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}"; - - private final String fssTex2Screen = "" - + "precision mediump float;\n" - + "uniform sampler2D sTexture;\n" - + "varying vec2 texCoord;\n" - + "void main() {\n" - + " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}"; - - private final float[] vertexCoords = { - -1, -1, - -1, 1, - 1, -1, - 1, 1}; - private float[] textureCoords = { - 0, 1, - 0, 0, - 1, 1, - 1, 0}; - - private FloatBuffer vertexCoordsBuffer; - private FloatBuffer textureCoordsBuffer; - - private int progCam2FBO = -1; - private int progTex2Screen = -1; - private int vcCam2FBO; - private int tcCam2FBO; - private int vcTex2Screen; - private int tcTex2Screen; - - public void setBitmapReleaseMode(boolean 
mode) { - synchronized (this) { - bitmapReleaseMode = mode; - } - } - - public Bitmap getBitmap() { - return ARGB8888ImageBitmap; // may null or recycled. - } - - public interface OnTextureChangedListener { - boolean onTextureChanged(Bitmap ARGB8888ImageBitmap); - } - - private OnTextureChangedListener onTextureChangedListener = null; - - public void setOnTextureChangedListener(OnTextureChangedListener listener) { - onTextureChangedListener = listener; - } - - public CameraSurfaceView(Context ctx, AttributeSet attrs) { - super(ctx, attrs); - context = ctx; - setEGLContextClientVersion(2); - setRenderer(this); - setRenderMode(RENDERMODE_WHEN_DIRTY); - - // Find the total number of available cameras and the ID of the default camera - numberOfCameras = Camera.getNumberOfCameras(); - CameraInfo cameraInfo = new CameraInfo(); - for (int i = 0; i < numberOfCameras; i++) { - Camera.getCameraInfo(i, cameraInfo); - if (cameraInfo.facing == CameraInfo.CAMERA_FACING_BACK) { - selectedCameraId = i; - } - } - } - - @Override - public void onSurfaceCreated(GL10 gl, EGLConfig config) { - // Create OES texture for storing camera preview data(YUV format) - GLES20.glGenTextures(1, camTextureId, 0); - GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, camTextureId[0]); - GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); - GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); - surfaceTexture = new SurfaceTexture(camTextureId[0]); - surfaceTexture.setOnFrameAvailableListener(this); - - // Prepare vertex and texture coordinates - int bytes = vertexCoords.length * Float.SIZE / Byte.SIZE; - vertexCoordsBuffer = 
ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer(); - textureCoordsBuffer = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer(); - vertexCoordsBuffer.put(vertexCoords).position(0); - textureCoordsBuffer.put(textureCoords).position(0); - - // Create vertex and fragment shaders - // camTextureId->fboTexureId - progCam2FBO = Utils.createShaderProgram(vss, fssCam2FBO); - vcCam2FBO = GLES20.glGetAttribLocation(progCam2FBO, "vPosition"); - tcCam2FBO = GLES20.glGetAttribLocation(progCam2FBO, "vTexCoord"); - GLES20.glEnableVertexAttribArray(vcCam2FBO); - GLES20.glEnableVertexAttribArray(tcCam2FBO); - // fboTexureId/drawTexureId -> screen - progTex2Screen = Utils.createShaderProgram(vss, fssTex2Screen); - vcTex2Screen = GLES20.glGetAttribLocation(progTex2Screen, "vPosition"); - tcTex2Screen = GLES20.glGetAttribLocation(progTex2Screen, "vTexCoord"); - GLES20.glEnableVertexAttribArray(vcTex2Screen); - GLES20.glEnableVertexAttribArray(tcTex2Screen); - } - - @Override - public void onSurfaceChanged(GL10 gl, int width, int height) { - surfaceWidth = width; - surfaceHeight = height; - openCamera(); - } - - @Override - public void onDrawFrame(GL10 gl) { - if (surfaceTexture == null) return; - - GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); - surfaceTexture.updateTexImage(); - float[] matrix = new float[16]; - surfaceTexture.getTransformMatrix(matrix); - - // camTextureId->fboTexureId - GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo[0]); - GLES20.glViewport(0, 0, textureWidth, textureHeight); - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); - GLES20.glUseProgram(progCam2FBO); - GLES20.glVertexAttribPointer(vcCam2FBO, 2, GLES20.GL_FLOAT, false, 4 * 2, vertexCoordsBuffer); - textureCoordsBuffer.clear(); - textureCoordsBuffer.put(transformTextureCoordinates(textureCoords, matrix)); - textureCoordsBuffer.position(0); - 
GLES20.glVertexAttribPointer(tcCam2FBO, 2, GLES20.GL_FLOAT, false, 4 * 2, textureCoordsBuffer); - GLES20.glActiveTexture(GLES20.GL_TEXTURE0); - GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, camTextureId[0]); - GLES20.glUniform1i(GLES20.glGetUniformLocation(progCam2FBO, "sTexture"), 0); - GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); - GLES20.glFlush(); - - // Check if the draw texture is set - int targetTexureId = fboTexureId[0]; - if (onTextureChangedListener != null) { - // Read pixels of FBO to a bitmap - ByteBuffer pixelBuffer = ByteBuffer.allocate(textureWidth * textureHeight * 4); - GLES20.glReadPixels(0, 0, textureWidth, textureHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuffer); - - ARGB8888ImageBitmap = Bitmap.createBitmap(textureWidth, textureHeight, Bitmap.Config.ARGB_8888); - ARGB8888ImageBitmap.copyPixelsFromBuffer(pixelBuffer); - - boolean modified = onTextureChangedListener.onTextureChanged(ARGB8888ImageBitmap); - - if (modified) { - targetTexureId = drawTexureId[0]; - // Update a bitmap to the GL texture if modified - GLES20.glActiveTexture(targetTexureId); - // GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, targetTexureId); - GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, targetTexureId); - GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, ARGB8888ImageBitmap, 0); - } - if (bitmapReleaseMode) { - ARGB8888ImageBitmap.recycle(); - } - } - - // fboTexureId/drawTexureId->Screen - GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); - GLES20.glViewport(0, 0, surfaceWidth, surfaceHeight); - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); - GLES20.glUseProgram(progTex2Screen); - GLES20.glVertexAttribPointer(vcTex2Screen, 2, GLES20.GL_FLOAT, false, 4 * 2, vertexCoordsBuffer); - textureCoordsBuffer.clear(); - textureCoordsBuffer.put(textureCoords); - textureCoordsBuffer.position(0); - GLES20.glVertexAttribPointer(tcTex2Screen, 2, GLES20.GL_FLOAT, false, 4 * 2, textureCoordsBuffer); - GLES20.glActiveTexture(GLES20.GL_TEXTURE0); - 
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, targetTexureId); - GLES20.glUniform1i(GLES20.glGetUniformLocation(progTex2Screen, "sTexture"), 0); - GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); - GLES20.glFlush(); - } - - private float[] transformTextureCoordinates(float[] coords, float[] matrix) { - float[] result = new float[coords.length]; - float[] vt = new float[4]; - for (int i = 0; i < coords.length; i += 2) { - float[] v = {coords[i], coords[i + 1], 0, 1}; - Matrix.multiplyMV(vt, 0, matrix, 0, v, 0); - result[i] = vt[0]; - result[i + 1] = vt[1]; - } - return result; - } - - @Override - public void onResume() { - super.onResume(); - } - - @Override - public void onPause() { - super.onPause(); - releaseCamera(); - } - - @Override - public void onFrameAvailable(SurfaceTexture surfaceTexture) { - requestRender(); - } - - public void disableCamera() { - disableCamera = true; - } - - public void enableCamera() { - disableCamera = false; - } - - public void switchCamera() { - releaseCamera(); - selectedCameraId = (selectedCameraId + 1) % numberOfCameras; - openCamera(); - } - - public void openCamera() { - if (disableCamera) return; - camera = Camera.open(selectedCameraId); - List supportedPreviewSizes = camera.getParameters().getSupportedPreviewSizes(); - Size previewSize = Utils.getOptimalPreviewSize(supportedPreviewSizes, EXPECTED_PREVIEW_WIDTH, - EXPECTED_PREVIEW_HEIGHT); - Camera.Parameters parameters = camera.getParameters(); - parameters.setPreviewSize(previewSize.width, previewSize.height); - if (parameters.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { - parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); - } - camera.setParameters(parameters); - int degree = Utils.getCameraDisplayOrientation(context, selectedCameraId); - camera.setDisplayOrientation(degree); - boolean rotate = degree == 90 || degree == 270; - textureWidth = rotate ? 
previewSize.height : previewSize.width; - textureHeight = rotate ? previewSize.width : previewSize.height; - // Destroy FBO and draw textures - GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); - GLES20.glDeleteFramebuffers(1, fbo, 0); - GLES20.glDeleteTextures(1, drawTexureId, 0); - GLES20.glDeleteTextures(1, fboTexureId, 0); - // Normal texture for storing modified camera preview data(RGBA format) - GLES20.glGenTextures(1, drawTexureId, 0); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, drawTexureId[0]); - GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, textureWidth, textureHeight, 0, - GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); - // FBO texture for storing camera preview data(RGBA format) - GLES20.glGenTextures(1, fboTexureId, 0); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, fboTexureId[0]); - GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, textureWidth, textureHeight, 0, - GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); - // Generate FBO and bind to FBO texture - GLES20.glGenFramebuffers(1, fbo, 0); - GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo[0]); - GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, 
- fboTexureId[0], 0); - try { - camera.setPreviewTexture(surfaceTexture); - } catch (IOException exception) { - Log.e(TAG, "IOException caused by setPreviewDisplay()", exception); - } - camera.startPreview(); - } - - public void releaseCamera() { - if (camera != null) { - camera.setPreviewCallback(null); - camera.stopPreview(); - camera.release(); - camera = null; - } - } -} diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/ResultListView.java b/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/ResultListView.java deleted file mode 100644 index 62b48a0547..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/ResultListView.java +++ /dev/null @@ -1,43 +0,0 @@ -package com.baidu.paddle.fastdeploy.app.ui.view; - -import android.content.Context; -import android.os.Handler; -import android.util.AttributeSet; -import android.widget.ListView; - -public class ResultListView extends ListView { - public ResultListView(Context context) { - super(context); - } - - public ResultListView(Context context, AttributeSet attrs) { - super(context, attrs); - } - - public ResultListView(Context context, AttributeSet attrs, int defStyleAttr) { - super(context, attrs, defStyleAttr); - } - - private Handler handler; - - public void setHandler(Handler mHandler) { - handler = mHandler; - } - - public void clear() { - handler.post(new Runnable() { - @Override - public void run() { - removeAllViewsInLayout(); - invalidate(); - } - }); - } - - @Override - protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - int expandSpec = MeasureSpec.makeMeasureSpec(Integer.MAX_VALUE >> 2, - MeasureSpec.AT_MOST); - super.onMeasure(widthMeasureSpec, expandSpec); - } -} diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/adapter/BaseResultAdapter.java 
b/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/adapter/BaseResultAdapter.java deleted file mode 100644 index 62747965ad..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/adapter/BaseResultAdapter.java +++ /dev/null @@ -1,48 +0,0 @@ -package com.baidu.paddle.fastdeploy.app.ui.view.adapter; - -import android.content.Context; -import android.support.annotation.NonNull; -import android.support.annotation.Nullable; -import android.view.LayoutInflater; -import android.view.View; -import android.view.ViewGroup; -import android.widget.ArrayAdapter; -import android.widget.TextView; - -import com.baidu.paddle.fastdeploy.app.examples.R; -import com.baidu.paddle.fastdeploy.app.ui.view.model.BaseResultModel; - -import java.text.DecimalFormat; -import java.util.List; - -public class BaseResultAdapter extends ArrayAdapter { - private int resourceId; - - public BaseResultAdapter(@NonNull Context context, int resource) { - super(context, resource); - } - - public BaseResultAdapter(@NonNull Context context, int resource, @NonNull List objects) { - super(context, resource, objects); - resourceId = resource; - } - - @NonNull - @Override - public View getView(int position, @Nullable View convertView, @NonNull ViewGroup parent) { - BaseResultModel model = getItem(position); - View view = LayoutInflater.from(getContext()).inflate(resourceId, null); - TextView indexText = (TextView) view.findViewById(R.id.index); - TextView nameText = (TextView) view.findViewById(R.id.name); - TextView confidenceText = (TextView) view.findViewById(R.id.confidence); - indexText.setText(String.valueOf(model.getIndex())); - nameText.setText(String.valueOf(model.getName())); - confidenceText.setText(formatFloatString(model.getConfidence())); - return view; - } - - public static String formatFloatString(float number) { - DecimalFormat df = new DecimalFormat("0.00"); - return df.format(number); - } 
-} diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/model/BaseResultModel.java b/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/model/BaseResultModel.java deleted file mode 100644 index cae71b6909..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/java/com/baidu/paddle/fastdeploy/app/ui/view/model/BaseResultModel.java +++ /dev/null @@ -1,41 +0,0 @@ -package com.baidu.paddle.fastdeploy.app.ui.view.model; - -public class BaseResultModel { - private int index; - private String name; - private float confidence; - - public BaseResultModel() { - - } - - public BaseResultModel(int index, String name, float confidence) { - this.index = index; - this.name = name; - this.confidence = confidence; - } - - public float getConfidence() { - return confidence; - } - - public void setConfidence(float confidence) { - this.confidence = confidence; - } - - public int getIndex() { - return index; - } - - public void setIndex(int index) { - this.index = index; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } -} diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/action_button_layer.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/action_button_layer.xml deleted file mode 100644 index a0d2e76bfa..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/action_button_layer.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/album_btn.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/album_btn.xml deleted file mode 100644 index 26d01c5841..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/album_btn.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - \ No 
newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml deleted file mode 100644 index 1f6bb29060..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/realtime_start_btn.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/realtime_start_btn.xml deleted file mode 100644 index 6641344530..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/realtime_start_btn.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml deleted file mode 100644 index 8869a1b2bf..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/realtime_stop_btn.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml deleted file mode 100644 index bd068f169f..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/result_page_border_section_bk.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/round_corner_btn.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/round_corner_btn.xml deleted file mode 100644 index c5dcc45d56..0000000000 --- 
a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/round_corner_btn.xml +++ /dev/null @@ -1,10 +0,0 @@ - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_progress_realtime.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_progress_realtime.xml deleted file mode 100644 index b349d15a6a..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_progress_realtime.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_progress_result.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_progress_result.xml deleted file mode 100644 index 17cb68ed80..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_progress_result.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_thumb.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_thumb.xml deleted file mode 100644 index 96bd95e0a1..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_thumb.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_thumb_shape.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_thumb_shape.xml deleted file mode 100644 index 26d033b6df..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/seekbar_thumb_shape.xml +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - - - - - - - - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/switch_side_btn.xml 
b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/switch_side_btn.xml deleted file mode 100644 index b9b2edfb6a..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/switch_side_btn.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/take_picture_btn.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/take_picture_btn.xml deleted file mode 100644 index 4966675c35..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-v24/take_picture_btn.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/album.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/album.png deleted file mode 100644 index 3a6fdedaee3cce52cf376ecb9977ea750a6014df..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10284 zcmZ8{cQ~72+jl5MwJO?LMI)4=W@>L@uS$)IRYg#%c5S7Iy+<0ORimn*M(m>Y2&!t2 zs1~hRV$bjH^F8139q;=`l7Dg}*KywGb)LT!^H5*o+7;F-AQ0%9mL?nleD?l(QC|k$ zagWRMfDbY+goX;Je2DE22m}FX!Ih0(SZxGf&t@~u*pjXbyu72pU`kfK6LZn_rY-ie z#aDvvM1y?Vt9y30`euxo)V>BAm>S+RoBt4ZA#wSpkC`u#KLuT&W3l@(j0RI~E)Rp$ zqvICxDINyZA6A_jrUfyG&H2pxb-$Lv5On9vEypb7A03?R-2J+e7r0q*JYih7+0YvC zutt}Dvf1Cu>Cd;Kfp!tOL`)W0+Yjy1j&>1?cyu&b+fz_->%j#UvvffQ-Mkzo4-z># z=xoLKCn28sF-PqX4IqnjMc?;xW&JiE*ST53(qisEFR5=^A@1zfxy=Q58yF0W6@9Nl zQ}>XC(Jy9a<~B*cpa%JZ+I%_nnWJR8dwTpnBJrzWNy*4PBZ@2GQXzMuqNB4iw2R14 z94eihpclpO;(W0euQ7F;i?yg~X)!6Od))l5?~+q0KMfP>ZVtm(C!lv6?ltN>ZpSLZ z8k(qWvXAKC%@JshA?x&`nD>z0s}luQ6%{TL*s{o*py$s*giLu-X7AeZWoVo8XY{*b z;=gw3(}u&Dh7Znep%0OuWJO-zZE@GTyC-A|C{&A?b8w(Q;<65rQjeZ^VDMTcc650_ zP0xwLPoHz(GvCp=j3jL$#MahUBOxg%{~E{vq=a89f9ybBPS#cz7!c4hGBI)B=%DhO z@SZjbQZ;!wjD$G~f?G_p)Hekhq2^=IrkC2rXze`|w`jy;N?-&Uq2$ zX-l{r?lM)iWERcl*-J&XuqY9B@Cd$(nF1*(!ISE2bXPdzMKwTS(dbSBE68ebz4LQN 
zR7rlwan)Fvsp6k`@lg3?O67P_cjeCOVx(;r)F>o%L<-Z0n^+Te zmB5~%C$EEcR*uZBqS1<|vtb^9x~MI7~=6$-V~qPHd3c=jasl-5kJ5boR68}_D5 zXq42nvhCbsOtBM$UNJ*Bj~v^ndTXSp1)Ji_xqyEP1xNkpLT4|N@9t|ob;Q;Ajos3f zeoQ>LYZ;n)PvD#9FyVl|=$G5hUQJMKhrtAWC$rZ*Xh;->h+z~#*EdYLxP34wBWew- z1noR38`Brg&~~?9_Hi1d1z*Hqhz9V$(H%@temMlab;3(44SMs9pBc>@Wm8WBuHY=F zG&vJ|b>5w=fKo8!yK862M`#e@9zljGQd(mgd*zjbh>E;Wjk362B6%ARszu%T^OLkK zpD63+|9StcNSrVx}Npw`t`yq-Dz;&G<7$`N=ZxQtiCsgK`#D)?>4$ z8fraf)Gjl%8){3QM;89GYMq(i{Oq>N@oqiy#svb|I^mk5j@44@ZYaiBSbP?j*k*2;8~nt|fWTDDm)=Tx26Ofb5> zWoAB`JJ+S;qz|ixVk|h-qOFvRM>7TF9pD&)3g^X5DMY(Z#X%NRi}xiTuqN!Os;X8k z-$P>QCu?12VEs!~$%SJ*MSeF%6l?j`Cv7}cfk(;Y~Y!~Q}ol?AeB%9_xvwjtw{!rfhPJuJtVFKZwRMzZGV{7D6`k+UIY zHu=OulMt$NRaMpy&Oh=65pSKn?1r@jtlJ1UX7m-(==1;1oQ; zhBK`du6IPJ!#8M_WGmbyuuM0TjE;`ld1K%l4cbNDR>&4woA#NejD|_vGxe@G98k<4 zS%i@HYX~%Cgw*UO<-;HEm7qjVVf|#ej+c4|1!d5yuC1vBcOIMx&~FEqSF3ai$PEX! zI=8kfTu@UeLwcBmyc5zfie-4;%@vu%$+}2LcXlgT*j_nVl+EGDGkicP@ZFX``uAhc zviOV6g_Jw9=toKzcD+_prIk~QlOT&S!=#oU53ud^0$}R!1sBwO4&;z9iV(Yt7F8d* zX7N*I8&T@r-qxm+@^bBp1=CDYd^{V`MIto)$(0p`kz$IokBxTf$@c^fuqKAOqN|-{ zaudw7C+qZ@Rg_`qieY|QsT<+&73o?YO*6&U=nxG1Y2->X*OPdEAyECeaZtU5P(}Zu zoE8sIjM?>o$ry~mD3X|QK>N5eOP;)CD?b*E$nPwgQ-LE+?~P^3D5tWe*)K~v;@mZT zP?6Q;>TL&S)%4p$3!ap&?KvYF2Aca)eR|ME%Dk1c2wM8cjdmpnb@+iu^?IYZBq}6s zTt=J|{hgXYi{#>w^2u&_06$)k{J4hvB@6g(@P!z>xtEN?P{%Acv+yB51=vs$MDO`{ z&4hDt0;qdhMGR8#HWd)I8 zwZq>bRwc3`Ord+f{O6mHkvRKscr2r}7Y+B-4&yF9C8o|R=2{u1+I{0MeN;OoYd^S* zGAP6Cb1^+^k%e&``r}#kA~~^PYi|X>1och@sP;#GA_LI)hs?0h$$hbYQL|yvP$gb? z{R%}f7d^!nyikuHc>8DGN-#HHY77mu%Ng{9P^+L(xv3ct6C3gf{M<4LzNdAZ%DAni z3tR@5#mA<;{NwChO!9&h3?DVzAO(`#EQ(;|LY_DGie#@&j`;NC`pq}k5nFMQKof7~33>q4o`Z{KaBIQ|CG)KVun>96p*$t6s{Vj@dPi88xbtGEg-FDIaYNKKt zcw{UWUCIoTr-&Ct9QJB|F}xDDIoDK2#2=T#fK}reJAe!ozuKYQw4}7Qcf5u<(njU! 
z@T+TzWzR41%i(Xo)Lb@_pmd$N3{4%yjSt2^@JQDNM1K)~vX|0$%J5Uicxg(IOW6kn zUAVA|51}A*g0l^Q$UT63}?R4mdmb>18tatMic9TaoBF5;wTdqww>uLi~$ey zh%>=eh~kJ1WI?~SeQ??OZb2dPW2P#b3o1V48`T*NoA<7udH9Psp;3B~?x3!>-^NyA zet(vLAi>-+Dgw?)9g8{IdTkBLp5Fo8$48K^o1hO5@>cMOxqhGCc7;n61*=Uv zQT(-Vai{_dPK_f^;O6b{`gHZ`klF#z9pL!vpmVvGTe|oRhR#RcTIev*ZhIV?`Kqga zczi+vJKmiLj2F>;QxfUK4w#aJk*sI~n* z6;P97kdnl8*XbNSjcNwCn=$(EjszEE5j9N8p)Hs^)PL%AFz0u$K2^PPgCK`}+W!ay zSxa0?I}|?tb>}5;sU4PmBvQmk0}ke_gn#>^c`Su8z=gKsxgH*wLFl)@|DFvqdMr-{ z2C(T2*#d8Z`p@z&U%o*1Tx1XjZK5m`3t9B+xT;eY>G3L*hDW2xoMcc<8 z-3Lt;k}J}IgzrL+njEFo+L*c29$x29?IYZoK=O{q_5bm{er15!>s50@%}wJAHZc3c z7=NDdUXQWTRtebCmuTx9xvge5vS*y#(qdveC4(rh@&b4T6gr_IZb@ZNPjMS_c);fn6myg$(khba z#z;h3?I?|>0w`#teS{0XD@WVZUZ_evnMt&|M;RY4dcg!rtgUBr&*!J{0sR}!Vz3#< z6)OX{*0+AaMFCx;8bpSPGs$GsHKEKUT%LNC#hHqux3mPz@ zOyR-z%ZfikpWjaIQG;8ltY~*{Md2y3u~zcNh);QLW7qtWrf;c{jLnlL3q4e`$}ah4 zWDCmB0FOFQb@C$H?c3&Z3JOAj9Ku>6TU4(Hm&*Xkk+1o+PN0$5_LnHa3f_hP2j)1G zfDPyNWO<@zFXud-(ZM=sUIA#4i>pwp$#yimyx5$|tcj)T9eMCJHg84GNNBS4vS>Hj zE>UxZFa=QG5+pA6vf+s5E8y}Q!_zOh3F6U1Q?|1$*Iqo8HQGc(6h4#_RH6;C@EW`wU>Q;nEadC4y;TGd~7`v6Oeb=xh z#`R=>BXItMaTy1uK4S9xSTo}uZcnRno;IQu`XoMu;M~`yUnEM>s2ML172kTjb%LmS zNH;P#mF@q~*%Au@c*I2-=Ay4)bfRWl&6OneX8Q05fXoBw2%>7IDuT@j^(!hK6QM2yj;nYOMhOES2b$_=0ay*UFPmxY*+K@ z-f>xnh}>_-jjOiuaCpz+@*q}6gTvphgC~3OTIMzFjY{A+*4anwuFS4he8i^azJT@0 zp8m%b6V|LzKRRmzbZ1oIaX;f@hpROJWQ9i`hF0DAa{$cDe>icZe7lOHhq-#DI6WG% z`r<+NDGMlQ0Q&v{QegUc@)i#Zls;y0u8ZK>#E9U`#F)~LxQpc=XDn{p!lRFt7%#QZ zu@@RLFxl2o7m8HE+n9eA)Vlu}>t7#*d#7aB4DYut+N)1D}i&Ezlw@k^t zdtldeJu_gTUsLYeS&gu z9Qx@n9bS~93g_~X+0Xq(5SCS{1|W&!MX**|vv^@VR(Y`LX(%d}1$ zCbvL)NmqNc=6rA_w;yVc39=FSC@R1GpbRsmPcc?zk9#@u4BE{S3+)Sh77nIZ@jy zEStV`UG90WaOjZ^-_r9d(3SyG^JL9=v+mc*r_M7SsAb`2xS5pA)YVya4Grt>jt@Ah zo*&b4q8-;bI2=yDKQfRH9DFUF)HSuVL2@c$*P-6jpS;Jbd7eA^$L@+ z+NFXSUsq@Rv}aNJx_|nNgyWS<;#fU-DJe(9^Na@C5{-{X^_r61n%jnE*;y!6ET&=v zIitDg5b~v%4gli`1eZtPQk;j`RSAdB9&zWcFb~VZ;iidZfZ;258hfAxisp?)=_54xN0fD~LKZ?;|f{P}aR|NCQP#Y7Ta$HRTO 
z^GUINxz`(s^9=LYo&_FT&$)X!lBGN;N|ka*v-}`}&X_6jK(Zf&%4>ZrQ7KA7;e1>$ zWVfx&rs|7d6R>t_Go1r}+F&FzcgZX@8yhqa_xR$J~FyuFqY zDkNbX3E!Rm#>N=u8p9i*Y^4ErePmskK-|}GTI_YLi<4lsEzFzcF@L^XrP{g0?$HT8 zqw;g5TxYcYkAt>c)|eQ=o@9rf!}U4+wWl}SxEB^5-NRexeC+6DkhK) zc@jSkWu_%gXQwz9d*p|!N7`M@*Y&Fu=kBwuoSZY!Z*Mqp-Hx)sBXRu|fjolk7W;B^ z19o9K(#G0-;qsEv8#%9jHW~*!pboEI$_->BMt&TV1*C{P%!EBSP(Rp>vtX1&kFDN) zPvIK>hktt&(C#{Pic;Aya-0HR!P{JUD@A#R*yY!JWepBLOrP-vLI)>)H5m@`~H1hLtQ=bYR5x6)ko$q`)&K7c3!B0w1UFswQJY3Hm!S* z-*%|lT}CWlS1-K{OH_TWH0%9TQKNH4Za7XaAl~OD3sAM1=RyaUT~@vp%$dgYsz>0u zY*1Ty@52%|gdV{BYA9yr{D^17{q?CEA|wHr>hsq>Yi`~Jh}dh9tL{#wv(g+ZIJn)T zHl8#-ZvHPQ^UgCxP&0?i(xBQufP4wIt+0jaa#;`aBY!Fp2AYZEp>q z<%}--Ft!?sk{)ttNf0d{+%QM)he7j%?B9?rXhIp@M%PMEF_dsHLFWO2JYATWAeu$4 zWH=*Ao{XvZj%?-S%hNhxu}uVl1b(Dk>Q@>F>(1sSds6TsKvx!vo__7$Pe_IF^hnF{ z%pHSCr{z+X>DV$M@-Hwl=4#9sb6{h7K*8 zmYuQLvU|D%A>qunLsVhHNQ#q3(16lNOMae_K<6pa?|SshH5&-P6Dm_BP1Ue0FOwKj zcEYQ(7Dk}d{42Mh(5e62a_RFo` zkfEGIG%+Zet(0b}U=@o`fa{hN|8;;zJY@Wo{~0ms@j%Pu#3X) z(KsgA(YhpBy9ja7*49>0Oy?92?&D11;D3FI9Q^{f;$J6&T?af?r>yIQr95*U0Bbsj#BT%FUa!E{zxuCyj^ zI5Y{=#cKVd3RCKcKSEFj;?IaGCT5spENcRVD*SATJVV^G9`wojS9c-ulptc79G&8 z$ildPL-Gz8zGl_LnFNNE8tq_?)FjgSFbT~sQ#5d@UW}%uQ5}SeF=R>V(fY}G0@{)5 za)=_By-?5hD?2x|Hvn{a`?fdc=&qBIOaE$6!ywxg>@26qF|e(#;gO&E{>ulePi}(Q z)ACXL0ky|MCTTv&HRPyG zK(kYr+m5G**@T*W*aAJ51@D=u$wiF(m$VAlf791pefL{Z7(98qbbwo7$0a`H&e1K# z-?Lh9SY{J84iZWW%K!A7&W5=hKtQRmrCESW$-(N*T*AEGs732To^yfE%u-(h8~Kkm zpLqu1I(UTFfz`p}-$P=6BFEG;ItTMsq;=*`T3Q*wUc7pkU*R`{#Z(vO;J#^ z?Zjm_=wYk9E!dQWKw-mE@z4_{r1M&?k2#qQwvra>pVY!#C?|-f3%k?DT#T+{BD%uwsU%u-WZKB&Jp}y zS_pRlAvLn`{-i5%UFf}NF@YcsmwO-5FbIkqY=?uup_7geOz7z7HgjmnGz`BOFr<7h z%*RjB|Dr@62BY$%Iypu|W*i@6UcYwj%tb=hHvWBJ%|^DpMO)X(Tm%Se^}~PuBj(#o zt!toRg9W31u|wyeb~*><=vOJG3APt=7wx_CA2sxz+t=SD1NxC@hpKDgZ>%>tKwoYD zl!*w?OM%GX7C-&xC^Zra!{$3f45%wVUQK{p>^|%?XP?#IbsSvz8xJMJpx7Q;E`o5hkvqHLu+g3 z;_Q*dU@T;@ZF_3-4OfHE%(;g$Q@-#E3P9|ma(ODtk%qPuFo3SBB;R<4*Q^*^Qtp%t z$dgR5wvU&-i_Bix1X|tV3O0x{4BdqBzYFgBp&%ViVk)@pdgCs1Gj5#YzV-awg#?_~{C@i`i+&ln$mJ*I<#C!XP# 
zwE4{9CTp+3?J^|X*FJp`pw9->+9)sPC|=VtV`olJUZBx*JfCeoof)_pWmAO2Ef*`~ zy3R&go`W?Xs?Quz&0rwrwJ!eSZ=*S6`UySeD%sd$@62sRlx28W+$s>?st=Ae~cii@Gp8Omz7Y=y^j#H9Rtq0n81JNmr5@kN|{5Q@{|p zMAtd9FZfR!h9*O@wgCC`8`Y4YD1eHSAMA;uY6CU^$MI^1h(uPm|7Epk%E+*E4-@UV zD70`jroMTcGE4#Bw{zVgje1YR(eEJC3f%-)aR%Y$RcJ^TsafT#bYBMlu`cAF@Jw*A zq`nDO-1MK!tHOR zfPu#q8=hLahOi?AKpEse0)-&N0EMt7Yg4-k*WSnR6nw_1i1KV|fk0H#|FQtsE6Uu| zfV->?=hXVB`Pc3%`cQ3{1Rt$+q$Gzyf%7wEA8pY6l^Y_I;m3ve467Rr5+Go$A>~eC zSzUFvNUuBwuWjSWAz=z&BwdAB{}vPYXCxdWvlP6hinWu`+_wP%v~axUjnLhKq+;>X zb_0Mgvn*)yN$0uFWgm&*@IHBO5`KQCfkv>}x?K*Nn4}O{@^H>w@t07Wx143HwL}A+ zf`d+xxqkoB#R$u9@5v4S5iA`AhsW3SY=_!)LNM07aXtXsjXqdWVn3|3S z*Jt9wxH&QhmISho63V%_7ApXZ(~q|n*mvdC^yc*l$>2TXrp?9(^CxcBdiKdqm?Fjj zw{T`OY?N!GlTk^7ngOR(3nNv%$|{rc&Y-W?_wkZne;}?8p9LnSM=tuaEwJLaiI>CR zf5<$8>P8$bjO09h7M*PsQM;Nx4FjT6PV??O1+aAu-HEDq6e*?u7eWa4)x>I)bHB$| z7?dR$5l+o;_(u}%)75~HSs)wJn^x{_t!X4DJMF0qbeGmB zf`@Dt03F9F%Qf$ayd&-$54A0!yvIm1_vkJ-tKL%;_?A0mn6m|sNW?~z`nyUiqb2j* zN8Wx)5QG`i)0zN|pBB*RUn1Vc#SKt2;XyxnOS`yn2&?u1=i+hSF*gSKSStVHFz2sN z{K_JWq`7Jrn|I|N^Y`ya)}(V|Ivt_Ms$7_ zDpZ#~utZxeH9YdvVk(!)NnGU5)=5a}&nBv?g5o#S$cuq8!A$q&9xPhkQz&>=wG#V1 z&0ep~>}}s-w#(}Y&Md#=(i^+M)~_H6KMOL&XqBpXraB1|j<_wo8=aAh0|#em^xGDo zWc*W?(j{gKc<`??rz`jEJlH)Al7H(%Q&m(LnC5US+bW$RS37{X!8LG#J=tmy$tnJi z?Xp(}!rdN~(SEuLmPVv3YjNh8S$b0{a|eJ+Zlu=VljJNH!#7%0K+(1IS< z6QAl_(Pe5BqlzvA7yb`?Pg-(xC>n-aXinVaA0r-(kP*itW9%LvGDq{zmq+8CNDsk#27 zL$$oK5Td$t>5`(iPcqfXe+(qnYuCZn*MJPi@B53X;g}Kj4y`DOxW~vnX<=$8M00OW zl};MYZD6kF^Xbd3GQ9cSlnWh<>-s0Zi0BxH7&xp&k8XKny5M&Y^M3EjBeHXQ1 zz5>?Q4e%V^n_k2bx2f((LKVIc znzH>Rp_^Aao@XJRnKoF=;{y*i=RltXV!iqnyW*H6SrhKsFbf%K3)(E-Mb|Tfmx$3C zei60>t+LXs383fV;5i~^JdE9Up23bmyl;*5&K*T-J|7O4qQIjEOlYtF9}`*;4dovk z5CP&uONmgG>77X*1*p(}+7yTeK^MnH*nj?r6xpIwP40`6mXPrMet38|1%j;Pl+pfx zmWcGQf`Xb#N>*+M2L~q=FH|^N+?NX1{VZmd1Y zeIRe+nm0V=9{?n|Ah7PFkg;*vk(3WLYTFAu=nfpwMDF_Q6=|g9IM7X^~`1&;4$o+Ml%i?OTyf7QiJf z;5i)`i0DY0drd#8NS~#qe-JO+QJELK_@DNsmcBoDFY*p0VOxgU8-Ut%enP_;j(B!5 n1mxg80$vhk#E7wG=HmKFrOKFdC3^}G{{v~M>BGxao`nA&fyB`d diff 
--git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/album_pressed.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/album_pressed.png deleted file mode 100644 index aa873424ebb9921081bbb9618875fc410bf9c84d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 9982 zcmYLvcRW>p{QohtbID%U4cRkWk#6J)xrDBjz4zWD<(h?y?2=2yCF9!JghJO0nPn?0 zJNceI-|yr1`{Um8$2pICU+?#Mzh1BBc*Q-`)ug3jrvdPd!y-Cn?8k*a`h%++qaMvRfV!m_U_-HT&!_&2WSO9uICNeqPgsHi9c zEr;AqX}0^BGQHwF5o6t%Nbc1j3!bL{OOnq;nEys(E1aI&UQ% zh|Bu$_wQe9I$}j|&v91qg;AzQtkua|jkkMmai$Hw9A`U!ybh#%`8ZJ& ztCE7&C69bdzlY{rePahhuS)?Z5LozKmkV5m5b|Lszgl4&M4gisDB)dp0-)4NmXFJ^ zaqG8%#u3(o`;%QWQ4z)@aEfXeG3nFge*ahRIsHO0QC*QzeKaeA4G#;O>7eCCI3?Si z`43xh>%MT6wuiO)5AD$Fci^oJ4GjU^t3x>v1`lOr^vhj<8t;`s>|yYQ8_*FU(chDu zH}YExXe=!$St}MZt@rI+ZTn7CW8E9yrt!A(b)X2qIet3gDSkba9dNHS6qcu@rSS_|3hwpm~PBaClp~C9}E4zJtiefZDYn`3Yy)IVutd zvD)td18L``BuxOe$mRR=Lpl^Cl>Mhl|?Oj4S59mo<7 z$}_Jw;$k14shm1sRuv^kNRFke2NSZ7oBB*PB{B{VUby(c?iFP zJ8%xjUjE+RUeepQZ=Z$wAe|6$es~)oJ44Uo7Jdor$8OPM$|DaM7P;D*Aie^XY(<+`dW~E zUzr%NeEt^-ky1$3>SW*rzOg@F66@~nR(%j<`LXt$QMrrCh%yhrDr!MR5>5e;KqA}_ zQO3ohR9ESFdlb?Ai;n(`r&q|yA#<|yqNK*__)#JJT^zmcrleSAFpiqE5@oNw|=7P7Pgwj9GJu2J~ z;ZuWMmTm!y_&ANj65R$y0#o0sC4p1SoI`aaePKL0lei-%=jN9SFrT|%XMRj{P~@OY z+`1CnY$k^Ah%~WYNy^E(RqQ8wo~ZQ0KSlBM1Uh)@Lzl1sB=_UrBG=SH7a~|7b6-BI zoeFvp3hx=a&eX*nbJ~3xAb$Gv=?EJKN6Ss6XgyIT$N~;?W@b$;6d+2YR)onU=~xip z;`wMu(!qo%<>6b=KXRETZrf?60>qQ7&(fs-U?$JHwUkSytLT9geEZhSDy4MB9GL&@nxoIRc7rID4tQOIa6YKA%1>N*Hc9L7ted^liMPhlS;>H zvH?-*XgA8SfN(NkKF65V1;YJKcbqqP`8~ZykQr{N}3Rhh%6F-j> z!dCkJTtgI`Ql*ZEkwQj{5N@7a549G;nRmkq3_9mG%7VDvg|diyjxP=r{&x8-!ub$a z&5XKJGGjU3RGPdK*^5C7p`D>Q{tMBnX}^erpzuiU?N>(&iBh+Zlscp}VV zdIw_r4qC6f;Mt9pg-tX6J$W50rDuHNKf`hO119y{$Z?`nz6Tj-3y;Jm5#8zi^Iza0 zUKWM_Df@)3rC88mg6c<;P5GvUD*rtee=g|M(oKvUJO+S;W>a~XlKc>yzLba$G1PrW zTYOrxl7N!W!F0|9@>`uhRzn2IhPWv^wlPBVbVaZag@L%#W+{b=10&w$%yguOH*28Q z3>Ms`OFCHa+wNVSnGYey15SJ;+GJqDj40cy*j9aG)c$xkR%pk1w7)au>9QGdmHRlV 
zz4mdfEbU9dx4oHMOp2w6#J+VVnQ5Ap`wPBOBvnd7tHkb9&|JH@0~Otcjnwi?b^Bb%?+hqQ^kc=w6Nw|p??$wjq>M>);$p0_kIFZvZAI4xAq4JmM=L*u?9r%S&DV6FxW zGv9yD)1>WkamLrhNW@o0xxRd7dNk}~jol|YlR8HN5+_?_ZMOwbH~!LW9M($BjvEG+ zoNLTJkJdjCa8NNss}GYhH}B?)!;tG@ppQzq$8`eJem=@L8g&oxeSE6_1;ZJ zg~&|?)8Jws;`{WZ<=v8Xl~mUzihPj$_fDrBu^5RfKzUi&wt!n=kXEhqr@y#b@FXBI zW2W=hXeYtm=O*hHdVd$&A8XQ*c`9bxxH z06s+p6~BFo4s1dW_FnL%3#A}yO4c5#+h@hN2#jO5GroXz(^3Oo|16-wI>nmk{{T*& z@l_7nzeEt@hO+Dm%>E@Y&;o0$Zy!V5sB~Wut|%JmBQ^`mM4!YA7Q+w6z)XBE;dEKOs z!1GruU{l|LF7L@5QFMF$+})LIX>X`|R6fBHlz*(P&EHmvzdfB=&*#EOZCE}bo4>Xf zXXKjjXMfdKF`E%eK;rq0x@t)SoN;;7g(my{0vw#2+gv<6?YZdH!B_j2z+xy)mSsiA z^a@H1G2jzYKlBCdxMF7dRFMdV{b9gWY0f=Mzh?+Sg6bGWMz|LVQA<8_`2gZ&%KH2| z`ov`8EdRjvhv@L}g#mhfE5ll~0{?g1weEoE$49`k=vky@-K2}?IwFMNZ;e$@`}-Jw@$2XUlPsbTWF3Fk|FAYIz{ z4uzLgOXDm>e3v&*maa03TBRCXL>{cOXD;{|ISzhA_T=Ly@i&fC9dQc;C7t~=PSVbO zNt%Y4`+S8WNm7|8A~>QHQ0tQtC`{^m<>%$Qa*V@AVp3&(`t)54_%}}mZ$>m2b<$v| zd4JvMkr9PE;tpvtPuWD!Hfn!N@spNy^cJag!^DEvKa_wcblUcOqoboEg+tEOu}&8k z%BL-~Kn%#6B61){k|h5?iymCAvP1df$B$PuG&KWIR1yQICs9q-w&-t~IMdbW7+4MUvKW!FKgVgdvB z$_G)nRn)uUpA~DKwSNt8ha$yvo~n@)ycxX9Bd(&0LG#ThTs`4=eUVcVc>XLkdrqYX zeV^U9Lr>}uV-@&fNR*!NDN$$D52ymKn`A# z0I|I~S0EQd`bK8i!#OyRoi5a~NJDC_Penr?}PwInQzkc1w zgKop|l`4zSN;`36lJa0(fDu-i z6a%XTk&JHTEq~nMrp_gqC*=SV>Xv*7uZNVzgQA>K^5lbXED5V<$Ntu?j>k%?#^z!_A7vSwV-HNbgEH@eanR z@G5-xuiKILg-%ZrUUa(76lC_wyM4w;=rC#>6pNhM;AC6S25r~-El`Fu7@38ybu3P} z2-)gr(mS4^tk31zZ_DEF+PBm>?jf=T{(&QYxbM9**CRj8o)^McaqR@lloK$f()+G> zvylS}kX&%*0wA!806LPqNO%h+IBW)IR=u7N?wzmuK9^sm9%D#A)54UHE<==(N@bM1 zztrG4*;_kbP?z1LYOY+@;?~ppUcKqM!F%rO2Xe6B4pyJd zA0o`k4qV(*aor!NlpH7D=aSM{#Apd3UMSV_Yws?#lc<70FEo$f&zG zq#VXf2D4TEuP}gYFf3N-ths48X{(NoQ179x$ry9W+A-IyAtxsfR+w=G09ORl_9RMT zAXpvocMk+C5nPaqoxnj~LHa#c?D%uvMtpt(#yita=!NiE+hr!)x8d20Y29S`zq&*t zM$g|Hl8D~VO<2m$&wtzC3Sump*bELAA^tTh3$`2dkP&$7xL{mD^+zFOxQ7SPN7wVC z#P33?IQ-uyDv$SsiZGUBk(9=%r|czl%S=DYf{?zywhD@3+;fRnj@_9(zdmV}sxQiK zRjK8Z5Y2KE@`1NzA43bS068jJEH1bzq3w4qYEnWg4ZvayGu`18)RqlOP?d>*L|9O| 
z33Dv;14|)EU!!x-CkyGwO1wOh?csp!a^+?iIxwu{7F5cM;RHAF#{gZI_4n>yE?gY(GT)%{od$=92@*D2h`$ zk{;m%Wf=Ec%vluKn0l(`hcu&%SZ(KP3&#s^c*BjK1&`V{rd4ZF)5t6rjzK1O_3>^v zhR8ZSR-UH&5l9&8$^1lZ*EAU}tXr{j;fEl#5{FbPYq`F144?dVV`E|K@GKkDR)$Ua z>&b$9<138HMQ)AM2HhK{q32#q_}&OxUi!J7R_i)l^DKc`Y#B)>aicJR-Nt(E?7M5q z)NwMC=>gKEfOGgwF=p`F9RJU#drm3&?-$Rt$tl(*!z?(xJUn&|a z!ehE4K*gw@Yfx4}0OEn29vMX)+|io_JFd~i^!$eqH|HXiUSOI8pkCIHmy@e^JU{yR zU{+3t!iy7jTj7xv{0|Ejqto#~`=b!nwrb;XIXB+0I;$(TudmMnA9jA^Kvg`Iy9xJT zvSB_;4!8+ot5!(Jgy?fY-)VB*o>)kAp1MU9n3;MBS1RfGDuIoDW6_0U)h@_OpU?eO zV5OZ&NrB9I6Pk2f{{x9Xlwah3iz$Yp(aAD>p%P?9S+$;Fy_=0X!Stk>41}w3@1$Sg zJ;2K4m98$kaJ<6{%Zb1LY*e{&J}?F2H&+z!O{xmB&jENMzZF53b1=wfdvumN?Mp+Z8Uq#l(h<0s|p$Wd~1#>Q#MUPg6(t+{sgsNf8V z7zJvGBD%x?MbM+n%VkpTnwyh5o#WJd&CbDLIu>91=kC>8zuP$j7o$R*W#5u|alBn& z0X%6+eu*x#avlXU{`%#{BPlB>xm$z4?n?r#(2W>+!eQSld+E0~7{p>486c{dcRtn1 zbMNrs4vz*xgjI#ozDK>y`{X+}+*%#XL~(sKRCR zdA-g#BWdRwD^~BscpdY5vxk5}7HL=;KD zsGK;2%Oi2g^7sMD)~D^jBsVTHT1pJ{KF?se(O>7*=K0@XShLJq2i~!5F63Yp&@s)n zVWCiuKB2{q3*CG8S@BR%L=5X%{$t|e;$md$=g%;7d5XQ=?~3^TqE?4N=abR{{zB>` zDY22k<(H=0(?2F$+g#KFZ=jHwOzQjemjcvA76}keXSA;yD~agiBBHt%vfn;WcpJ5;hDQ1WqoI7 z=Vi0QNd68#0)C%Rc{rFIY1S~l?Kj7P!FJIan1Z=tc zK+f@*(G=|;4AI+?{zA$FIPwCb(G)FpeD^)rJ8O{j|&Ll@jFVceS zgb(j98V<@eS9YdrKg#X(7YwS^stK)(0h0PEVQm`ltt6?Sko=kv)>xl(9CPtw&-+?oMwZF&69(=&;ivxn*wDCT5ldiunPs-txiwk8h(LlbYY>ruB60a-Z)n4r^<+_aruO_Vlkt7>(i9neuh)OVA$aB$!sYM zi^=sy8(d8v8X`WZ9CWWZDxr&BOq8VAsb12rnzEF}7j=gNR^;kvO~@VvwY?=MaZE%H z$AJn(IBZ$@Mb}y^tWbj0X8yKf$8@TlGHn}QzTYGFNzr^3)!!AQ%XVx<011%DI!Y#cdKtXDq-{DlNn|y5R zh%;^>GeaEAj`q-6{`R!GNi*jV%N(MY>zQAKEhQjsH~aXG^s~$QK`@p8g0a}9wsCLC zd$8|}tl<+VLq6<`N#^mO#kI2-N|6z3)i}}jvh|F^#y&89gzP2A?>7C@(6?<9n`SiPnjB;3 zJr)P`hIR$d{WHku_iPZQO`9zyw3ED8CVB$9&FAZ5Y$H@J*JVEmZz%&Rg?I}a?4Bqb zB!;Rqcg52L()I<<{ffvM;WK+fF)G?Zr<#8qskcR2)c0`ZGm$6q7)nlr-o@c?AbCh_zWA2Mt~^%>f=);8S4<)U=@$7f1VoTh@;;SzrR zSWzkkwVN&UJI_*~vWe^9qP zjQYT|E%Y>Xe%>M+zj-EU$Z_s3HB<%JBgJAS>~nsCJZ>s5|Kq@C8&k85#vqVjhe`mU z0!D+iq`LB3T)7k9M$P!MaAfFpBb(2s^}p-o*C6ddT>NYQUv;=4@?J=Uo`K+;vu~Q5 
znNsgBw4wBs7=2=o9q018n^;f&=X0=E9k6Q);+oB&9QjLo@tGi^t~?NB=&#YX=p=j~ zyLI%{CdB{Ow}C37+6jq<6#Y@(5;AANvvBfk;Pm;p%k zA+(BgT??Ob#7QLAzGqT1sxTo;Io=Fps@9si^fjak)YwgMyVzI2e%fi7A-*aGR?!%+ z+W*XRCgX)OK~8@RuIe}hV3BN`?CjZSWshL8I3vjIp=ATH9J%X!Q!6tI8fsc&ADE$q zFIa7kT6+9x($I4vdZ;O9GOJ}!3uArW8ZY`jiyag^rR=-f5r|W7V9~f7)|WX;Q&J?9 zLFONnA7G6$3c*Y;sQ;0Lg-;`Uy+#hmUGUlUZyLz&|9(Ad)MMw3_gw12|EunIF~MUR z1*ye#k$5tNlF7xzMW3VX>Fz3{db_$j1|B6MK8olGg0QtL1+00H)hCY0OUn=ijUqFX z$bCD}tpsUeR5OW~WoSO1U!s4ZkMMdkRFlqpe}cHut`4#%jKz#yt(Q@Yp}%skgzQlc zfw}Z{UQ!#s&bdj)@x>K%zueZUr*HG1Vg@cpMy*gM#l>y^Dk!>e=}sf4i>!%+R;~bx z?56fw)W>|p6e4z}iD83)J!#J@U1gDonB)X8W$5XzQB?cwI3`BWi`n82g9j-i0FFH3> zL!9yh@5M<$`$Y(QJ9|7?hlp?R?x#D*YT1QiL6cvasl!H@$3AlbK*vMY=Pu7HcBH|D=6Ido$!wPu6(HQw)fA=f{PVLIjuN+yXyhUk7> zd@dzm>GSBmOJ0_4Wi#gYDflk5D2v*3K}U;Mmb421$SeNM0z`*bn@fkWhW+Pm(Uq1a!~du8XhRiQG*saxO}AxJGSumpg{fi|F&|3HjA7*G*zA7}#*9rpiO|N(6!9H; zP7qqaLz`?8bpEXNdyogM0&!|xvdfJ|xdzt@r#NgHErO?&#);L@we)-Cl+V{N)JjB` zapFw?A2H=ruCi=Y;m!o=GAugWE;AYocEHc7@phuidGqAV24?(G2Va;wU=`K2qpAxp zXApErCzBvrsL@UUy@9&nNySI^;NL1RsJzUXrhgGCq#aS4U+E8efnM)w{Lxz(s(ltJ z){z{G4&(}%5qSB>6nDr=dB*=MRap)8l238)d*wFS=RLmji5gv%vl^R9X0+L)V)8dR zc2Cs{lo#0-hREG_&**3At0k6b((f2k(oU7RQ%2_5tb}yK3pd?M>|$ zv-5tR+X(?G1;=umA4Hth<@*ms`(j3@e1dK{Yi2 zMAjN`P9?Pj?6e6c6}by-j{gO+IPVw|FfSO)8SFn!??YfSCZ&eIN++CWm(j|n=O8eW zXHW7&2ZH8*-m*z-VRkxW`iogCzE@<@*T&oE6oK`Yf=E4d-AXeVq9h@M?6C3hEGQ@` zHIJ1U5P^^J2J33B|GN2kjSwomFEo9H(-g8ho*Jyrw}41v>Mcw)-P2YPlq>^NmDkHOHYZaOTdIeR_Am#Z zI$_c5j_h2w@0x^#WBlYc-?TqCZQ;fGk+?&juwcbT8%=F*;g;xyxSNwAu2cP!BJw?1 z<5}feP+FC_4qI>G!4ZY%0#T#~lkA|Bx2!4zuwW#z&g%V|4O^Njm84x zOD5_sN{WV>fD=8YM2l`tJ9?{GvFs2h;K`y&6q>Ohi0B4!*Al!3P~ZeIcncq}H)Spc zaa@VaQ5k7zGAh57|2o2|#g|j97`XS`svm0gUeujH#7w7_4G7<7SC1g)h{9QRJ#k5qPw zQMf);dafxYW@wrW)2W{h@yhGuoX$xav}M7612Ig52w|NxUllchB>ex5*5pNXMM&Hq zL!0*`bmw?%ZfrC;eaaA1ZnpYnPV#bKzzpv^!3#Eoh_xv2YJ)|r^#s=y9+7iGF$#6D`ri{5JK48_P6N?Ad8&Yzg&S9!X_F@ zJf5T@17&6Hd!^Y-0LDI=5!+^zMAmviVal60qR7mElc)Oz@ISneTMq?(^+IEk4lls- 
z_4S0-frzWX!o8lIXI3Y3H9dJbr`nN?(=OzA%)TV(zjR4%e>W_%VikXZEZa}=&;LoO zOV|jSuV428yM;au?=w-Xk}lp{!2o3&`f_%-B?1}+G}$XO+#?4kM>!t9GvV?JX$51K zJT7(AzzK8szRza+DRX!L>vO%Je0c30+~A3C}uA=>4pdXse19vo-` NwC?G`E7hMw{|}{%Ewlgt diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/back_btn.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/back_btn.png deleted file mode 100644 index ff121e85f5614dfd022f39627028af825a46d683..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 455 zcmV;&0XY7NP)Px$fk{L`R7ee_)juf2aTv$(yX$hI{E2}ivam=p$Rx_3WKfbxN~EM@k(5%BO-WfU zWl@q%HpwItU5P~`e@e=q$e-iq{oFle{yooApZi!ouPf;*5^-D*1ZCuuOqv~5> zrA!dR1lbdTyC*fFAx1H>N#tHgV`xMM43|aVK1sV3na&VF@JshukwHbI#;r&f<8Or) ztVj#Mn<8sgqz>RwksUf78e&vIt`s?>^DIaa!;~UtbcqGYWq6>-4P9kH>^Eu$f78R5Cr_+ovj*@l7te|F0k6$6}HucYmJGEE%xis4oBj002ovPDHLkV1m`y!3qEX diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/more_menu.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/more_menu.png deleted file mode 100644 index edf9f3ccced5afeb71d9516d93ea19f26c7d9984..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 414 zcmV;P0b%}$P)0`;fA>{1X_Il!ymh=^7u{{Rl_|GL>6mPM;1WY$RjBH$w zWyNH^^e|SD$^D5?A`~MKi>Dn*gkl6@@w7vUP>et<-f}zi4a_uOC8db_zyJUM07*qo IM6N<$f|TdAY5)KL diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/realtime_start.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/realtime_start.png deleted file mode 100644 index 94ab0817247bfa462d539237441cdc5795f1fdb0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6166 zcmV+x80qJUP)Py1&`Cr=RCodHT@7>_#hstom2IsY4iJA40?>5GWG@v`qE7vfAic|3b>> z(ICE-GE&DRvctEedwsUXu<&A;EDjr#BDc7PAyD?t zlcF6>f6&y#KBgx&6K1Re1FqLXEvIL~gb{Xdru1xS)F45|h>tN=hp+_*!BU%9QGr$-_F2;S*SB~7yOmaHVMQ@tymQ(No%JopN1Bfh 
zS!;eQN`6(})^lm54`)sY@PG!^>6xCFGA-KK^v@90tI$&p84CFtI0Rzt)l8o1Ke-t*H^0a{Ew@$l4{hLJcAKb*5r&Rq0-tLxh%7Z+2e zli#fz=3~nwEwp+kdX8oeIK)&byFf6^Jt-_e?t3?^-$@7 z+F;kVZ26p=UNOwaL5_x650j`dk4nai-at>p$-oyw8d@nTmEHT@Y2!QU~l#Ms8e=yi4Nk?k(PR*;wj0mt`zinrnHSnR;R z5n1}P66o5Tqdnqr8UpdK|Cof%?w~~4u8ceAX1Z{TnwF((w~p`m%7#pZRE+WRMhsi# z^XA}VcN2+TAmR`A%qS*_bch z(K<`pZ@dN&&ai->y5#sVs70%rLM7Pq8%g%gg7L%}h~CXIt+~$$^p|bSr*`-^VcqUz z4g!cUs6>CJ1S7XN0Te+)X%Hw|&j&N6U1k$0-2)=d6pUAz4=!o}0(_#Oh5aXUGj;(; z`4Qmujq2Lglb|UnnKUd}MdgQAP>>A)7M|0nCHDOd!=QSM3!hZRlf`sFB%Uq*Sf!Uu z?2c6fL^;JH5SRDE29hbfBrxDYbuW90JvXbq_>xC1!PcISHz2l0{V^xO+eq###jk1P z=8C7;5Qv8S%Pb@<~3VYl+qV5mB^_)Jz{_!?fc(|?Xr6xepQRg_&g0=9_V!pRYDIu!xa_l(X?rykGUO5p8x$AV75d}D!P zg_pq|n>}9B_J0JEEH4(JKTtg6cp8X&T5PlJAM+JUp?JUi2%P)cKFosma6+&MeYmQV zHGF7QEMY|#5dUqSV)GU{KU|D`H@0@o#l=!v6!WQJ z|A~5^v5hf(gSwAZtV5e(L9T@Nz3n~Eg7cPJZ84ZJ#fldi7R-F?$vHlfV2@Z1dEl|r zS11wUJ$E4B3tqSkTQo1nL}8_`!FN}&R2QsX(+=!ifnS2@7p31soBgs_V#NE-Mu5^~ ze1rO!c~&i$`M`KfEU2K4xdNuz?_2jG6bR&lK*3^{QfURc-xy{=&|viZCZEbg9)W;B zKp@}<1jHhiNI#CpI}eUL|-FaJ{p_Ng2#OBhD%UCRtUs8{om6Kb{Ag(RQD3@ zCF_UzmR<0eFW(A8zRE^GC(;*I3$X@$G53pY-RBDPW7)_|ea*{v`(<9tm*~^)K!IGIf6eGK&UGVbP>46+?=JP@ z54Q6C!ftGXQ)#XX?s$_)l$+i2V{&fdy5M|h%{klc?QA)=&(KZ($DF$*sb-p+{Qhzf zhB?qi8uweG)YV)|7wy3D3F0aT0y%rWKBoU9Nf@Q;-NtxH+S4NF1%F_Q;*kGJzTqkD zvA8V(0y&uvJ%nj6fUwUaQ*&Eg3Fs=!~5mjv!%mE>eT#XIg&sM7AhiFapK z?C}GPO*ay36DA~&idpe69e0jR$AIu=<&eYg8R-O|Z10Qo z{1M36^W|SuW#k+pigD6UWwffat>8;7kRmI%KWOg#DqHP>`LZFyMKO+;5~4U8K-tsd z;nvgfBfcK1&hxcIoSz|_@e<6J5thM3F)JR#BHtLJv?rODk_GIknFlBDQzC--QozU- z6{0vBGMUr)zFsUezs#TVZ5FZpYVr{4)HgCRANREq?eM?Gy$+;iuKKtT#iL*W+F@lj z7(LO7r-s9rk8co$cd3WLv$G+yA4ttC;{6B+QJk9-6!|L;V5{Ye$hjN6;Qg827`C#n zX$QUsa82tKti!yM=_QfIEjU;M{_zFsrC7?jOlga361WT^Km&i#aQF9vDZ^vKdp>xE zx3YnAH(GHKWt2M}5x~H@UFeFlH|IeG;49?iGY|D=Lyt6Hlbb{9bTi#je&NDQd@J4Fx$!u+pwN5 zb2gD$2m(n_-08on+Re6Nzn#{c?%7H(Um;vIig!#=Y_K~`v#l!C6{{UhA-%V^OAS5x zU@CNi`BKraGhW@ZVsq;Stm`jo5 zLIoh|dB&;1N0M4K=;_F5T4Xd?nuB8uTkW3z=KWh{B7 
zTax*7W?X@eaUy<|C&xbh-~=nP2<=Cp(nYav(h(1X(eB5(-rb7NlhpGjb-QqNaJ04A zQZi~jw;ys*Ln;DX6l2>ppB4YG(sacG+Y$aHwY@xU_I&u|fhp;c*27xKJfzRM6=5k6 zND;-+Q0rl(th&^7^^4tgGwQ*BmN)s`G_d_ZsRXdboz3Sb2z!%}M0ane|9B3!U_O^H-7qqfv*M->SImkT z`!;U_dT&pH(C_fLU_Oq5i!bO)+ss*UN7L1gHrrw>kpi>1M<~gB?D5tH{+E(LL|uY_ zDT)cX!^vh_j7Yr}oRk=Y%*SQ0rs^MRQT-!y;d8A_B6Jx75XE4`KS4hqa2n;wR?NQCM`jiPi z>DEo6|DjBpe3w(*y3u*BM?wANsV(R?ETA|; z&9W+>#8R`R<`Z#;B7lYGwODw*sy@gUp1U1NWoEKW=7UAc7Z&e9#nzWOt4MVg0`x6O zcBY8U7blbXyc2vIw9nOZuvd2?mE`}<_hpf<^a!{?#O8}if6v!1k2AWl%C{hq&wd2l zEMhaE(?90R^hBgN2m$wtShiv6#8_OP=Vf(&r0s8h!od~H=O7kKC#ByxIlBeU$$#n8 zS!_DZ+3w-T=2>WC=BhNd19^_&`2_Q2L#xc`NfB$BPYrsREi+jKRnMNARj+;?HI)~z z;XB$YL@=L)gkwU8SW13WN&fzNzv!DX2rqljiYIluJi*lOcFdS$JrrT-5a1#fDyVCH zhaP@xB{3dH5XYb)u>1i|4r!sN`8}?$(c-15%*!uhATNI+keki&)Rq zqNmG@YI4Gn`LReX41wyH#pZ(z+8MGbSGJHG{;lv>iaU+;7 z9g$Jr3lWlB%Qqx3y5CJiZ9mFD2Y~XtBW0z@5^fAa5!pu(}LzAL<3v) zrI(wL`6NRcpUlXNA1CA09-(x*SSZeq3FyKtDvtj82KEHC!BXeaOmE>wJp5we=@T?P z{t;xh>DEB@kY;b7Co(*zJ5q{W=QC%qg#Ao8^oYAF(dwjJUom6P2T!GYpyf@TwF`?E z^8}f45n0Z9q3;fv=1%QBcM8TE+b@xIwo9q>3z)zxz$96gsJwk_t93zXT*MM0P7t^8 zQ)TS=czsG+*NYx1eG?`ICwPNh&)7(Vqlz|Xv4mXXJLK@$m1a@wD4;=1)}D_OS{v+2 z9$Vxz!tJ2P6tP6Ure7B;7K#hO*z^(j>8BEgZb_GD*~-b<^JROXBGau|Y+FyeFajw2 z3v++V+Q%PssfqZ9@y_OHxk{>P<`W_|7kiBEIs7gE94jPKFi)6h5Vkni+arqITra!% zxe&2q`}Jbs)>b2tNV;6XEcJ<8HRR2F+^k;RNB_}7J)7%-JKoGyuw$Q#S*$Ieh9weL zLX*)iNhy7*HqgB_S4DY?UF}P{61&<~=|mETNes4D)+cmwI|@%)EaX#BIrCtpvYY92s+h&v_Z57e#w)bM-cKY({^V`% zemS3-GOE`yy%-NqovA1Kwpv}QB$Lzle#}gHMVde$9|ZCiyLEw`Z@|^TBUYh?PA|0L zA`}P|f)!Ya(nYYkg8Zu$wLMjyR2m}g7fX{llqrZX+5HAV@1`x=5%qI{N z2&5tq>uCP2LD>wYx$#~)cS}J|-~q1wsMi*eFzwd*edjRjhu3f z*}A<4`|d2X6-6^23(sj(_px21m+ew<>}H{ai1(b20AtIrg@v5LDdn0BLZu4_jj3PdOn$c8}C z%*P9+3%fpJR9e)}fb?@&mY>g7p-2}9*n$A|`_{7XVm|g0R|O!z9-BR$53B5Iifskk zMX)HhGze&&O*ew?Zo)cpx39VJxI(oUBT+jhVt1qp#54CH01d_shC!B_9xE7|9*;z) z*wKoTFrON3nXHDIjHfIEi5=AT=wQel<+J8y7jmvujuq3z7q+vz`^=*BJ( z{VtYdt{Y3@MOkBy0Au5vDYKpV5{xyY`}XifXoR;#L;mEiDy5Mk>JS8E zBCWzi;63zsYkUpk&!GzO_6(TsG^+^`kuS+~B8~N2gw=t7?L8lN 
zo1iq(1y}=2?&D+G8E~k=Lwy5mpug&gl7oDL2Vh8phAYukzjjPe*^Lnc%ZUr&w8^aA<3|pFGwv zW75!6XWluc=Szjh90Sh3;6VBjnW=e1d?*6(&S^6etZxNov1dWF?(tCi?Ydz1j-k1n zb>~Rt8wnzAtg7kT&X#J)Ht(XYcSg!vEzh7Y|4+);^%$}wB^i~ZMZRG71}9_SI_3k= z58~Io6okVcAzkmGw6auttQ#1Q)9b>Xuk|p(5)Xj( z1t#ETE9kcRw#e+{eT79-?hT@sQD0k6{|>>l_HoN0Bu#)3$H!@p9DVCL=Cja@ceKvZ ziT+c#-JdJT@@3v7T@|%s&OCh4=)Dj_j39dWZ{b75h(u5I1tTw67`Q!jE%W6AJlinq zYg#;N4!W^~^LvBI8-2l@TM)N2O!*X3@;rR0L?hbEAo9r6$|Z^3padhgrKY(#?#3}+ z-gs*<^-37Ap2Yv4O#d0g^($Jac@ulQwIT09%8*f`@uN~14_3u}+_KnP-}ql;Ajh^U zE4t^y#-(pE-QX4+@S-qyNgjHN5{UfYV9H?3Jmw)mXsxI2J&d6a3_d9?#vLkY#=fOc z>>mflnu+1UA@I6$HF~l&nMxkm?{Xq06Hj@^DeLK3Z+wv{pPSPvn)&#L;TxZ|*#3F= z;PWADPr>rcOG@y9_Dl=dYN!7yX0Sn*U#8y~_Jl8Z;nMW{v2c?g$14O3%mb)o)DVLj zvQ)Ds+W|RXrf!Vg{He@>-s~OBOq){EswwUmlPbGFAF$e3i}Lwj|JAn4iVY1Jn>!?- zzsI~pUEA?LNbg_H7{~x`{Lg}!z&NnQF?b?_VNP#5B16V$1KD{n9UFaw|2@kY8h^HnNa*p;x2iv1t=XV&+~NSf{6#5$p6ffjM!qB9oaR^)bq# oa1HmEa1ED^l(-iNI1qvV2iuD3?j}ens{jB107*qoM6N<$g6#4V_y7O^ diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/realtime_start_pressed.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/realtime_start_pressed.png deleted file mode 100644 index feef0fea62a15ab72af6556cae2811f9e5f1e3c5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6113 zcmV<77ar(|P)Py1n@L1LRCodHT?v#M#hI?E*377TJdBS72uHw-X2u5F*w}#($YQQIHkiw?teNH7 z_3mbmKoZyu@Yr=6;>84$W!bQ62uGF}k7OB}%Ut0w8~k1%;23k+Xf$9RBn}4547R$Z zQFqt=)tc5w8fiu&sip2#^?Q0%UFTn4|Ej-^zy3mqKOBJ?A%JUCuOq9CZSXC|6dwn~ zH)4vKC_%fVWxbmUHHLHAksef*_pdT^4yVH{4)UorVy?0z&iyO7Z)c&@ED+@6Oz`_4-cgwc)g9#$mCZ zwr3St9iW=J4N-k9FyJO7*l~6~Oemt0@`V=)!#WC3iX=*D6Sy6~fD5ROelZyI%_)?p z-oicg)?l-}3R3*wKL;2i>!DPik`*xYBDUgRDE*iqtRIOeN>Gdv7~!4ydOel}3uG-) zwbP*ZW6bhE{2o*Vvd?_abd_-DR9)59LfFS3j-C)btni3;gr?8XXmS;Nq=oPy8;y^J z(J!03`o5d*!x<9-h`|1hTE6&dhQ&f{e+Qy^E%cOGLm{a}_%dD``Lvp}RI4u6Fduzw zZi5^d*b8H;EU^gY!Wad+?brs)d+G3AX($wC_X@CigT3B0CGE 
zvvas>n2&)R3w9nsu)Y8j{UxuzFKTDt%b^XHedfcm&(?Gq7(LN|@ruSk^bhGU2Y4LA zd`u*1s(KU5GN(2O_{v6q@9Pf0*Af!rn>vou654hse4<$a^zH`<_RRGLc1O%GySrI~ zTRRfVX0EYd+pjb=`8uq%U4$q-QPuPncBHjhGV&2mcWXLC1RIp;VS(I4#cDw&uEN*U8~$VI>gS26}zLWK?VFm z*6YDq;8j*rZi(1cLcYJ$b^4%5JR$f7SW3CYeCIAlfFWQ>&zA))#y4}u%R+>|Scw2z z!;6J{7usb5+sd}`vP(z660uu4IXtIb5io4eX!LCC@!TQ$-+jLlbaLja1d1*%gFZZa zqM{tw3o2P&ScLva^q`ZlKl(|z&9;8bS1yG-f9(;tXvYA|g7-5*U=jK#SwrjD(5hU* z$}S-BP{%Ym*l{si*DIG8p5LAbutWER(&cb?3pzh|C_Oj6Vf_{5Qd<`D$zk8AT2kLh zskT`jpsqD&Q!dDE$nWj$dm1=zmDv`Z>QhaBscy;4M<1UnA(cL08sq_sol>bp@O;id zKnh&85{_tI1rvoeQj2tZxm1_Dy`~)OUk(2R!!AlnMVtO-xy11N_C^5XmF$3e#Vo6q z%zVIj%S@{1T;iGH(Lm6&{r_K*yj6KZqdvKr<)qtZ>$3C7~j8s<|KAV#*VZ(ou=kM zV_+?SdrzW-Apg<o1&{!DiS z^RWZMU|s5d;Mv(Av+v1F%>5<;Toju~6mmu3OZUK0%aoDQ?%;358Z-{rUrBY^$CQ{e3{hC@dBM)rJQ8Qx3=ocpBd z=WZbcQlc0xVCDVz74TIM@<3nKs)J^5a7VR-V!j9@;3@fzsy zWDqOAZ1(rD+pPIhECQw|2DL4|Ks`2Se0yzdj7m-c`{BR$a<#33_>JiK3Z~={<3_*~ z#Rq%Z&#iY2>i9#`+jA{japoI0ON}!Fh+>TO?=TJOdKJZTPg_tM9E`}p2k*^9k27B; z8kYL&dRA;4y#Vd{E5VfBAtub6`Aj6L9a)DvIS$`z0!(Z&9T74u*UuF}OMNIfdy)(3zSQ!y&J~am1{c($iTJ zU}Ec05&kCgbLL|xIQgOUYiw2=58GUAi&D1MXFkRmp)~W+M>|{C&y3}+gAf2wtZDXE z+oFgJnux+;`Y`h`8LY_KpOl!kHx}%iXC~sV(-0UE#Wwtga5Q6|i6+(27tC*LuW1y0 z8)WeW7C?~3POrVj{QPkm0(dW2*a~`Wc#Kn=EZ(Q=myNsn#K{tz3m;-WFe(J{IJL72 zOqkX>a2oV=OD~vpgKzjmbHkkH>}u8xFL)yc-{-2#ZGO_ z{=3-BJY5YCaDa#{7L`=bmy*YM%~%y%5KmVX1RN`3^P!U(^W}@-VKzd*`68BXo;D?( z&=z=U)AP9dt&bVFocV0TVk1alB9`DS&1BB=cAdqJpt(SRl1M%cr`P7mIKB M6(* z<|_b}4T4=GV(Bxpn&r=8#^wd+@EvDj%bCwa!Zt46tHDCCo9z7!ex95<3@`n_^k*@j zv9}v*kW86uCgN@t5O9%*%|#2YK~48|aBe(pa5T+)iLmc@Syo@bSep_Hb$r9PM>V$; z{%#clt`V{M03w7Q(6w~TA534G`5^LC*yT73&h5fZoBlmWaD|zOyH+9KJ`tM_DREif zR;8!oO1QN22=9c}JIpj`=A%T|jkF<^D-6cK`C}adE*7zcKoh$o59G$BnNRehHx!vE zn!*Mf#nZu>P!nDhP2^_a?`0$4Y7sjgbkfX+FMRg^*lGcTF09w10!p-qP!MOn@t`?s z+WHW&qe74qnPxr~-ayduBtSj1|IrC+rQLdkh*enF&;WnSH z(dTufNlIw_42b-f%mkeIGSMi{&qZtwepJWZuzVu1U~s48MDw067m)G4a}iqzGhn#% z`~egSp3Y5Iu;*iDa=|u;C{tvy8)1!D`4jw0kAw}|Aogz`$jy?M`G`&?>S$S$B+U7#n@)?>>NA18<)6U{$yAspOo0uGrDM@izdaup 
zv19SZxZ>lmoTaO3+T;q%QXd#ea0wr@Nau~vK=s;WPP zos5(u#du4jzxVaA>M2_6Dqr9=aH?&srYB!!$1KMx*mm(;#9I1EbcETl3s|B<8r-kD zs+h`(?)g9`#!!iYqm|97hHizz(;5rKR8-3}da`k!CW$NfEY`ZOAbDD^R@DBF2}Lh> zyL(?Lrl!2xYxy!J!qaAIYVvinYb63Yi=D^JmzIZd1d2hRXtCSm-~A?>s(ipKRM+rj zrk}eUfl?4Cn)z7aQd7f?Al2W2?ot;J4VO|O&%+TY838uyWrqGrE`Vp`2n-=m^q7w$ z#u3OwAl}n{u8!#pvAy+9Ja2nRM&yg1?Pk`@{dJ9i(&M{SL;5;qQ%Z?^(Ewnu)sJyM z;~MjgyL6txbqFZo6TS*P-koN{Fri-4KVG*Y%xCO=Z1Np7-pYA;=OLi0@wL$7wKB?K z%+~9jB>lY9R+PBAl)PPv|U>&Av~Y`5ukJ>fgOzbj8e9ffiSsj=vu3i zh%cM@22t#*@J~1#)Mx~t;8u3zvLs4)Cfg%mn7g3(M(Fb1z=&KfbrrXhQK(^M&vrqD z2FpR;Z=iy6j7l)xkB~64x$9jxHpd^1Kw$*R?)fm7l9XD6E^t3Is-bi;LfXZJtK=~p zffNE|GaoAkFOGalFQip`T86Om?KbC1mL`HBMmQQV?X}500H{&?1^kxWmQuw zE7&T6dAZd=Knbj^SzR;<;*u2@|}b< zeGd@*J}k*xS4rY|S(QhCQti@=QlAqiqCm!bMZK0YpGs*vbl)Df2o3Hnv7j&guc~NE zcwrSmKtN$w>lP0`Y6J$-Z|ti@3#!ud)uLg%&XEW(m8e*-?IvBPPr*6t zIWPhIh7woK9+|Q_A+}MYm&3kOk%~@19S(x^!}wWIKyzz(!J{ph4<#lL z*hlO)K0K&s!`99NaQ*82Zf1m9w2ff$&oJS88hX5Iq`;-$#{TFhwV=bS(>4y@EBvjg_j=8y&^L+4fkgwkO)nmq0;1Pn8ltCqIJIqsaP126(!8 z)}cxwW?WVcOE|yLsqmx}*u5S6<_=Ra#Z)~FTPjhI@(K`nOlIYT;2Xq1^p?ypC;Oc^ z<|`U+B`*IElvqz=KiEwF35e@gm0jxl@xto zIc)6D0mhmM!-d1a>dv*$ldaEJ^3ZvgQ(-c(#XCXVgwKET^L*)?9Oi1~V;_bcd{*KI z7QhEz2*P$gEYG|w1}^Q+w}35&eAiH&4x9Wk?9R}~q`+m%bJNGeZ9b}DzYg;NO!Nk% zV_hH(>kD0w1I*M5V>e$Wvp{e59?VSJGQ;Z0ZyS{=t3V&p+SmZ)vtR1zT50;ST}tO= z1*Ce+E9DJ6{{!Kv<&2>OV2%H2U?yN3(8bYNAO*#o!EW$mjnjsbGbihh&O|Bv5$?!{ zuRE{nnQuH_l;DPyI#QcKOizb=M?-Jv1qRtAP4d1|OgW&LQ&3EMz?O|l^#H~A1K53d zQ*6=R#EW{rD5hMurpfv@y0wv|vPy8@<~KNRCodHT?=?zRkhyx%p{pJX-sL67AW$_G@(x(MG;?AP|K@OsHBrf3!iW= z;uTQvfy#%)3xX6M6|^cK(n6A^wg`ptP!Pn=3s)l)3T^Y?aa(DN6bWhO%p`NpzW+Ki zXP8XqvCo;AWM*>C_szG@W9_x~{@0rQTzhTC+{;B^j1b^slx|rjJGeHmkPChkGGEOF zs}Ty@>R-~cK2u>t=lLFXey71XsNHKn1jfDci3ghMv@n}13}X)G>@9z=XTyFd;f2;7 zxY!Wl&eU%nkLOI^lI~5Z?@dKDLu#Z#6$jI-|eZirdkpb*S2gs4H-2nAdq40hX4d*HZC zamL$w?|3V)RbGv0&A9&z6l26CgfV%FL-e)MvSOVbI?91!3GZ(gUT`#zfQ20I%56HWCh)RZH-LjF3>2l>M8w?{KCqm@?0 zD4+QIysA)l|1R{cMl_o!;0k|h-#14K(0tN~bRIp&Frw$;k8>9IoPe5dO>JxMh57I- 
z<+zojd}6txwXfNRnqxWvj%MpQ!T4|e!R}v`<5Y+E(|j-g^i$DH%A>V6{0GW+jF&5S zR0q2^XQC_R_Z6dj1oB{e(;*5s7I4LQ#@F1}Tgt%alQ&HJ%*V9PpAtMcd$@}8>#Bpj z|46(mgPXEZK5CNmX!QFSWgc1O;TKgm_q)Nh-6LvC_Y-z9pD!(xi4U!vIZGGOQ_yce8HzL$JR2~usmhmkYJ4o;4dY!O z6c&9ou6YJ2=+77fb~>Pr1^7yOYc|4sLY9E$4vPmZB27^raz2YW}1~74^uwpYwh`i^<1b> zY?ynHHl(${WNImR&MOq(BJkdR_^ zTempnRj7DlrhK8cz*A7Xlc^xcguxYly&CNOexVA6zMD_mvvVQ(Vave(G}Lk>Ke)2W zj+!Zow)jyrk*n1y(>9{77`DDuQaEPHhmP=X(F|OOHsBqP5MS`M_B~n%V8ugo4hRis zSAy5&n0cL42%clg1Nb2Q+x&IvN`BT${~pu46gcrQQ$8AKgxB5sNnefUAN-u{7%k>^ zk!7-`Xjh>Qy$m%fMx>Q>VZbuL6tPM*)F1QKEqe-GI@7*UL>tuRZGSEHAjr!5m?@u? zfqX+UCg9w-1|zyl(8Q|w@+~=ZYtjCH;!j%(CEut{ zWaslUdLxD4osCD?Dj&_qX}fk@iDr8_`g0Sq6|xe)p?2DbZpCu-;KNqtJAX)q3Pwcl zK|j-4>;I#Fa_ypY7T|@%vZ%@@9-CVmekVlzFXpRfKPuF}37dRAX)yg4=rhhPBp7AM zi?b~X^L!a|q|fA&|H1aaawzlnEFKDHzf@cLF0-DC5n7sFaNc55phkOV@B!GJ-AKQ1 zKVO`0A~a<(*cQ0Q5c;28#fzhfKOo^jxDY;0F|KXtU5?#R-?8GuQ1xi*zF1*Vl+SDu z-(}bk!xGyNg@@;5+!^j@JUtW+!Jt4aD;5~#%~vcVdeEWjME^AS0z7d(`{0B}I59U;dX}fnmjRD2CT*VuKYQ_h`iSQ!a2)~SkCGepr zH3`k(h1TA?P%%6MP8YDL`Opnx4Qjl}C4f_zW`XbZD9Tb_u=|lxEI?6|FW-WaUlhza z{R;nLPrkgydwkQ1h4D+(mfpMa!NT-Cql;6y*Sk_}-u_xXysWq-r+iSn@6hPj%Agw- z1_6ch??M4#SYZDW_a8e^(*|~-GwH)5^iI{wj_1w2|FX|%WQ^kX`9`Lf@d^DDjy}pt zSMf4Jl-CEsmGC8;Z8KW*d_S$hQ#`at(?UACkGfX2`J_a!pOF*>k>52Qtq-#qYQ3?^ z=_E3F8_>GnU5$TMFonJQSYQMo@D8KDlz->^e=9mVD6HecoNUaEf3t;_H!|tYP`pGg*rXvemSg5SFX2+Aix%Z`m0LjOclMRdL+F+mJN>mKr zBwHdW<)bAu_{n=U6bfeD4uKjZUIsjp;#7^n_One^@dP5ncdSd zIoHYV`^~ALLmiFB3>&)X|CqE}QL1Lh`u(NRQ)u^&=TRR^_XFgL>rxdR0qlzNPd)6o zVw&RTtoeq8{?$Z5IbScUcofInSL-TX0=}}kf!s{_6*1G8cgudkc+iibH2Rc$*z4ia z#)#xefH8eJ_JU?lU<|(=a^e4FO#SL}>)F+d>N9?q{jg%gyX;o{GF^M_JtEVmv44*Z z`H*9Kp*$m`7AR-Ur}`#bilww$v8VG)JI4=@3k&oU$DJpERDjF*ndE1>A}LZKa@Tz4 zzCeFlTk~_K(;lH+Qlvb)Y)2CT@`2Mf`^I88C(2p#IVmT3i$ZH#@lJc3U{h{!NRFoW zFB=)BsTM4`-9-MN$@^8Ns0CL(@{dPu$}%PBzAr-r$n6}-Rq~2-Hl2!b0z8MY)3s-msq(S3X*3g+I7 zCIXko<(o52+SiMT=I4kIO>lL1Ae1=7IyrJ$YRXr-?7Yn)H3UD&x=#-GQ$E@tj85inptHxK&Aua?MP$nRavOa;|3z3(@J+rS+*oDT_yyu 
zym}hRS>~7q-uyor_}00ast=lzrqdpTF2#-qdReqGz#*Cm4& z9VrF6c`~#vP1z$bFD~!A3$kCI51x$HfGho^FFMOCZc8W~$0aOZ1Y+`LdAT{J((hOE zIe~-@(Q^{djZ>ZoIFWbwcQdA^dbY}2`Dh&vzxm|}qnm3d2#~x@DVgY~wLalY5P(EE4@UqWEGAx<9+3$HdqVkZ)$`3F1W-x!6M2X3fCV?ZjdUNLSFu8 zqTjNB5C(gX?#-zdU0_RG+?|=iblSc4cZ)D<2!AkW#ogL3D@Yam(UiYj`P513UD`XL z=|rtg@?&3kT=`&>iuO1*V+SQnSRD{S){CoFV@dYmqg7ora@>rjb z;|w+H;>=oRv)*J0h14=!76$I8vPK|3`J2iIis|pQw4l^6#9N^1&W| zyW8kWqF;ezB!AgrX_4Va(L*~S#KT(=XC}oLONv2a2MLO~1cHNP0`{UuS*6cQ`Iy)1DxZbqcnl@wKlJZm+UpUeP#2<0=^ciU!9u|DPsYbH;5 zjZP-YMdyC6JOoJYB!8Kb;TY6JZHIm135TNz<%@I%W`;u19*u{04t6wtScYkQSJL~Q z$7Le9G?jutpX{N4iXxJyWd30RkWfBDXD4CgI)&`5m|=XyBFGtX@k&k_m!R?xupxJK zN#CCo=D7$*sC~yjq5eh-jKpYHD4uOhkDFX^I0Kiyl&9e`<2-3{m#{(*Ai2w5YHRmB zsnm&@PxY}EVfJFqoN&Qsz?+cN8n1_YpG;-yK36^hg^+tf`S>~8u~cR`bP*mFnIL7W zSH0wwmjQLcR;%)1cS}g}j>|nSFcQi~T&fgyG1hk)n-K#4vdF{%#s(n#biyG)^dPhw|Cv)yyh1C!EH0 z|rvFy! zxbKIl5elMw#FQ(Ft>Y6?GvYp%2Lg8FEDYX?HC;cI4D-j6Q%hJdXd04 zWwXu~c7@dHst;pNP>cWIX_uypmsa_Fp6UnDzMJk1&~ATJ_MkUdInWl&+tMZEzAJkK zZi~xTx*&Y<)?NN!cSojpX_XJ2O5Z_Uxyece7BA)rGS+jp&|jyngsoSy2~9R~_gfht zK=LHH7CNhGm5&nPT=_B8erV@)g?niIR_fwsz4ULUh*eU=E>U?P0C}2nEpTo#DxccY z{fw6@AAyO%iN0X>BX-hN4Hb&ZG&^xz+R{XTcOjIU(QLcET(W^w;@?PqBuCk7 zlD}nb_s;H6H1cMoWBQC-@i6gRybI^7UX3}PTZ@;1%ej~cKsmndZ|(b$Y#O2Vz*@|X znAS~-!oQ~md%vG8ZqaH!N>n}gu$8b6+>$K=?za*}0CF?sCz-u)XHydva6fO6dldel zb)PAeRz5PTA8eogrAWt&*_oK7zNg(y*f{d#)c5Xl=_5dLBl*eZR79g+Ln{~4C(a+L zZti&@S6pf3)86ANm@ro9#_+SW%`z8b>pP8_F>$lndUnHdK!C4~$xS}52IJ@0sv&vP=dr3W05&ZFn((cu@Ys#Wo@Q)oY?Od>=>Rx*arABKIGbegifUAn>`fLcpn zLE%ih^SVw`m#+D0nzy|Ky}8Fq$k6#x>s}^|6^PraF=hC=G=)Tf-!fW?N4b_(J|gzl zRDBoC=PRhX3=b2X^v=$uPMvlw=8pbqCyq-yItY+lNIs*JS}YaYO`y4%JnWso&)c@w zNk&8M4ToWW@bj?pGW(EDc(`#2jR4<^osh@*Tl#hsnsYXOwv{g%GudzP{@+IQ|2(pB zT)N^QK%-Oi^A_4|Zo_TUKWlI?Tb*8iJ3pbzk*6|S&2FKJPX)zrJgtFYq0(^q$RR*- zu&a1lTi^nn8INN%muc8ngx#-;Qg+qYwCfx0EB(DQd z+W8cZ`ij`By)=G|hFw-BecsA9#0Ecvf5O!G;Ncb;OxVc@hWJ8loVd(YKs%^8r!^Ii z@OI$nhx6qI6X+^&xYJ|GGTD`kOu0o~^1IOK9FY){7}L*{Oa6T-lTS%12E2!tS>PR~E+PK7|to 
zZk!Pa5RQbWlflIZMn!nYyV*_7Q_OaMrp}w!>B;Y%+*99CH;gszN{AEmtT?C$6lJO1 zc|0q&xc}IRY|-=m#i}g}s(edkKz$eA@mE9c>htI$fUiLCB>7vExPhWg_w@&+e?$s2yY&257=JPu}VU| z;c8oZFN77wr`%m}aa6F&jPM{_WPRsZc%}>_xbB4p2N@Rg{!mTTt#no zX#}k4;YXl+r&vXxqma^_NMn7M32A4%wrj_gSm#*|rJf)Yr;=zepxB3MYB_Chw6n{s zlrSi|@=;;Q@IdQlolugq=^1`~Z}SIx3bK+h*xqyq=f*YIS$7E(+}uG$X;kWk0fhzt z?<7wGc2|)~4UpE0S;A6ZnO`waueX_g56S&p0j}`c zzyjT1H$nKCTH`;r5Pd}_*{V3@vp_&i>(h4aJX>(K6oovE20hldQV-Fei(*pqQSxz3 zu7)#t_Ox3LGKmUxtXU2`u0-^2# z|IDXQsd2@Ixd&-OS_>SU({|5!sozWJxr@%ZFprhM$iC3|!f8Oa3PU7pE}`T*RNsWT zvghm-IzJ_$d>D{?Kmt#@8YA$JO8kD^26jLKoBqzmiZs%5SKB`iYn>Sp!K-xAI@b`SH~) zz4*6|-bJJG$lg6ivS_r)FpMVDN=;CYsi=|bpsaP6>Oja4a*U`2rgpf7_XBv3^NJZl zj7kxruelqw>vly^wllAHd(Gr2ua)b3T(QtE8Req_ijDL9+P?lD;_fokLjS8y_J1~? zJp#o7t(5tapS+rA)w_0F*^X6tN=o@G5U3BxPQPH^ST_q+e##*vzll*Xd=sXZ&Z2{O zyqtfcCfKvJl=EJa%9jl=(Rsc{(>I^3RI|;##oe!Fqa82buwH&6n)qwbWl^j$W7NDD zt4JHH=TZ#IPWb@zo%nZOjEut{Az$z1?B@6h>7|Ho(eq^1c*f96RzfB&jQKoWAj&umz8s4dV?4eJkxr3{M?I4IJRteIhDHX zl+OY;($+LrXZkl$2cDpKJYVrG>2|zh&f-~0ArxwoW(+Mw7bA!o{-Y37m?`|z{$TI3 zrIgQn@=~_SCsSy1SuHYXcDgbD_P8NDoBY9TU86a%E0fPu8q-3DY0sf+ad`4KoNe)K zxVgAF`CU2Rmt&t#8gDHe`WhyFKTbcx5Z9x*{+rf5{VDN4Q%Rd#X*_a7vf|O|2w4{U zYU?Y?sZvGPeAu}35uqE8q?%SKUOr!K?tMC)7MsUBL>aI4*1Ul})V_2yPTrA2CC%u+ z7$c?QpjdOzU6_i^+*hL}yDuHT_|XZaUvH;f?@Iuh{vh@jSv% uhOswgkTK8w<-V!M307_#7lE=v;Qs+JBPw3~by}YQ0000Py93Q0skRCodHT?up>)w#ZRMzSS2#vuVhfkKI8J0Tov8|CT&D{6> zBab30l66NTS&L`R@tM)gfB*a6@Be+5|K9sw#@x$AphO7p66ITw*$%G>F5!aDgzz<7 zuv($8ZGq*z>#`L_bT3ocvTlQOP`lS&2uyn86Zf~&YY{e27{*-A*;|27@A|z^!V9f4 zc!eRvuhPG%s`?4v@}3Rp?@dEBL>FURWlX##1b>Aq;=w?u|Mqm+$@*KFtOXYxN6r}T zXnafy#|MNS-^he<2MlnX*4cb&CQJkq`(}Ed?HxB*NC*}XLe%2k3Vi+G0 z#>gRpd$;@Bw!fN*U$`^47PaIjt#2^YIGXAo)V23sW4)`kdsP`i&F5&pQ(-%S7c> zHO42NUEmG(4D3SRYD}|<0^S^G@BhX`0qP{5X!oqSh7rF2|2XG>&qCCE_tv%dUFw8q zIj5~2;}h2_TIapnQFAmA;Apm<7L5NU5bF6wCAvDq6DNE4DNn|;8IRW4_-T~yC=XZe ztO@mO$wpVs@2kf62;||;mV*>-Ear;wH(zUiUpWKsL^im6PKMj(i6jrs9`bU2ZB3}} zx#YVFc&HfTqb5m@$G?kF=HXtIUsluFySW1JEsBVdbXMr!Y 
zy~lc2?)NdB+j10^My|u1jd$zu*k<_J&Syd#71xbx%Td;elF7HU=VgzFe+&k<7PW^# z`J$gVMex|c6D30-@+o^iAKW2#!yR%i`f}!lSqNQW%C8Gi_)6wc+uHXJ$ln_2ZfP;% z@hjkge9^zWe@|0f0fj63W;fi~_*XcBKW<`)zKuulZ`QW=y;K3%x+3O4pzLctzpwt> zNMz_gnX3HN-`cY=M|s@OX$VBSgGa=5aSLb4Oa3YQT*sGe3a62j>}T26d=rffIi82( z+xBBDHpAmFW2Yb}OuG&3$fgb~>OaARO4N?|XBa~>x0&F(6jkZL;Aw|{*^ZsX z_CXGJHhqp3O<1L&J~J1dGyLs49=G0=>RwXg6Av`+7ar6*QTWpc07Yesirr}*T;F(r zC#K9YgfR<7*9-%!FB$lp55fq06s~NKdyVb&7xrgdbl_Azt*h}8L$IHs25cZ+F!-MZ z+WM|^%GYW7k{MsLqh+4X^oIccAyy$&rkvqx?|sbrU8wuJtAhK~s>-ok=*Oe&IkZrG z3(iNde+sTX71fPrreDf^pz^*MgtjzOmd5gUU04DIHpE6g|v zRcWnLer1;)3s?=N{AJ2i@fbh5uuU?R)m+~w;*I{+?f+ZsL6C)Q$&Aku!0G+{`{qxr zjfT&}3gi<3q1P$D@=woWoGTlnwSkBCy1OBOO84NsH z2;SrIs9578e_WJpUSt?-2@J3LYP#{XhHo&ysG_h3YT5k9o%#=Hp@I?Bd*Pe3`o(_= zOs_jX(+cn$V%hp!RO1tm%&UvM7ycqin_o5je`e3!%fOrTaO>;LGd|4~}ZM_{% z3SqiTJbWn^#_>-0RAXAfm8YU{vCfH$M(YS(Z7>0MjOhz}628Dwiiwp~%@#BdT7|3a zX4nvuFg#)|RPJbTC+*Mhd)RfZ;bm48;X?QjPMIlkzxB8G-H7)5JgW6n?)^gU6wWA$ zQuAqD!4-xfR#~Dc3cngYyqm1wcJ9=-(~QmMpw)4X~v$>#i`umxz*pgcnKq*+%H{r0@7+$sb7Z~Y?%)Y}%-AA?zCJu4uE|Fb zv&M4`FCXagO|y{x(e2iNwCV`W?n*0DEXFIhgAh{> zrOH0vd@+S{BWk+{MnRat=)cDfA95g@empygT-u@_KzeX8AM8Y^yqn#8Go>jXuO7X| zow>*t!NRAbnUI)TyRxD6CDWZ!_#85G%)8mm%x?U*_jQe5O#55z@<( zv*z>rrhFDtX}93SyHD6Teqc0&g)OIJx^V9A(jq{SAV`nJk|KA_mxu@Qbvpu3(4O!B z#ZGarN=E<&cp~&k3xx|BFOYD_S@Y!s9z}~nUvClw?a2q7N=qZW34O}TO|BM9x!p+d zpeg!Qwv+`oK8lZLhAGRIqx-(%5FmY$US$Q3cDJ00al*GSR5(kEN0afO)6*B+_$V+V zMkj7xFCFPh{tbPadX+4L;JiOc%GufC!MPbvX)mbpQ7Br92%5~;eW)4)NUx+{nF7(i zUO1YcCR#MXHIc#aCo`qXXnYiFWw;~w{BT#$*8bUW@6b`z5Q;VvP?P-~>6P>=j|1

I*aD&6-=kSoGDSJceD}$RV+n)cdsf%0F}}l(z8qqK;HUy4KV!2 zcu?$J^D*`bD*-HbgY}&2PUl>JW-orZ(z)L&3ISzQ&jk@WgRzLCEzX7_%ynAU@A2;D z0W0GXxZkP*fkNteFXO`*VK<}33&UOV%evP7O)`klky4;PREE~2sc;0KXH(w=D8^oW zK7U)^z2HlK(wBLbSz;}rbQ+hiauFbXLwd*O#O+n{LVjP;m%dRN=@U~_~^V^2vVQQ)P1gQ z1W4bccN?WNw_doJ`3}?@0jP{K5??oBFK5i(eOEGT!QLDz=qxh z7aJ`AxY!daC24%RFs?$yI2}`KCq>?V{WyzwygThq1B3m?qjA4gCjz8*(!WgU0y92* z7wCV15z4LpfG0W6oAmQhtPborH!sETzAM9YIKFK^bRwk$km2XjR1N}$5Gv^(62F;C zYJSR-anO7&ozATJ62oSm++kItD6ggyX8s+Uk@>y*c5)!#O#i0wv9(9m!2qq*$8Yi0 zmxg4(^LPx`_+)^pQW~fFH;s>s4QsA$r?VE&F0YiuDz1yHn?q!IxOA0^fK&Y^`+PWC z^$UQ3{-iG$YSLjSoyaAu8U&o`Khf;w2?vJF@-CS?^Kg@uF9LS;AMFUv(K=^+#43>^ zfcc6fE95(iJ?0;BsVqSi|QU-y}4;WRdOYUkEdT1qQaB zIOArF97o27CgHqcURn+PY$RmrIOdb zV9JOE0%yh^zfu>ue}>S4}QDpTv2}}3(jb>D@*Ru zRT%<^ohl#)qR3-q7Kvaj(gV0O?=080^?p4smps+DdrbMKP}0n$6^U#7%31~pmRVc&Q% z;AqnLqTRtG!r^$Y#v`u}cQqd?!;}t%GZ=eYhSsI2E(A#LvWEgHiWp2O{d3lH()bLW zodB=N3|eo+4C6|RAm?51T{&r7g6c!ShTi?l`=3yldKr#T`*vVT!}lyOlA~RrsN0!7 zy5x!@7`Wu6s7en)CnTepN+xs3DjWgQyPYEV+k1YSPM@s#{64l7%U(>s2^V}eq6tYY z7(j%B9qCNn-_?&mA@rU!K7QVgL9A{W4d{X>1kove83_H$cI?Eq%hWbqCYei9!3dDP zN$*aJkTgDGndo+Yu^tf!%a2ePtp*`YwdzV`{#98OFUvL4?USe>dI4B49_~;jZBET1WGx z>G7CJ;({1o{e}H+VW{%YiG{$;tt1FQ&!)a5GLLjO1`&?*&j!317CFaC6}%S;`@3{8L=85VanlzFpG+rj-87J% zHly(=hB7rtD2y*ApL09}nx<@lqdJ^}%>7(}2#{U_GpBBq6`e2H6!zD6kHwy#w!nuD z`D~_i8I8}U*8BlZL~}zh+U?`bQA?)OY47;LO);*r$J1WBKdS%%(ktoLPEq*MZMy=Y zo-V{>j$}%g(fAOl^gXyvl4tE=#fy1@OeO~D?*^lR$C1|E&zlZRCI5y#O}!Sl3>l4& z^59(gG1Y$PWaWjr2!)logZWEo*PQ z-V=^TcSgIKX6H%=$8+(nNO#lex^CExgit(*T+Vh7P*vr3zSdrJq2#V`XK)R?BdtWO zDEvBqsPDVur7T*_N14{L`Ea`=9F5}%;eIPe1o+N`{^Vqfbhor%0ykOFs0#l>u9VWo zN6YGmJDWZi?V5dLE~eIZ+TElY-VW=xd#@Y>NN=*c)2LJ;9{)0CG7|R0`GYmBy_<8D zR@(Tq5BSYY7?zu2nTxUY-M3;}ui%P1?PdW|0e8|D>CKL)2IJmt6@1o`$P1fVf!^?3_Sb z@6$5rr3W0*?pbs7cxiRPx7CKWy*Lp$ zO4od~t=nHfsDs~HLK!;0*m{<^Q-Lsa-)mWoOj?&_Vh|wxOqAhKiKLB>hy%6W?_l)$ z4^)?i%0xH4vvc{C`S+*B>adg6r7bQ3q!-dpadR2VUAoz|l9`*yBc3Vz!tL)`@9f+M 
zy5~&u>d|Yc?`S8TOIthyFhCfNskJK`XhF#YUbc;|5VD#5KV#%d>=7gEXfWN5+8wJ$5=NyU}?{+H*SMon| zxgf?z{lM_L;31YkviG_bJsZ&Ox+|Y$0V^OK;ZFG5Wog7CcVMw_1Em9xUf;ffAKB$$ z(K|Li(Y>G{JRn|WBjVL?N8?v5#Pp?iB2cSQkqAR!D(hDoMX7I&aG>+7eb5pXkEcp zAhIZ%u<=b}!qBGh})l-q6=^2x92 z(*Q@pGoQglv9WR7?4}BsSi66OZf6}>YWmb?azyJQcDXwfl8TDGXPUWCx%V!+empA| zao_x@?EL2kid9<{)c7o+3w5WdI8~2+8lj?R7K-O&;X`w^x>|c@zEfseSL1ik4Zjx6 zPj8^9;plv4F~-BYAkFT0894byz}2Zo-(m=b1~l#^i2&h6xaDnl9l@&&gRR6Mumk z2_y;(B@DgO0t`*}?*r|9wg-?_`tj~XulYv0g2yo3cojmt0+-A;#}j6}Zv?0oXE(S@ z@-w4ExDY;0439Vqp@N8B{r8EmL)>-%;- zEJtI*-5psyd!w$z+5y!;`S7W(G<+v*_OZL?A{z!OQ|>UBflXq3qrbl4GyLpLMcaE_ z^k$bvz?vTUBMfknRbV;_DbtBG)_0k^=w7Bq+2%#)n=BE+kUFqNhHs_N(1vbBVGq#u z#zJ@(N>T2j8y^*l79MB=tQ-BcbLlr%l-mNK-mB=@>GeL`0kzTanJ~Z;k(1Xczw%Gd zV=%mp6iHxT$+J?*wM-LYd{lUf;i2vN_g4fL&2XLE;AHTT2}~IHK0~|t=?XJW!rWu^ z)r1IBM$w`uXpf!PDVeY;mv<9pd=pjRH}08NS6>@mBy_fj)+|g^zQxLirp_raBgMxn zRG6~Jn&4ubReK|pulZ{O7w`+WzEv#w$KzXd#%C2)JiEXf?j1Uc^Z4=TZ_dH{qL1vY zybEwQuw>~aig8V$h70B3r6p0}u6wVC(ysv~rtYyir9IHzn{=%dKz3;sWn+Bd?%)Y5 z&W>XqWo@9f=ikyLkRW__RdAnLRXLUm{dhors6>*|kl=t7I_H9p8<36qZbwj@26T%s zL~_j~&BgnDQ|8H@vu7tS8QQWiKI*S&1L8gWH>xm>0fn=x{GtAPWC~!HW^jGu0iKvL z3und6f&;Y~gP?l5JPJh*uf$1KD@6%>5uwD7u{+u4lQsw?XW8g<0FB? 
zolVCf5YfHRM3YqjFXzY9wDsY?b>c4a9}dU2?Z;SbhR0*fKwO#w0n$D=*wqKwe5jdy z=#ODEt@v4DRo>=7syXywe8;^fuFy}Nn)i!l1~zRJq@EU2gt z$j-WGsMO7ZB~K;X6M>jwH)3-39PI9S&%^nNwV~c^KIu+EcVH=Ik=B@4f?!7{Cc|0?;idLGPZ z5F>(Tn-3p7_hK53EqkMqN?mcrXMr2-Xql%o{TrwQ7b>cHrEhsp-aFxwl#8AB24=N+I$hs{nmW)mJYgt{RR+P(?TXtm4t z5*hTtB0AHa#is zZz8-GO8g(wPpF2%!7_P6%^ zWzU;EPv$WXamH&rwXdNMRj>&96i`Vs2Cl$J>1Y_%Tyz%>#FX1tQIoCSllz{h(fuwb zU@-BZZ;F2%Kj($__k696r>ZtSQW0%@)*^$8q42ZN*q#jc%u|@xY|rGt#;T96VAaQ- zkY8rp8S#fe=;ABV>FEG$f$i}I#sOS0yv*Q+s(4pt+rR^3>P6ozm})kt&0faHv@!MF zWO^?82BK1fIl5eE4dkG|#MO4a^)&V_#KJKz5;f*!;WZuqgZBw{#wY=z@jnb>g5h9^ z09FjpT^A$R)HJrAHcC#nG5_e)l;R)Z)|Bs)?y>5P&yGeF?nMl+cpaMQ*+}Q zXZOvX#^Q5#XIhOC=?XT&IG3TuQ-z_#ur~!KajW;)4Gou%Y;GDC0T%&>2>d^$pE3-{ SCC!ik0000Px+Y)M2xRA>dwm``q8MG%KQe>{$bSOg`TNQgylz%4+sFTf$#a|;fFEG39bAhC&r z#3Co~BCB9xk7uZ_r@y9N&zp(I36@K0S9eu))$iBcujlzqj)pR+L$%(1tX;jfQ?+At zcAHDBuODky_qiOakLE^(CYcHCs|nLbKa0uGwlP1pKA&TAtMh#Ncd8+ooRt1Fn%cfG zBv%BFUZv)kISAuC-<)Whe&;X`8LMG_w6Y!}GYc;T@X>cH0E<}!61O8QOobpii{Qt`%GJtR;^N!FT6YUaVE=Ptu z?rb9y;EgZ2DlTls6gz(;gTqGPZ9JT=9&s!Yes;#@5d(zD&4ET7+edRmh)3hM?}CJ> z-Kw*GQ7bNE&0p$Ch6E-qWajhv7#Rt8wmllm$x@PsL9K7!nB74j$+b(Zc~7R|k_>fQ zPRO)D?Gw11&1M6DHEBLl%%hDxjMxqpcZ!^4K|?QlG8LCN2H4#hfsv#n+HuSz=;~Qm z`=JDKyw@$e;$p0NGTkwwuGrhEl87X{!uW!T_Pr;%g`~&J2i5Jy|2ae)EHYSZ9zp7H z{CsZFU@175-6jtld>g&Ni&KMj1;#crLf!`!63XkuYPmYh5% zqyXDH*y23#6Z(D4T$g(KbBTrBT4D;H* z%^ENkLh)Yo7UN@-xD>t)Tw+BRfn6^K_4da!9>q9D<}raj2+J55Z;OKxJ#g?SMlS%c z7=Y%->SbtuYaYu5;BSl|GYm+im{g6O7>GRd>kM^y>+snY zlR8{Y)&^lQ0NX6SPyGllK1o`(NrbcL!qrOOXGSzHi@LzV}h*h`M3`54gh2 z&Vf_R(f#k4(MUicb8~aUgeH-Q7<>pTmY=6mxOndW$J*5K_)lxymbK!N40FU@CNWUx z6S(!MsV1d}iA(}9S{BFZ?6aO)AF<{v3mW5jPbPh80!PlV^`hzO%$Ca~!A)vXza)AI z2$r3pZ)@|a&9Pt^+gDSY#qzb^&rTWeQN`tgx-;Z!+i5~{S(P2fjNcuOmp55-OT1%9 zNenHJyz0)eR$IVuKn4=+f2w|Gx?`4cH!>7ZN|8QE{VvQ$Z+C+@ihILJQcXSx^^(D} z!nV94r(vf(sdZb9ifgbVIsr2({YSu>gca+_a0_KpkVM>#&1iV6Aix}&B+%t(g=W!c zeCae_3}bb+KTZ}qf5ylFVm|7`S9fzj*dDg=02&-3eg;vVk7~rG! 
zB0kTa1i+o2XCua((T$0lkCT1g;**R{G(cAU*BQR|#>4k(nYH83>({TpkMGC7hkNuG z)M*l$y?XWP+q1K?-*%)MHCJ2(V+3T}rKpn{kuYq1YBFe_j)xNxHb4(nwT>k-t?~O( zZu!!urc9{BQ5X`4WO#s=i^YQL_t8nka~}9sEUD2?l^PDU;~aOpsSAlg)F8wMoQ0XvH~@$$g3B1#^BmmJx4$ z=4vx{+ZhcMWP(L{5)vT}v6%6>0R1u1?i}h-M-q3b;oMxm9Vv>MEcbgGzFtU({{bo4FCWD07*qoM6N<$ Ef(u2qS^xk5 diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/seekbar_handle.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/seekbar_handle.png deleted file mode 100644 index 55f5f73991da608090a5586e95158dfd31760609..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 23722 zcmeHP30#iZ_kUVw)1pO`C8cJNeHkK>Ekr3&_B7T?4M|y&-Pp=fNsA~FX)s1jQ^}H| zlo~>z5J`KfwEypUb^6@*nfI-?hVlD+{_f{<-gE9f=brmL-{;<^_kA8SzZ%b)DlMrZ z$uNxcG=oVdMC%hV67NVpm)utvNtA@WfyH`;kx~E=Vov&YWf;kst0zu0HeO}tV7Gpi zoxR$$i4)cA*V(ODZL^$VoHG1OH<}-->M|~?YPqbL{?n2xUN+kqb&XK*1UZW{az`1< zU2^hzal7|Pnkg!E_BEJ$XUAhn1HXN*1AdHHIbr|4`3L4M997iodGH@wHx(Ba);_6? 
z%Jt2yOLk6?b?q##b>!WdHOy#}-D7p0_Hr{axN|SsRanU&TU1T%UE-~k9pV_KM8w(2 z#o_Y!I-%d}7-7c#@xVUv@vj|d2 z#~H@Pd5gX}b3vLZ_&9A`2j*$WvFFOn(=**e#F;}vjQ5XI`v~j(#Vnn>-9%eB_7)Qm zs;53sG&W4g)#Cclslu+&9T>;M=I*k3OBq*heeEPhZIm!Gv8TcV5j8`RfWc`uvO|{G z?~X7KVqz0L9(c`28>SfH@la)BSxNTm-mh=jT+z2w?kl%KB1hfV%SS`OAXmgNxf8>v zRHxo-7&YEK_g&7bcPnyjia*AkX|OJ_Qy(Gb6TTm9j_sjr7$S*WeF zztsHK2xIk`@y>sm9GTSnp2utXylq2EdwJGN9NQ?(gzhnlopoL8k3^eTmm(qI12X1m zj*Om5`GA8eQb!oMmBWWRS24`78`W-l{e_qZUO{gdChC3Hh3Ahe8{QRXm`MTK`(2(O z*6F5VXsnFy%^p#)%EFc%C+T(S6FXK;zhjPTcgfRTJtuUV6RY!7^pDFTDgm8OzLL9k zbI2&^+-&)U+w<*}J1*_&f2(WM9tqJ23E~Qty><3=UUvAYda}5(_hC_WOM~G@U4zu} z_9@O*k4sn=Bpjl@=Ex!gjj{pj6h|Lgs8{2jnV>dwf8GA5sY8x$zdBpwk?;n+KhO74 z*?%`+eX7H7*+s6NXRK3IuJzP)y?!P*b;kP*4$Rj5hpk*whsdlr_{?(Q!M&50{%WqG z72n0`t@#Kuw-Hu7%ztuJQjPVuEEex0e85$qKz#8oxmz|n+})>|8=6l%uUVjRO+He5 zp2W^wLvH<`e(b!_HO)=()pwkh>0EI)pAxE7)VpVIf6c`|$S5p1X3$6G-qD;{lD#$E zhfb54W^IsZFd)b@Xk41yYF|UGe0SS>FLT7Nx`wKG9SL`rbvHU%d+&BmxS~$nzSo8l zXZn}~9haXzV)^*DqGwMH)%Om#F#AD>|0D4W`*cptpJp52XgtVtZ-~P14JS1p_;i_= z7&IX8@xzm=SFWyFH7L&^QsTwVo|C;EdZk&v7t71d+xc;0C+R)XJL3(O{n*XA+vaY; zUBkN#@-{rz;cqYRggJws3{~}Bc|!Jt-CXm2mHoCT9l0;>q2poZ;o^~dVb0w3bH_h+ zz92Q{;#}E_i_Ny4n=;SRdpP++0b<2`U@*U zR~%X4_wJVmLNa%+WS5M7<~u#Pcd~Ty<^duD#t*oXzUI=3OD>mw%Gi_rcMj|AwsKY@5(nL zoRr6{2(|0JEOhwG<O7xLJ~}>$*Uw+_xpc?T$2LpxwW3?!w7wOlYcqxv)D=h;oGj>AIJ(lZQnS*w zGNngScljRaV;sh8S?91J$DnIDe@Y^AGLT=~HTdBJ8^m@PY5z6OgxF^bcIeICZ z+6M0rUVCa&qQ_g6qR6yA{;ml0`OD||rDwK}UL@(pZM|4KbD)pvy+yZmizj?6sWN~4 zOVS&^V4vVy+3)980!E&uevwh5Vc7Gy^!ZeFT0PIq}ta-PVzyW@mU zZ926#=3I<=mTruOOS+44#tJVxo(lYFs z-udvJ6!9!M7rS%ShIvWX#rI`Twb>WZYjrnoK1bF&oP;6uCH@k8M?;C#Kx+?C&D%2Ld15by`LtRddWK|^pPn1HRsOLJG#HV zSudPxb8?t(&dE8R4pw1rpP%_t`CVReqW{LR79%X&w~X3-Lf-4@__sUN^o<8~HF_Jo zc4iNSvRyvAjV8yOIdo?BmGFq^V~@V`GxMk%^kxO zgp);Sw)&09e|6C~V5+=v;E+Wokv}`FE=k*(xGM3gXT9-jbCbDcYn-2#9X8pScJ}PV z{yXLk_Xs(DFL7Jr=OFC9*AT2pcO!Lw98tI+>3V2p+3Asim7DywElM=2zjQG?=9yMT`i&PGHS>^3|JKLVF2kqr4-^4=C+A)asG+&@j{ z#)i-Vfwlp_4hkEz8slpK(byOn0h_ 
zef=wg-xk>AFRw~0oHsVi;p5EwF{#CWZ>w1o`*{76PfrFeI^`T)e78U~e67}t?h%@i z6`vd{3zauY78yQYw8_osx4M{&`VV3?vLCixYq+vJ);9ao(%7Xbw=HCgWhD0cHym&L zd+)2XsYz2GYfRBNIoWK5JZdU#4+8HV4bx%vA zst=^x2y+f8UsrrF&HRqZOT)l54zjC|WvvobK5$uOHV8OC)d!&EmCoyRa61~bfur3|BUf?<^H zjxLFt!Z5R+Oq(=ej*}1*KYLV|P-7zr{ocse-S3UVz;_~=Ho|uzz7xTHPx!tgzVC?d zJA#{m?>mBtZV83Lpwp@`fLd?Gf|8M8FrC>o~+f7P) z3u)`Tw#KuIZfk*Ujc3{lP|*06;#%_21hiC4OFlt!&~a@dLXZM@#Rxh^1LIYwpks6% zuzAg6#|TUWuVUzAY(L#c8#T;p?ay`tl^hE}4KCW|)u*O&Om;lmFAx!Q0qg{9AKeEV z`&c;i!A2kYF)si{0Wz=vBnEwSJldc!o*qNHC0{d83}$OKP})!X*f#W0pl}os%0@!y zoB$SzMyaQ4BnHW#3*%?gxxuF6xO{X0Tn^F^)Q53cIBG0jl;lEzF|d&k&^RAJV&S0c zh(Ph6QIH^rrG<34isV}`D-Pyy8iK$+YC*;^1~n834Kx%8(t(nWCMG8G_V)IZG&MCx zOGrp`6B83t>e#WP5{Y4IYHISUtE=;?s;Y7`Gc)7e-QA4TbR&yvkT8UTOGO3t~hhh)5HWC!$QGClSra$Vj{5;^IW|ptzQZ zpsg0>z+A}DlnZh~Zde1>f;C}n#DTaFC%-!I;*qKR8^6hz92djI0SI|{&cHwFkR_r> zM1_b(P*Bjqva+&=f>zEi4st*)$k~(|)_}EOO<21^Pf)<(x)+Fn9Z^8grS|fv3 zjbs{kV+YYBAQ5;Z;07f{q%)CjL^QLqvd*%>e(gT232QfD04#tBZ=Sd_@JM4l>^jMm z&o>j$;B2YUlQAJI0E>wQ1O$v$Q&XGYQi8AHgEe7oz(54B04BsoVe<805wKq zpVnZ5Aj7aYurFOwQW7F7D?59L%OBZJx z*lVpJ0jOb4Ga2FG;nThq2>=|{1`Nc63xl`-BjQC(hWIyLM8HI7s6Y2U_c0z8gss@& zr=NaWfC1l1H(}u5As7G~G$iA5@AD_tgu2;>g*6Yb448ldF0f$^EG9Z*#tendojVUA z_BU;S0k9Ad^q)*D7V%@BCstmp@eCw`m=NZGh&nhp=z}Kmts9L6@gZJ8unEB=UX5lC z6QYJWoU_=cPahp#lfT7S6BZ6GvOZu$yr>Zt^#9S=M9@KW0vrbob2wy3NlC%4!T*WY zgawE|d^BdjNY{gX(45950>^1hC*&dmo+r?O-_mZv!X|>o-Xc6fh~Slg!#Y_Ueh&Xn zv?eUL8`_MC7ZJ!JBtsiCE$9>INe=}72Mw^ah8JOA9QHZqskuWLSTr^C53`XxS}Xj= z=r;aeB49lW(fA;Lzz19}^ul!G*JOM2DbQ^KP@`$}f zD1s6}CxJzPh=%?9_unKM-Z^~hR(J5=LBPVnMKoY+Qv}y4Wesq z-@Y9pz}&AI(u4tC1mV$u^#CJa7O3Hs$6)NUHS7u$*b(rP7arxqqcQ2CqM`uu3s9Tb z|9K-XfgX`(mrKdQ@L;Bb2oM5q#*h^HxX?QRZ4%0VM{oskBDp^>IV@To7#Qfm?f&n2 zO>4tZ04#`$AAlC?qOU%E)#2wgTzPyX2QP8pNijS+hu3ahh`Y4adriHN_6PMhYt_hldL^+ zU@cga%*EY+M**<4xd}o9CdO@rBp?xR9zT)ylNF|>rcMs8OR1mzS5rPN*aT#{zZ(e5r$@fyV=O zE4_2+9fG?9H3|KEEzA85x;WEiElINl8hi&#v}; zlGl6bCr+H$Ze(P1k>myv;7b$Sn~(tfYr!sU5qPgc 
zGEgHa&_|n?&G!qMB%VY-Fq;JEqaYzj1QLT}fTjt7BLk##Bxk#`qobp)wzl?&PMtb+ zAv;BZ{1YPhbH|XM(M9AfcOm%|`Qg!{M~UQj%t~lNUKtplp$D(Ns z^N|45FouP*HF0rTBLL*pU;=(UJY@)|=QXte1#uz8JRA*dma4HSK#u_(~A&@pIZEcQWTqiq;t_qoOd zAp#~~fvD**u-noQ5Us%m!NO4ExS$$vU@Y3S2AgMN69GYVvGgPyM@{?KHh|&PaGcf% z95s@F8pml3x#>RIIL=GM7~Nn_bK_MMuQ8l}K3~;!Lx$|8OIo2njWKVSO@#KCic025hwO(&CiBhv=4LR81_;BV;n(X0Idb} zVO&cZeUP7@4L%wm+s1xtnwOGUTk>&TA@)SNPZvdxqs{iy zecI+1kK?F$fu_f38|JY4mRBk+#E ziGmi|Qd~=e$nEuio{|9-i!KnZM diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/seekbar_progress_dotted.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/seekbar_progress_dotted.png deleted file mode 100644 index e6241d12e6e67c53f45d8955bdae0707e8c68683..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 944 zcmaJ=J#W)M7`CXWs){nO!E|yv6p8pNwi{b9k%=8@fuksi)JQD7*q6jg?KAd;I1y7f zcK!fDLWrRQD+4n-6B8RF3nNmwOOup=VEcR@dY|Y0dhS)L`FL;l-Y&;+d)~3rrt>x( z2RrQl;r){do$eC1M>=>wrZ7XC9pXL$UJQq*jbJ!?{smPzZtEcG_DIivY6dtKAhQvQ zIHhQgtJaDX24h4(9}T0#;(vbn%!4Sj_!G^S{Irfn(eXS(oq4kx%*TNd^0kMcT9}j| zMg)Q)K1*`5u=oY9N!RRHfb(|rf3FT5C$uiJ&K~^)OBUvy{jBSKPB6sOavFf9u>8;Y`F7>q+vO4Op1&k_QQ zQj+hl7#x%bS(K6pCx9`+KAsSZr;%Qz5T|R}Bwv<^7ECN)D$0VyQd$}M{{Kzmcx|1N zHoA`YpTv20mLjo@ay-cb+PJ|!bCsI)3_*gkF2-l8Dz-+L;CzHrQ19sA(1$^ku!H*x zgzuYPk`tH&$a5^7atKirny#(7iqbGNTh(RRtyGM%V;h?0YHCBRxcUm~;9wG?gsiaP zHP+sUWnzd^>e)eA^a6#A498%Rv>9!Vev%e8Rn zW-Wb`P`zhV*dN~m5y?l0YMhc-oE+n Rr2dQDA^}`v?FU=Kn+j3SMylKyBd-gWb8~`SwT~ySTaQ<_^B9;llV*KSS>qm=;@Jh4-_Ynrl&`xv06&f zrclpPtnrNtsPA*&SfOc3qv>q=2*Nv&~5mg+9#>Cjs^^`B_Pd zGXWp7N0Kh^QU^6-cT@BLQWmgyn)IlGSt%fEgzz*3_06CI$4JuvC~1MLWm3F2APWc1 z1A>AfATA%U=pP~=zi!nrZb@7@mEQot3aRQv+fzyfP#_S}yka;b_FF8mE(T>EpTs-&_2{|X6eE+;E6fN%&+{q!? 
zL4LFI`a&noxwCQ{RH&y5$_S-)kGdJ?EM5}>6-yYc=aF!h`KHaI*kS3FkmXM=-DUHM zUc;Z^uM1^Sr&wDw6`9TeC9@0vd=iOk3Q{ngl085El)uxEj44%!Z!n3)Qotlr$xzNJ z1cuh6YLBAzaeE`VEoF}vy$i0kiu+;cQ+XTZ_sBp#UzoGf+9wb+g!B$-O zHSi;sMw)WT*4Jt;hWQLlSifw?4-N{2o(HnhecjLxwhA}(|)}tcClnj#>#Xi4g^x++Rf@wm3;-Qt`wq#|Yq9K=H*5JIm zPXmSOOG&<41M%zV_3m|rb?$ZkLwjnpt9HP!lt+n;>(YSFohDh@J&{Zcqj#las`7k>7^oc}k z-UUxrqqnT*tQX6v%h}8{)K2Q8 zt9(kiOIme7`h40_;69ZZ#c_Xb5Ii42B0qIo= z@PqKk^y7b7!W_G`VWlqFF1aglnb>j2+bR8j^%>H<`yAGU)_nVX``(I>i|~t>nC+PD z7g(8L%f!pnP1Q~NM@L5Aj85k(6>7-M$Zr*H70whcH#^&f*hyk4FrPe(8rJQ~Z1*v3 z5994rZA+0hc8-nuwdh*m>DJoT@*Cw?^DOh~qG?oj{V&WQrm~r{eq)v#Ux)F@kBoQ7|5b_S8b^P_iA4&qW3swTLy(3V;q}R zSi>+6i}!e=tqUg&Q0yq|qu8UQ)MS&GVd-2==|b+l#ew#9&6Leu?Vb@*- z{{6o4zW09O!s`*m*WyLC3Xi^a55T@#Zj5Wuyv~;OS+0q+fLTPNnzjUG++;ElwKMh2 zrNLDMmjiaq-~zBuBC?e_}wtKr;}EU)nV1ih^dX)=&a@U8uNZm&;WO9$$bxTGsumo1?o_h?28VeJ^FKB%xU) zeIGKf(xTX+)hF#~J2HhtBaI%I+*Fm8s8fFU;PdkN-cjNzdX;7^oIOuwz|lXn1lM&O zU(Qx3%_OOoqivdGe*5MB=`c!Eu%!tfG6xH%Q-^l(bfE-f#+ApDW?>Ezyyl075@um9 zerC2Qg$ng5luB-3<3uZ;x0%wJE*7d4u@yYw^Oe_7YnO|Ck@-9TTE%acg+cA0Ha(vV z*LcJ?T=qUpZIC)Wx*&0#lk`f{@}tZnoFlS8adl}*dY$Ea5rLm#jh>zG^nAvl*K+4!IH@W;xKyz!dCa2V~;MsFopUsLb zglupp`VD=_UwHYkvbqQBKIS>$xxKk0Rb)?}Q_YK(VC!HX@pb(_T%wWGw@l0sItum%>?HbVc z|MU40eGn`~r%Y$_@2kF5L4V!Q^=$&xGJ%&)ZT?AGc+CpM0#S*XLYG;}sPB2xO#f;0XNCVa~c zw|t+$@1TV-Xk|i=n7XCjrjBzs{1kN9fZt$7d_okwS99{+3EF{NW=&f3x4vpEsKYw= zVE4n?o5DQ_+uTJexW(i4&|hrC>8T=Bjm7q(Fh4>s;bfU@F)~o}0yo?`7c9D8+h)<` zbgaFnGY4BtoYJztT$nvrcy_-dxcF#%!GJ&>A3JuLd6K~ZyPlDsF`Ph`uv>InQ&n|U zEAV3byc{;>J|6I)fL2c@Ew#Ggz!Y<$ESZ^!oGR#QJObSr%tj7LklJ@95l3K?;ABH~HKQ zt;_%j69ynE7Jxs*D{ceuSQdaC=c`nj2LQX@3#VTFt6h5|T^)GH{Le8R5&+1^WRDM? 
t%7@^&YO=nGT8~lE{C{d$^6D;;ND6cZNFRG^aD-fqz(~(rw+`wY^FL%9H-`WK diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/switch_side.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/switch_side.png deleted file mode 100644 index 3e6ae9a9472b10d72aac63c4755d67ff33704f31..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4873 zcmV+k6ZY(hP)Px{!%0LzRCodHoqKQ<)g8x4AYgz11v-dMrAWu1^Zyb+GUOm0vQDSLmhBgZ_#c_!SB@FBv7h(Bb)b^K==|EbN-&)-p4Sa`}mE&Y+)DhMkn zDY*o|hXH;8Fbu*C;5^4dLi|a>4k_{tNg~W4k#kFC2eXM{aJmM>r4j{-6b!E%M0jEB}%t5!&C{lz!7Tu2FmoRX76dk`v zg@Q>ZOf~BG@#70I{1#jXj9yK1_t4ClGe1lP0P*HoT3UKB&F0;tp8#>EaLdm*=bVMf zu5n2t405t4vD5J%#@KGSGD_;h5Va#-(1MT-;`-AvEWi%gau7QfP}|Dz&m=>?q!6}j z*|JM9axn%D!KfOHD{c)#=x79(TaCq}DZrjLDSXAZ-g@hnX#erzny-XGP&T#d9u!@O z+l!kIQLFIV%mHzeG3F6mZ`?v^+k+5T@0WqT5k}L|hc;j2497i137m&9U;1T8D&~b@C+rbn~0*RDDy-4pvk1^pDyCmw_*203aIWO zJw38Lm4-0xVP>K9E0DDhr3a@AS;A4a%4XRv41|T(_iDGOy$fB%2bYLNF^x9OtqDQNfDSVrg@x)-{QeY ziwQGit-_yxO^cXL(#>V&FbL2rK1A>?$XX3)lUqt!`<5cxw{PDkA!|Bq_ni#cA84t( zw)X$^>({?JZrr$|VD!s(-+lL+>(;H?&=MP4)Q72kMv-C^SVZaheM=N+%l|dlED}~N z?bDjwB$^SB1{mektm$nTaIb8GtTIR|Lf&k;j-}0!+pHR<`z$hB3|W;LU^FM=bo>v? 
z5^fb{!p>lrFboOTFf)U(g^c&%E~mO5O*q#y{sTmHrk1^p>&cABrHRgnBvix5BwN!> z5TzN6@forbFL}aD*a<^nDNGZQlF%seyz|cc4H?WJGxD9aDUpmaY0n~1Sy}nNF=NK) z0q8F_Z{B>Gk>hJ;uSO5W-6WbvB=11jGjy0!ql@+SDtS^ylJmUDTg{_OW8hYST@$Hs z-)Dv-u17O*RYw8F+?byI>_zyz=xTU-l{Wu2-1E2`ZzZQ*n!$3fuocGfXcQM=)UFu_ z)WgU5G=)ifL?W^<=5g}A4!1HMgIZG>gXMf-D~#jOJtkpVg8^uZ5LT1ShmSNDKNDFp zf@b3=t~4HlT3Z@}X^k)z*0J=DNthPQ0G8HZXhtI`hsYu$JrJ~HjiEGp{%k2J#>A0H z`}XbU3S(g%D|ys*lP0Mz)%6pEtpu>yv4nYXojm7K+MYDBj&ZLthI^GNFJuQ<26mT# z^)#@)5UiVALKn4$0o)x3=|JcBa59W#>ZaID7RrxJ2eO2%Fc#LrJQ~(b5~lVPz*MVN z1Dfwk(I{ba$SYK{XeP$dzP}nx`L=Y;V7ywGN9)U)BuvYvVLTV+AM_g-YL-_2{t1V^ zS;s{d_a(3Roo~?OYeB6rH%r$WRj*OP^t6=JGXbxX_VlAh|LIYrne}(>Lqry0(~>N~ zNiF+Om@8;FP;}VQC}9T<92h|=ZA>T+OYQLq&7fwlm)M#`_&S=wmp#s=wTTnv=Nb+g z=-wz{bbMN0r(Gm1dOF%5H&+BTYbHc#WJESu$7y=6r#7ek{G`FW&T!yD?CW5IVZAof zvFU9Oe=MW4>wd`dgPNs{5X(fCgp=16!f$$>kw*g2Pg`sEXGP1#n7ia z%P!cg*e7yRVTZ_nK-N!)%SEw@$ZKAlqs&-jyU9Aj`K8E~3*>=*VR~W1OGMm8f&O>c z1&>Y)_H%jsaw8(crsNuH5=T;^KBpA>#VNUN9R$vcbNaX z+{St73wv=u9wQ^=HmG4FEF_U{9Y6il(Ta+SF-MLZ(L&1tqtIJjyLKHhdGh3gkqQRk zoDo~@{{8zere7%lpZfr^J9>3Hefsp84I4J(j~+exgb#XFBlGG=xnVVos;=Go+5qnB zRM6C^Q;&4%(&bu=e@zOJ=MN4aKD=qws#V=06%4{Ti(-#a&^A-@A4BJhakeS!w`=_>IChgFnLmPuinsn9yabKYGtgp}7wWHRJ zwP7!P%Yo&@$_^t8MO5W?__bS~1w7xQ#4KQxs3yfBja66I68xJ{=!ev_mo!o}=YjMf zts0!R8RahXLrHNUE^xNnaIy&_jF>?%s`p^{a;VKmz7|y8Y%`#_IozvPFV(cy700z9 zj8p?p8miij9=)tBUi*fNL2kJr!t^>IE)2lcx)XRAMuGm42RZ%Razz?7?Hc0$ZsLcw z0@8L#S8VFyw#DPHxENr#aYTm*)4Q9S@TfOIn67j5}Ts7RxxX~xA(+A2Tu45DmtAx^}N z)z3|s`ievLLaKd{#_Y!^^!Nu#@8iDY{#5IfqWr~4s2_I|rk6tohA@Gnsj9rZypfPq zzFrgdqVK^(@%dx zoKVZ^EGMBRkqJiUfgDOZlfX7XTlZO$(TTabA;!+3JR#T+lqUo{%ys5>EjQ9JL>Njt zlfE#TR)lE*O}B2{^Z@l=0tEXt*}R^GLViByQdTh!%Z24ee+Ut#Z;_z%Nt-C9v^JDr z(xgfIJ9X+b0{geiEHL;H8xq#hRCZ3Knc_m6h#RX%h%ocv7%7}qHOvCR%=m70&1hNK ze-sB;Kc-|?YKfaolLB8{Y<7G&2SMmj~Yf8rK$$kU*96}15ISx`-uzvkne>|y-^fB_0;>Y=_c$g42&_ed6g*h(O;=??5ufaCXOW$%}IkB?C2xD>o2L@DM2*dEVl`;Oexlz?Z z|G*wJk5yptQxw$B73n-`=gysL>C-yuoNM*q^`_RL=-h`x-%HygPTaD?2s0N{%L?6z 
zJPg0+lJ!_wS=qN-=Yb0X){B6qcWXI)FQkpGtE>BqCCz7r#X*A=*bN9nxs51xJ{#^l zuj&H_4qS_T=LVfTF5o8pJ#Fk`_V4q{2>aE%So50Yqo@8? zkFy1Q9C6Ps zm`z^90i-q`Ix`@Iw{Wvv==CZy)m#d&He76?c2A7dyFm>z2`ca>pxkJDcfS+)|7M-D zphou=gHi5ElpBt6-{HI)`5hsA9ADq(QfFU}U(dO7C^OET;;yg`E)bRF6g=xIj!Rkg zn&NF7@!2+BXuxN=i6aw6+Au2Lzwr%iGv15qlB@} z{bkze-J~wUc}|$sg4+8!VY$E_3^3Y*g}@VeI=0gCT;=13-h}yXap2?@@ne+;i)N^L znh1T>U@Wy@VA#bpI>G*IZkktOvePtCQQv1H;#Cy1y?(&JcC3qqIkHO)2Mw}mlrW{z z2Rc#k7){na4gQq|5h9Dix)R^+u>6o|tE+x&wS4b#7&p_-OpPBl8;mt>D2dkGHc6P2qOE=u#!pep7WkoL z5M_!iYF0VAO_6@szYQET8dRU|+aZjFwJ?vCNmRmUt2Mk_juF!-ahF6B)E2IZEbP?R z8*~q<<6|~u6bgs0PyPAj^%36{#=<&QUQr2?diwM&!VcmV`=f3!q+*|H)(W(lCXE1Z zJ2$R{Y|3z+kNK%3TZ#21yMw|yRz5KaQ+tZ?Y7z@5eYQ=HCAU>w2P}QESS`uTQXG4C zus5raK5LiXhETiI^cD!?XnmK}Bg+P&`NkmaKfVyb`Bd94qor?6*JK+7aXC6qm)_X- zDXst;clvDyg`_+AYc0(u%ih8^9R217N63k|k$WgAzMXeCklG>Y0tX&6eW&=+LwyDDswj2{OA8WmEz-U81znBdjc zU_A+l>xjFCNty&x3MZ4%K{C|ih0by1l1UUoK$rA@Pusgl(*VQtS!xD@ z^J#mRj#|8PFXY{if|aTaG|R}`71t)9zP*(}?I zf!AaSH_vOBC4)!aeORNgiu2*Pr!Zh1O=O#y<+paqrwQy1QJNIj+~6yk!D!A+v1G+M zd?QR5Q_`|&B7Tj3AE#PrG@tfdB(|A@TBT=%kKpwB!u#n0ev2RX++dh1bK!H)%nN6Pa~yuANCCOv+(g7R1efxJ4MK zkD!)eFUz ztsH*?``>|btP3(dZvsptB`7(`R3zH;`CKO zE&9<-X9dIJw`{s(`rJznMh9*AxruzWF+pn*OSK|5*)=NC?vxW|71C6WJ|jr2n*vbR z6R%qLIldm{R54Vp^6L{LY=7!!_7o2zuR`)aAO9Hnzd&tzsz;9=o0?kdPP(KXfg#Lo zwLX}`=c;aiJiQKG0QS*!!aE4-QzNy69j8}?$Gb;M@6pn$nULW)G1e0<|4? 
vFaO!be`@tQAom+)xRyCG2xJgQdj$Ru0!TmA>2o@;00000NkvXXu0mjfr$t#P diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/switch_side_pressed.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/switch_side_pressed.png deleted file mode 100644 index 25e1522768f55c7ff7f8f4f6b12073b084dcb2ae..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4883 zcmV+u6YT7XP)Px{%}GQ-RCodHoqKRq#T~~XA&`&|Fn}OqsZb#l6t&Zc4Je{z3MhZbSgJhb{gV2R zGoAJi`(GV9YKsUcFGosKbR0S(h;-~!5QsR;AjK*{Dzu?$B8E4Z5EAd-nYHx8LvX-hH0!)T!g@K%fHw?-1zhooMeQzyA8``RC4^>q89vT3TBA zb?)4`7*oi7p=|uS*|9fWyLPQHKR>@wHs%ud^*pQZ*|X=@Q>RXCvBPDJl@)}oT)A>U z*REa5L3Jpg4}g@u*!c!!+%32*a`0=!-Kb~yi9Cx${JKSW=M?^55a!b>SFRjgzI?gv zeSP_ouvcDrrLeWN^^U7oua3YT2Ban}KUJ*B0N=k<#gExH^J$}hkCa`(Kv zJeA7+$moZR{=7F3)7yUkJpM(y6Z?S1+3*^1HhL_ry>n6lEH3(xQUQeL3FAf z_m-8FZA*8FOB-Pj(|PUMwWCS&6bLHBT}|S5Le$r3z}vMbAg&L(PeAud($Yv;wk}+_ za3Ah#u0_&@ptKOSapT7C(QKZE9)mH*nwy)evY`px25z0y)hH+^P@0Bdj??X^nmc#y z$+q5cr@rR!nzDWSA&qqMz^+p`v> zXM}Cuyt(wkg$wggei%f3(7Si3+{m`K}VSX3d%hQF1co@O9T+ zw~i|Cikr%2i!7?hu4m4iSwO}eiP^Sf$&&4E>Fx?)D5b`Qs)DhQRm~sopRhgq<8>?X zUdpjCnTcHq7Q4Y@iyN4vOqi|MTE_d+V*YY94stfoAmj>A`~sdu-pn{i9TL%uD5E{bxbcSd2>6 z-%^p)p5GNV!bn&NvsBWWG+}BDQoPl`sP3o+V^biEgq1K8cCp`x93m;g)XyTfOe6Bc zYG6bb-QgRG04?Xu!c5o+!_epPxDzH!BN*xwtDmK&I|}4^H@!L>8;}z~kIM!F($E4vBP1n02&rdRJEH4V%cr#^OglVRj zgbqgN8`QyRn<5cejDG)uxNl=@ZjPV8c0(6oDNKc}jd4t|F$vQe42ta(BpplVU#ZA?}?e3EDT^{dQ2D#>sU#MNthPQ&FTJ|T*qh3)7Q+>+IV=)KP zu@sFzUCpIEYHQle(t=gXmMuFHRr7ZCKo|>aVcyn%SyaNbd>Tv&3JVK&c#Vz7;!jsg z#&lg3SuE4e8M3}jHdPr5YPm14u{Yj9c0|HpIS&BuCdVIU2S;7d8!kIEavew(;&YNL z!AdFnQkV;SaS#dWh@Olj4<~RbjO&lkv&)`lm~KaKmBpB=v{S^h2(v)2jd`ifL72u7 z3FFURJ2x(AV?x*)E_tv3Q@d3hvQTt2D`#_>EI}*Gg}pe41a;ULoi@`oH8nj;;C0bHwVN45uBMuhB5lereY)VNVIWmke?T z!ru>!AgoFx;0ORb>(ypDw&)x|{-;I>?z@`%Y_TD21dBy~Lv3`E7di`jaiA^3q4L8@ z7y-%&?Ad>bCh&Ib*zsa*ZS8X68X|>=?*uckE7Cczta#(S=oe{Ws8;e0CD&^aO%eBn zjg5_qC~Z$sMQTLG)eHIBDy+s|&=pj|w1tL%JqYOfn()9_Crz5vG;rX+mG~W#KRI>~ 
zfII;(MS!=LCP zL_P#5l}5ChtwlveDY3Tcf(m-a5IfrG^nuJ|zbK<<}5Js-gM~S`| zFNdP65162YWg9I-8bx2EnZB6sNki8&(jVKiR<5|f^c6P-D~B+By9prkF`o1-OoY*F zo3s~D&>@Yr4!*A*qov0Ue{msB?C)w>A|1kT^^*^+9xqFn6=}2|LmJPID-6p10MeHG zy?Tt6o>ctqsb|lhhh|JBDKADPFT!<5KV{~vz7{yPYr)x3~GHr`AhOu5irxWNR#a-KWn(4G9 z62CYRH|8COFp8bNPSa=}_`%jt6^kub(CmL!BMpAgDcw4V6LDkWb;c6O!M-C*O4{R# zpBsg9&~6krCf!a6!=oDyd{x4XlDTZ#oP<}2xeQ7+r9H5OHBnzWMK7BY(Vlg+IdK|; z4ql^bNMtIM`3)mSmvorI+DG_YPs)TZGZ8?ym>AaxsF^TwGkN^(9v4oZNgfKt0Yv zp^X1M1aRRD$UB7TEE4L=7EDf+FlEeHvu5dA`71D|?2J0(o0ho1 zw1^uM&>@T=AdL#2!sJv5GeA(%^d0&?Vyy4cgTqjkGGvrAaUo8`jnUH~OdhbQbqby= zDlrk-@q{fA4}6}fdr1456;UG1Ofi+S{BBtWOWHu269)oHh%`Pty^D1Gp4suIy-0}Rz;I&X2qH`y z&&g32$YJFsfXnuOY|&|zmdRgb%RvCv<`K)<6ZyqVnlL+-C9KAn3hK=h3cGFeM2ISk#|PLIvA@3ujP zXv1?W=M%=V^%!}MpSf2!g^?i+rBjbYgk|eTezrWvA94Bk-@;=v^6@!~gZ@D2GX1p> zjgof?bK)r`zWtG;I0+1VD2%AOp26<~b-0?hIeXby%mIKq1p@9bAnQ5Ey66Qc{532^ z?jUNSUn&2D#t~K}Du4o#%l4q$Nal$8hrX06?g(Uk$;m}3S)PzZA-O@A3;Rf93HOME z>46UD#A;?+=p?yw?GjnE)*d6>R>{JygRJMhI%Z1vQVmYTYPPYLSJ)MmFy$gx-i@h% z`95K9wqp;GMb-2e0FF{Jao2-`r(_9ZVU5|{Rtb(um=xvZcd%Mfc!J!yaEUCyn@!p+ zvPkn+R8i0IpJI*Tdxe3Gv06dGJnGdZVYZTc5mFhtIXbGuEPf!eC|R>eg8HB;S*I8) z^qed{Q>2dpR1)RhSQ(>D!lbm0zNKo=iF4EBZm*Xi3*dgDG-97BEiHYHzR+2()6BDIPbFVjTBMwku} z#%=v~869F0rhp8B?!!7xf*z(PUSwWw54Ol+)zmC(RY|%U7{hy(>66pnI=%Z9mJA#` zObo|ujAKR?7hw=}jZNcKNGs84sxhTzQGi{$c4;$yPpJ%Pr@4PV9b_p!8_N=5D~#nA z>xzpo3DKd_6x(eG9>tK-=vacY<@vaAHJRi%Lw{PW6*dGF?d`Zrm;^|$4wZ(u zMk3Vof*A~rCcnc!1OZ_t?1Z7POoS|nN{BGMprIL>`0L3X{h40S(YFc9kepn+gq1K8 zcET{3VJ_zjD~SNeaY_x7Cr{Rbn(-W`RK{^ifm8p9h{~z39OtFh=#ME?RkR8ZCX@K2 zO9yrDdqlnsQ%SSTW8fG*Z^${|x zrlZ!AS*^VaZl>82!la-MmA21(9!3H_&;gxPk>9wfykBH#)LjF)WYAHhwMHWt$#jb2 zNsniQDRPuoV`L(wY%&~Hiew1 zue(%}6L+R-IwM(tLf_NsCUjRPxRSIql9sKMs(rZQn$HTAm#8~!gc-%C?@QRxs48*{ zMr*MT=oCQKPSlw&ezdiqR%sniP&VWe64k-Bxwv#CqOi#=Wfh9u~s)F5>FywGe($EQYB7jJ_5?! 
z_>}^TKEZH+MKJrlI$_Dg9nIGMEN%Hg7cJ~8Cfs>4(;ltJO?Qb3)iL9Q8HE5RkN(?m z8>rjySE)OT(J4Pibry+NM~r9#xb_s2NwiH>rWf(6G#ygLp+EHr_nx)<3U4M zvA&vs`vUhzY4QDwa3Au&+KAg%moEtmMMzC(K-9kLp@6DYvVF0g$yeO^D2zVPDS)*h z(E=KGA?`xlGWsf7=sP+hQR`AZ)o%T4=xSsIVE`I*sWD(5Z=uqnl2Srx>&CrB4uQ?i zNQ*?C>C4njkaLmydY;$ob3lG++yo`A!_k322Lc&H;QuVIO(gHg4c!0$002ovPDHLk FV1lMMScd=r diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/take_picture.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/take_picture.png deleted file mode 100644 index d6ced986e82ce3eefe6e1f81fb662dc3797cb764..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7514 zcmV-g9i`%lP)Py76iGxuRCodHT?=$vRk_~#%;YhXmXsDLmxov~Nhp0lDPBdyRzPck@+O@`T3EN< z%i~H#xGD+@0}CurPSJlG$g@naRv#@3sEyea`v!zyJO1Kl{D+Fs7VBV2lvpW0Y=5CfmCvw1^9S6qK*z zf>jBPZ4WN)T02r^M8}0bc43FfIk=S@hQPQtKJi%NL_Nl43)7s%IeRx4?piktCA`oh zp=G8J_osjJ`Fv+oFYers{@yZFQ?xV2eT<1Wgy6flCLRxld+tr=9k0*Yc+I$In>n?& zt?oB^G`?3D@jozOu7Uxs)guk3X2L`;abTwRBfaA$3k$)5LWnB(4KTn3!esaKM?!N( ziZkBc`^Q^`?fmLYZ$kJq7{(AMggIr%i^M~qmfxfA;Tq%SVN9?-!C4m7~L%($@%@7_Uk^hCg4`!sLbyf>An+`?tq?23*V zlVe8ge3S9Rf|G+Q`GQw^(+dtfhv-J%W*E{(1Q*-Mkc1}I)hzD$RXVSmXQVTs-!wla z%>Kg#uhAu*F z%wmpPKE@|nH9hjs4%8g=1UQL{@IQ3k&H)=)cq&&cdVal z_f>{Fw~WMB%)WyDb~^0 zXvX8qFaxqfiuIQWHXr%7zG!najb+Rpi@xS7G&1Nr z(LdxgP-6-mg7Cx?nve(w;ZtGm1MN@v6ejuyXlDxTLLY6!sT1(!#i75v0o|5`Bpij^ z#WcP&818-{FNrx6I_AbFFhAGV(e?!_y7vW|JD+u^zbkea8s&cm=oHXA&@rGoPzA_q zHvoY>AnL1a0{srO4F+jy6Z3Mw=#V$<$}&!vFa|%T7vvOx>6opIF~;P;{RW z0RpEH3Yw3dlJ8D43@(JEXy*zU+2^-69Y zgi3?OSt| zm|WvG2EyGpd6h93Ts34Z0xbo70YrVdQgPHbdt{SD;33^=&3nj_L3Ldm|nN! 
zalRW$oZ*$>!RQVIwLui}FMHq7TY>fB+3N^p4Ox+Q#mI zIGW*~3c3ySA*dW3<%{w+)eF4RWzhb{=$gipAirR(VZjng&#YI^0P9f5x)$^$P{1p> zMJz{$Zuf(3MRWXjFZ2{0b8GeZ-nY;Vs==n(^?^z3LVkL8pBJ)6lQCw^?r2KMe1p;X zLDztn=F+YNjR(UcXeghQ-<_0y!hv48aecmhQ!o&*@Ph#^itZ7Qi>GJTxREInH?K7x zWgxaY`V~^7x|ZGP)qfKJr{%UEf@YHJaqei;dIf61=iT7Y8ws_d@A7S1hQ`^C0?j>_ z+t2y%^H%eT4WTN1ckg4^cXkU=Y%f3GZ3bX;2ZJ7joZo|#;SB;xIAHx-JV^KqzH6u+=F*EIf3Or_jvKRfXOCiQ7< z27Mir7k!=RQv$rSTz5O@dNjo}Us$v*FE!tg+;aXM7lwBPWNiX{1EdUZ2(TkQ5Ke^G zCj8?Penso@QlK=47hUs~!NR(G#t^4+zyGd4)6QLPWZ7|x&iG(>G~&9&R#5q# zA#fcG5E~1eJMrjolXQLG+vrSIV-osr0Y5vQH+8?~T++}O`S0@$O>e{}7##=Q1qNk! zBckN$!!3j_;p|w-IWO(SD0nx;qg_Z*gFQ@LE5~xuBDfaN_sfQ(qmhPlX6!?Vi`7~(nuN>C_Wck^YF#e0!CnTWqi?&(22%? zxkU(LU39OweuOkeeVzDETn(o1sEq2PR|pW!tDV+>)h#_w!SZOz%W|eLr`h3L{hWTL5zQ;nZm$eZ41Uj1zKHMo!gD}OvR2%&Nq6PL5`ajvaE^&7(|2rC#p)Mt}+ z$S|$b(mbAWV~@a(6Xj8GO`H$AU8;zT(vr_hAhmMg5=#%G(@Mx3G{KHqFSqFz)c9a{ z)W_Te`jlJ6ReaA8_#-6Fg8@3;jDW*nS7DZRO0bsS>}1bo&dqL)S*JdpGQ1qY%X2g; z7sq|@SbOO6n0WpkW&p2`?qRE(=vXl0OY|$g=0rKgTW|!v=CuE3FzuhVmY9RSnC`{4 zpR@VW#)s2Ql|DJ~RO>y(79N-1#Sz>JuatQ*T;a z46GP6g_Ms{#z#wNxF_$mXgu~N-HMqKUpBZBj0edwE50#BfXa!=OWOH)<+ZR{nghgQ z16u&&m8xW*c}bfO(FzR$xTDli{hY?@mA)d)}pqa+A`8uHFzDr)~H2 zGdVZVTChyStQOke-f--IX}nCr$htMHV!G_TzsLZMZfROYx*tI4C|48+(5X0@esMhd zmDN6L%{L&7tCGSvUn^^Pi8x<@uM95g2n3uG&8egD#0=8glci>Ft84iDb;96)rAun1hp@l+#%8$x1FD`4J zFPBvD-!c{Pc7d!b7;=%J;uH-6Iy9MvzTZ1uYm~L-%Q3;L5lFZtw_lAE(es)Q3DZ4tZ-6F_l&E@NatQ2$#z&&|ORi>&mV?-uhSLngux^;e zSm9T}mhN*i@#mxFqY;uayi919?mH?^>5;GCyeFAe&Q2GP&!P%xnDf;5Q0viEv^3eM zT=^kzWuknu=8VquqLzG-7!jv3wl8`?rbro$kM3IOZ415B+a5YQQ>0PeorQ0cNBOQk zl`aBQu2jB~j?-yG%b9|oXopk%Va7)XgmJsnPhqpiVWs%KRA!0r5(yr~Qo0C8mv6v# z(0w>+`8=}vE^5JyyVi#tAky{yv_EU`L>gnS$>N0Sm3BhVt?$Z}F9Khna)vRzn=OmH z>3?a8<};)54X^odGrS!Emb<|YOZd&;8D`89@f1txAVB3yO<4Q1 z?B^`Lbd{V%j{02O2zXY`!;DYkd?jkUD7LWtq^7B7vkVTXibLR=E5oaJN*;l^iSl-x z5l<$Wq0FVwrw4Fzx?r5IZQ!dexlq{UX2`#w$(+9{{GHr2%7kZs>JdMU zz5V9#u&r1i=ufWmCXEkQ9UO&^meb6N4Y2%Bw*Pj<> 
zVyj^BuKxDW@syY{zFZ(zacl&njSp*UpQcDOHyCI9WC|u3e;mz7o_G@``;aiPme&ly=t!J(otefAI=%L1Nx-7uleT_InToA+5TNpxa)HT# z2lU<1dSVVt4&q#R!uYTm8SU}l$}WsBx$A%!vN*axMj2lgkjnTomA{nZ3ts631ucC`HK>MPC%9DpEg`3__%*3v=1Rryyy+^PJtI{XZ3vbMvy@#KZ0 zN#pD52ptoR#=CSL`&(~&!^g8q1ViH3T#pQ|;wd`>dSs4GQm!>kq>hUJVk40>KGR?) zVufug?YCm4`DL3y!vCuTk76l51fIHC3y8;ay;S7^Q+%t&mx z;4#Fm-om>MyHc%%Z)O;il=N4*qA7HR~h22VkaVFka8)4>P}`az$^_JMRh0hv!#KV z($XUYq{}oJ?SiKsg~m<+@%rLwwQZXrQv< z<2V!49Q?P#X)11*%Q)H<9Wy5DM&H|Lv#o0le12A6-PF@PEOv$$h|`MK-_Gz!y((=4 zZbXyY>SY!()_l0BbT{hC4R%5pE?d7KV?Sr{@51-gEi_r!)#nmGfXb7~)l-+z_$UR; z%X}A>-jB@&hQ@u;>v)JmXMk6Es-+S()(EUhl&hzDWNdcr6#Cm6jvdepzwGbs+VRlz z(r)NoAlc0cc2wmT9|Ag5ng-*uPMmqE-l)dsCLtK$-H1QrZVor{RNOH`;2s#>a@o|y zbMyV|z4tV9-I70&0rvtsCq9kSobRYk+W2T&eQ%`xvcC4|Ge?z}@)7?v zv=(?p$}1Ht9R#jOl%JHwA|C%LRx$?diSx%Ro4Q`k7E#*x^bh$;Cd^faIq(7)!Gsxn5c;x;&vI~DLk}~ z>;05x6)au^sGNrEyOU8h=&xOl(d$Z0ursl5??st7(xctJj-zK8@qw4^s#Wu`Q|LUV zOd`aCj_5*oPk^M;qAJ1pZTGUT+z zlo73XyhGsjLKz;ZENy(m9IUFi4i(p5QFWO6iY)0*iT>Yu zMDR&RQjGaRz<}UIPMh0UTm9!uE@lSmYwzVJY|iPXGDpp>SGiAQG~)(CuhLL*q!GB( zsp08up^FS=K7&f+yL#t`UzJWNjd4tjk90tWAApBGq;g}3z;%gokXAwOnDJ56c=y^7 zxF)LYuv6a?uu-$5MO{i0mq5x6^nzoV4Rcx)9Gace0O`fOd*2L|tS zBp`*^weg9LxwX-~;&s+9UXQlbU6UePHh2{n+hv&*pCKPl%P0mAW1 z30&PvD^>|O?^`K4F7#yLm?ok@?}L=f9Rb46Q6D_jw#1{yO+tLvU9(h~so}f{PE91& zFfx7w;!+3sQ@P3*$MI;bHayrGIY&Wnue%O*cLUn!u3&xboLpBiMoZa~W_Pp%+K)7x_hVPL+^sre9?}E!Mu^!#7al zJsg}k>1=*_H?BI%kIQ?rOCw<2h&>78JH^fd{S;EBU!<|W%lKr_;G=oLR?q~{cys%a z;a1RHFuHgtWm@57Y+E8tsC78K5Ut(K5D|Lc)l8RIcQ?)w=xOuLq1l4 z==QL8${|k$GrmHB1tUBVX;E{0B?wD>CGY4)P^#&udHtssxrwa_Eig>B0)?*!s)Fb8 z^Imb?SIkAPyf?eKDCH&&6%eQuG7bYR20accwWf*mCA~@i!ze#q7#?kj>W$RhWSZhh z3~T4$AN5muOg|NAjEO6nnlIWBIuR!F3Fg-x2{v`U2^-{LSvNl8B zL!bzZ&GXs_zXo)NFvT^t&Pe0UK=p*#vcI!u$MecB3gg30=mYeQ1sd=MGtSUEDVjUf8@&}U%`-SG3~dfP%TVJ7c5D*?`b6ZEk! 
z@}}5p+5Q@JH{rcJwGXr7Kl*c*P#xvbE@^V~Wc7 z>>{N;AX|9R{;_TrY<)_>r+X9QVqgVEv1j6R&u%~GpU!qI?O5llD2;D4P-wHNZfrSA zt7KcM&+mL~G~V&@4eRB%a3QWmmqoMl%r))&V7P0Y{alQG$r&Gjz90YH%b+;!BjjuS zoZVU(?*2yU$p=6CurU1mc*$3-=y;i3ib8OF; z!nYw9-m$rm6T32crt^rlnNxM9zlg5Ik*VJ_jej!`?!GnkT`7l4vCk)sw;qdr6-#y} z5n(a?Ml{!d(j)a7#bb>$SzNSE=Y1bbH9YJp9-y7Y)it$u7p>AcC|1@r9}X`4yD&`J z!7*fOmFDO315MqVGT~+Y%tM^&`W( z$sW#>K;pd1i5N^gUOgeOmY?~mC)=qqr8QRX%G&q{R66*q$M!8ifiJ`=>U>PkynuDh zmP`}a*!6KWc75ELEDY=38S(32c*(MK{#Zv{NEp1y#5jO!W(703>C-A6t_pA8nv9k`mc8*gDN&7A zV4Fw02F>(z#5)4DWi0KXY z6KQhFXoYN2gC_@mYI#(<4T@fLME|4#tUJRun#AY6Cc%(u=F%9(8&( kT)JHKtPoHL6aaz$2Z{Yha1pyIxc~qF07*qoM6N<$f^p-moB#j- diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/take_picture_pressed.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xhdpi/take_picture_pressed.png deleted file mode 100644 index 5f9c8ee3b51b5849d375136ee6fef178103d9738..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7549 zcmV-@9fIPCP)Py7H%UZ6RCodHT??31RlQ$ppEHkh1{?$xUuiODW*}ZFc+;=CBBmrsXvT16)HJDE z-OQf1+r!VWkHnJdR(7c@y(P>HLtuoOSc#V3O|PMX7%&gSN1&ht!km51%-MV0-~Y_s zhcm;OIcx8I-m}*Ct+V#yzh1xp@2vM)n=xe+0tG^V7bxD6jJ9`8XaN^|5)xm@1*;Mo z+YwycwQjh~h>k@*wy4A89MZ}hhrp;OpV-hiR*$k7!ZfFI&h`hxUF(lS2@mwP&=OOK zUkrWc^Z7nly}0w?q0bXUHAOpP+{c)BSqT0T*TjRtaL?UCWk>71Hd-?-TBl9yZLRx* z9*G|oM*K-8%#|SEI=!vo(sY;zCQeNEe7I-aWMLs#PzX^4w*ds4CrtL^zP8YL!}%F) z&;6q$gh z<2?xeY&$$uzenRfZNGh=>gpYh=a_M0C7!*3>gb_>zjhOE+HyEmrrfT}vf33LQ^!Y* z=rtzeX9ULw@8a{e^$t}WWKPwM*cK4!WWmJ_B9hR=`kKW(za1*;HAK~16HSl`KfLFb$KOiRAvDjV{N zzs;(MbROP^w$-3&6B)cK*wXWrd;#huo><4^>82UK8h@NC5zcIseD~M1bl>15%wqOi zKI9WiHNEZrohUi#32;?x&*o$poIO;*`E8Zq?q`$F zO5mbo$VXL@5s!Zhz07keeEi1Brmp8p0N

F}ki{k`XhuAnK)z@_H_WSvJK)AIiJxq!h z?Zh#H_n(okG-M;5q95nOaL9ca4*3w;atVW32t#AqFSC*PQVyf4sr&Cpe^a!hvC)jj zmtX{PdthzCw9)cDn2q-G!)Uf#fiy5E zyNhZ3qrq_ZQ*>WwQ*iPLjL*e7T0ekE_gJ90^H2E%1Vp3!pAOpyi^+d>8tloiM8Iqt zBHjU541_c4k?w-s3qsn>S=?sd^tRQ1iWW`SAyb=~3;*STmR*n8&kE%hlzd`i!%2}N zMjJB!egZ)AvAMZ!G=p%bAgalhCBlt`EtENeh>3_h$%2S!7ae#_oZepdQB$xVp#-d@ zun^Brg3aAGdkxpC|AMvoVy%rc^drVY$iSI)Cb*&HBt0M;Rht*UUI=@Z9bXZ+MEqHn zrQwoSa7|U>Gav|6+gUMzaL=#xj;W7}cc?zsJJyUP`3D`emvr4wJnpoc` z;&p+ho&SsQbJ_{A<185Y>;!mqK{}e>`~d8GVX3MucSBY1ldz9~B!{w*S*fgUidNA@iN>Ftz`(C#GO&}hv*=vENF}l%i_5y%b z5U|S&5WQ`orRZ*d(+)%9?5Ba|o=@BNxo~sV;Sdjps`P`s8!#`M*bNpvtnEPtTD@e# zYS&$ZFlfj`nE?do5BOzZLqjy{KtfkTG&RlLOZC=J%wz(?F&L0xoW??PYV2Y*tC7$u zT>TKkhL{23QQ*SIJuU78xYG~}{WBHu2+-pV;qpxjr{mvBJPEXPFURP^LMrtz7Wi2S zFP=nlQtj&Pp=(W3tg=(4Y5Z##!@JYIcj5v}YTGS@o#({gD3{%Y{BQ8u7R)t*hvuc56`i{7rz_zN7 zn*a(88Y|-t0UDIP7X;ky&US47FS~s`n7Kr5trekgiHut=0-^;kjI1Q($&V>wExs#zz z2z$L9cRF zyW(=M;VAz(Mc{I$?LUKQ|FlztG3XoUTx|P3lPgU=>~5+gn<>C_?~x(eS@yAX@6@xR zL*QeUzN8XX6YhKlLv0U{FbY2{F8Y^Cd4!M@`Dh9a=j82<#G|j$W)s=KQ{-sr`U!ke*0i(#Fp#zlzQSjR9h^fz1PXrTP+5q<9P~<*sW<5Q)JKmMFyyM zOT#K^ml*+AT4pbjOT#aYd%v>T%vkdE3*+<2z&KwgOFYZZT)2vONdQagg7ilE%bCeY zJ}e$gEEs^>@AvULWK)7tZyaQ4L$@-ei@*X)k2%RfM)C!!$9xh~X)Ex>yAL}2`~V$c z;d+f$qG0Am7k(QfX;^bI@*4&HlvS}lZ++b zNS;S{cNyL(;*9`M3B4jcl0Kyk3LlOsx8-ymG@X7moX4z_4-wOXfs2QWP(3e61TMDp zDwX(HN8_c~>+w~LI$xp3W6ATNhkDLB`4GWeSk=}Wigan;lRinWQbuszlN97^zIbr@ ziOKtvi>#6l{|+jmr4yNw4s}`l2++!BsiqX|>qQ&!Y047(R7Q_PK9VkbX~{=tt@O5r zp6hK7IbCTy6Op*x%aJa9>RIt1Kzb$pN*dm33z~voWM8Mc$B~aV2;+3Ad%@XrQD@(i zDlBoWrqLuoDq#_6{RVs|;hY%u7vyz6O2MC{%ky#T`E>p0?h)3Wa~xY9*(hH%vD;zl5AdOnVP8s~SR#EYQL{drAO&lVXRP!yj6 zxwB+sl}yPaaF(TS&l$&6 zXwl)=)IimB*&{&ucCtt~vGoG0>u9ndPm*ZNyuRGg6l_o z(`}TpK|WRG+Gvh0S6c5_1;5pX24^|Wu&$%v(?}PQaeGi=zi#dpCriussatqR~ma(@q}ly?S2O z2%Jp%$B;|L-EF~%@3=R*@743ct!Nh8PgTMHO5UgjXLr#g^TqUDExC;fYx+eK;l z8Q_`hI+q`|Dn}U**oH#uCmCdheVrU?8CGKC7fyy`Mh;26Eky(z=|9#QnyR-=ey5#L zMLrt|<+?#YntYM=`VYsM*e+PStFJwDK1HU;=LT}+$3{S!e3)B%FL^>!%^zp{eRS=W 
ze0C3+!cu1RAt0}RA!w}6Hi1}C%M4*;JxG~$cM$qsE-F`c0t1A zssj?pV4`@h$d>`6GJZ|^moj|bwq6K+HdT0_&ByT#HQR7UWQy6|lshUB@TC6)`LLoT z`5_NXvC(0GvgR0RWql~Qazen9{u9-1_4wd={jJDC?A4vXHJ_8|5>JtrlNLs4UOeeP zK|ZTK{)h_zJw@&!KINsj5b&h`1o=GVC+p|*dPs}%ntueO^)J>>o)C>0^QuMFpLomm z!~#^WrK&uH)&)MROgedD`r4@vUHIzsP?MB-gKkG|I1-f)2}Fp_I+n>#R)jCxnL z2po{vHc7eHG;t&ag=g*SB>7B(U4$97iL~B|ndZ&5fP}kGLPm*{9|F>P4=nE4tTEq> z_=MWmgJWvHZ9|dl?F!AelNr{MD~`j!*-kXvvaRf;3(k9T-`KzJA$&xQn z&9E6{V08f(dZzILbw-lsdc)f zHziiW2#~%>@19JOd=yHB#+G1y_aUb7Fu(fwLoy+w(MfBu92LPb;VPM;MxfKuyQf5A z%+I?)Cq>b8Z3ZbaZWZqdy$f>k?MnGEMk@nDqdTYZCL(gNT6eN4QUXp_%ct1+N zR}jWoUcym+O9FvcNZ&Fkjds+9Ohf$1WJXYLUp)6Bw%uXLN5|yS z5eK$lCKp99t?ha-lbm{A(g?h0={aL2tPea}D_MAdz+by*sHe2#>s?cKm0=nW3)5T| zX>SM*^)$l0bdfwl80uB=BJfw~q=`ywF5-!?K#aw;@un?@hx$!RKF!p|B?X1~x#V@` z3mT-3WEN8rC5Qm&b(?Iu`GPHxKxM@R*b~$oJnhU+rt_C}oKJL29j_a)eWgqf@>{!D@sjw0jAX=* zK@LT(GG&AS>67%DRg;!{6a(kl_rdf&EH*GS?vw6Fz)vr{Katf`s=x}3z!R2Uvzm^y z)vg^wUwgyMe$DX9es|aQhpP7kT;Bm(VLwxD$s$092z(vnlisN&o6`}=CmTpQIS`O~ z$(N*ir=FD*0-Hg+cG(n*&2#+hzyS?Sw~U7f4O{KXMmB;R4JxX(-v|Jez$?<wIhzZ=}kt;rp|1MLiq{&Ci>U-Pj zKNV}ALaP&`4E{B=y{=VCSP2yy0p{n&%KBAuc}3vIDD}4IBSz_xuc~S13pncFS9YRIgI{l7 z%eYpCHp$2;nSvp(DPP2+97>aqgo9NT-@wG%-%(teJ|;TonM1Py^1B`SQR4YQfb>H8 z$(xa`cI^};Hj_vFWB7-6zGXji;-dN;5BXbRot%2_Bnl<#908h-UxrfepmVS8eK;nc z`>2ljwAz0+<4Hl{Kb8`81q9HHXLmcTZlkUBmzi8l3)I)%&1Y|Ma)^8qbVt>0(wD~z zE2FDC2BW-Z2Z2>iiKn-Qt}~eVC<>A9>z!|Z!EG`MLOx0?GW;Z*ytXmif>g%-HduO) z&Z6Ei^(>TlciSGgCaUdgr*2lrNA1AKy3mH?CE18mJ5XeNVk?Rx7o8uZAq@l3ux)_WHI38!Lnun)0+m$qCwNd^9W)Zo1r zRuRw%aPDOZ=X)*O9fiXAQY=>C-$0SKjWg||vNIXOMdG=)iAE=nw1pl;1n*BolIX7c zrq?v@a!L@6UF%Xcy<@5Re=r-vHgE}Gi!xjxl zZu3$k{s_nWEnJHvNdw97T|iKm(0>2AWg-60(MenX(cfP0h+752gGqTWu~{+eFM$# z+rYZ6V14a*Zf7xuq-oG?QIn5S3Ib9unl`OpPWQ@4AwV6#--39qY9P#Dd=1^&JM5Tz zT18`ZcxTp+KF$mIA{`B9FvHM-%{>F15<6E8T&o)X7`%*e;BB;t(n8St+*Y#$799*F zOry;P4y%%X8*J%z+<~eeI$1nPKjM%u5)O`OlGLM|x)gdOKv)lKk{Q z5iYlTAsz)QgmGO;@P7=9W3>M|Cno3QiwjYYQp-<+&~WeUZ3`ufbEi21uy4cO2Y)MI 
z_d4}ort*Zt3c`i(@tQ{;(^jH7d>JL)>fqRM3;F!+obA2Nd9_PDVBLuR9^{*A14Exe zO7}$?`@M{7wFN261+cOotdR-2RAh)0v@e?GXkE4K3Z2oMmwa?=LZFt<2SCYvoI(?` zyk?vLhy4S>xeazRtgAI;Uei$k|CCS4?`@QS!l3|Z$V!wsFZt|L(itB5zJGjCaM6IT zY~Ajq^I?aX>Y^m1!-+0~y%@F}OF}B?o`5C7d9Gd&PQgvy$d|92f`l|EP1P~kV7}tb zVLTCpO>?qTz0#nuXA@l$nrE17Ih3yls)ASZ4{zJ=IZzh?lpXnO3_wCUo^2-Vg|Jg$ zbKb;29N97^0(udadazqTx~w!QXh~G0{r;s;`mJb#Cv=b1DJ{X4uH?{4wlW|SPSKDr z(h<6d#o4==Ut1e&>imaH;$>y5D)|D)6A{mn^;ZD7R}d!=?wMRvt@s+y3So-mnoGp- zYM^?|4B7AOWgCy8ARksjpP(NW2NP=oIXkRzc1s}K^XqJ9mcj!OPk}SpvJG&@!WPOL zLBuq3u#+svkkjg~w>9(}5I8rHS#(x{K6Wi{>OPRTFThW6kPj2s5&dX4fSGU%1Abpk zOZQF3?z4XdB2xZOhi!xT5c8k0d)<-eJD{ z+gymRqREmRPU4z&O)%WGzL+s6Ir0JMU*O-p1PRA^gnXT!vpXxp-Om;?$hk~gqQo=% zUa;dx;KKM%u?*@n_OoJjlmC=rO<?$W9vHt$J}jS$#Ay(Hf)6(z zKCpL&jmIJm>Rp$NDLL}l$i`Y5XBf=*3d+FQn$LH0_2N#~JLc>#i^)xVe9`Q?9!-oe zO85(*s3_C;CBbm_Q^k}{FKH=R^2uacteDbCylorjeG%9cJCo;n&Ng;*U#u}^^c1U zjfI?2%b0$h$KIJrJgh42r=HsoZ*(1A+2d3XtWO6P%C)6rZw6{qfe2_@O;bP-tlT`odr z;=$@Mfpz@K7yg+}u9TmZHTg&}wDDPw9+?M)Ux8WFIT)UK3iFyR=?1W|>f?)8_3^!= zLKAyl#qWaQ>z53bk9O3B1h&VU=m&7ktY9WLvBhw8Rd`3*4cZ`67us&2WZ1YsX|@|Z z)4J4Wqv_gT?wPfg3(*-cWCu+1Qu}JKE5z(U4+cui8zXC4{~ON-hBGVyoW}nM$OPix z%Ote=4z3~kT5vI_oo0!XGw6>_RVn@mKTLT)>MqNkd`>i~@d}&)7Oz1yJq7;Wfzq-X z1lbWhvHI^a;b4)>u+_95XW7Js8OCWyFJt!Mg}_+j1wOy4ESAH|V3ST}(7!RAz-WDj zQjN0*ri^Ft>4(ytJrivY)q$LgP~!Q7sr6%TN-tJoJ>c|cxY3HQ%C|zGpa}dw)N>Lt TJ%&r600000NkvXXu0mjfmS))c diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_default.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable-xxhdpi-v4/btn_switch_default.png deleted file mode 100644 index b9e66c7f605dd5a02d13f04284a046810b292add..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1523 zcmVWrINO@|aR^po)MWCtZi;3|P zji$Db8dGh(q%EWviw!YROhXGSD7{KGrMB!Mw+eqBn$DitUD}=5bA}zW-_x+W=ljoZ zzH^)JoCA=wq$MqBNt*-HbfE%O z*a%+?KOV<%wBsm#)@BkYtX#mS*~TfFxASbFFEJpB_#XdeeD)K!GG#6yR&qaQD98F( 
z%hWk&BlMsdPa)cKoIx*o(Su%`#t4E4U<~PS!G&yi;e`*Ya932P1K6px+8K%YY~W;+ zr+S7T^GPm_!NUr^z;B|qs?V8Y7sdlT5g|XyPx+8(PuZ;JFA=^1tW^)8v4Q`F37_S& zbj9Iw9a|${_ApJ}O*ch+lEM99fG^L2ZH+vOvf&;d+oo%KOGU*(E4mWw9)fkooNyuIT3XhQ2XZF35mw zqb``n4nett^W=eC=H0eVNwauTP;ZeLktUmG*SS0|^psUH+lbvLYk5tW3Z=^fu^;6n zLHjM4AU2~=6Q-0?ERYFeH%dOob>+j3&RMZRc!;Z=405|LY5CgOF{u?|QsQKgR1WGO zdz~GV9m4E8-P!HrijGoauXHJ!*-k$L4ARebHnTJ?e;?@}ZT3NIVc#tI#4VP6uF<_r zGs_N$Ja*nRIoip5i=KbeeLSHaqarLx5zU5KY03Bdx`z+cW5l+&Fl7aW)uAe?x>60t zoP8fQU%{BgX}x^5N{~)d3kjVT6hc|&^~P^fWcE}jH&vkALJ!uXUlk|=!EVzf-Y{LF zS2+kv;Wb^z!}~3RcvMBiT59uglfOTwUuCNTS!b!#dXvAus$XTQ0 zGJX-ZIKUZwoi{~e;!WnY$=TMS!y_`PUuBroxM=ClhGZdzg9P;}v&)-*{9iyiEa@Lp z1rnI8pT4CXu%v%b706lr%H%k${R+Qn^?t2O6-a2FZ+`R9Yo<%Qs%|1)6Hi7@{WL!I zTX|5IvCy3|FNW9LI9{yGWsgFc@4`fT)irl3ZMs;!#kclhRmlf)H}bMB_pP#YP@WxG zsWKt_mS6ByB}-@!V{LUd$W|dH zTJ7V8{i#tk7C`~9>bR9Oj|+bM@~GgJ4-X1O!z_O7V=wSd?_&^ z0GAtHl#bcc6*D#BI6=H10LvH@e4XMw$}!yh&ai=94hX;voDdve;2NbE_pwJ%o96aV zvyb2TW&_GKzM|@s8aMH>5ku_3WC&F9s-e2J@m>X0>Jx*;bm)-Mbrd-=MZ92;8Dk?C zNW^%Q=M4Tw_*nS3B1xG-4lqLF5oAb5^`t*hX;!=DBzx!*YHV z(a#>Tp_}IhAX7c$cM*#!eK@D~o7xK-h2Py~oE~~jD1`j^I$iqT< zR*Z$EamxA;S1;bdrs!YcB)ZX$e*BIr7)21{SO_=V$btteP>AAL7PI^Cy7t9fyhoPv zptM8Br&$-rVHXK7E4i1WaYA(P85zeiWwe=XJQQ*CH5R+bw=JDOkk>|{;4DEU9zi8y zSy!3DaU8)BoRGCGLB&6>Sc*b;;DrY{a3KROq+tvJ1Q5UwdT zaB^>EX>4U6ba`-PAZ2)IW&i+q+O3;savZsqh5us}vjljv9N382!7P8igRG*_qtqsz z)Do#;RVMP9V=mBU|KI-}^FRD5AygAnsk!BB`4wAizVl7B&#&jJv+@3Yf93f4nfv~_ z`S^k7rNGzl`oMm@zw>(f{6KlV-;M9Dn=)V5sjmyUj|<;lL3h^o>$S+&g@S)xcdvh& z_WD52UmoZ6$IsVgUl;%N-?0#kmAGE;#_!;Qy?^`XPT~2}`4jKo{D7YZzEAJJ|1*B( zBXmCm@Z-B*;%DD03!mY5F7$q^{7al(ue*3=z{@aS2|G2aNx}xUE&DURF;&;xf z=j`X{dd!A!<<~>KALV|>8@Wh)qLKMs_>=g)+~1Ag)pk3{+Tu}%v4ej!=ZW}n(Jj~A zar=IqZnDJaw=aD4e)@1mz^!%7w3p$B|n{& z`w;yg11_257pwF!giLa(*S+~nareDG`R8Teg+!!~Lm@RSbdnWgikW_^lu$P*q?l65 zsic}(>N(_?Q_i_$;Z!f7#F9!brPR_&uc5}8YObZ$+G=mU1z;?hmRo7HwbnZ~?Xh#M z&igwr3_rq%BaJ-DsH2TODW4fb_*|KlAoO)>gl88*YNsg~Swu4749_*tPrG#Y_L{tNFJd{AU*$qaNGN3~uEzA*Vh^ 
zZoQ_lc!Fblv^kO6;n-d$I=P!6%-vjAGc58mvQLs56k&B-q|jw0_EQM6*=722 z?9R1W-aHC5yhcfZg@(CY;V!ZAUS6KP&@g_?V#9(WPFf2|Q0gehKnhR~+~3;JSL*tl zq3R?o$JR0(psxq#rJl>`nX(Uw;wvZY98Z83rZaiLKd^n_fj1?&_g1Jk0KOgFLvV;I z5$7h2?kMLtD*a5~(?LcYP?cMIlXvDLevc8!79qatswomTsI=3{XU8=ct)Bo>Qq@?c zzEQXHY8!eM3X-_)<=@NT@9ztFEklr==Gc7!%z5TR|F&LMC@G&kj~S8J?zH+@JXl(- zBE!z)(m4`k*5imy-wAGZhfCHOwhOzr?dBP-9ExJhW%8JZ>LQA*&ooX+qhvUjMkU^p zTlQ3}xY-l%12zSgePamwX@5rjR10g5?J2oA^MkhFP@@)m3 zfSR)xVr$Jgi;vre4MO_D`)#3sqMm@7Aj+)1wd{J`ee}2pHVe&>qTw;@>MXdXT+~OI zp8^}*@Zr$bNIOce5(x$4AAC)lm{8Rw6v)FKun#RW_Ocyx>VCzH$f|AelpTD|gWgH? z_|(*rTw^W>ZSHvbB2>xj)cL-wlO_+pD9l1HAGkRAo)Ju)#;`(WTbffhd)s#{5u^{N zR)}R~e~KXnxvj&qqjfD1dRS*}HA@i&JEuJ??lf@p(Ah+9CEkG4jl)^pa5~-91fT=d z-Dy~Y3Eqy1F54;c$UJE}KrtLJ2z{DX(LON4%FT{IehJ6qioKbP)D^DqyjB&a6nd51 zsToZTanL(0uRtvnGM0U&EoVm%Za)($u(y=lFs1o5iA7jtEW#7vB^qygzrYJ=lvYL` zYpG%vxpvNv)AHnxq)UK;8+~rdksqQyC(w4G1svh#>-PJ|`B0~W7;=$R_9mVsb8EhZ z-VXQb%3Cw-)LwSTX;7-v(n8P92F~SJ;n<48iUTfe&>zSNa`@J4O0{j6YjX*kJ0CF_ z798XpvPwKHlvel%Y^E+4u?>+ zT#MjsGD>AAir6Uq0Py#{UQCQIdDM3&vXFkit8V%Gc1KV~q%V(cOVfxbcDdTi_(WYONkeHCu+stg#mc*3aNs<1P&4@VUJ{>uZ3NRLafLN6_8p#(6?$C*Rqf!Uf`myJ- z0(YmSw|A$Cs}^Dmz@}VMpdp57x?XW0_D*STipK5Kq6PU6SOtIY5W(FEOj0+^Z{T3F zj64YIp5Py9ZgU|F50ufIn)o<)?M%4FWpo?_EY>{(eYP{;R{-N%VFh~P$|O0|H61); z5XEiXr6XK7(9Dw(YLMHJ1`x2T-w(|nCLycXYB*w52sbB-I>?|)snaxDI@)1vHCTm7 z3YY3Ws?2FTzLO6WyLdn)=QwH9f+DrRPn7JwvPLj})fB;4 zYN5(cZ@8y93to@IQov-JjKfWnkI=atkuP%yVP)F06gb#UxWgi^zy+RUObV97&S;v{ z4pA8Q5^KORz0TT4Xw)uRFamZC{e(_&YmF8QQnE@m)kXy)q6oPK{0I0o7fpcn<#Qh0 z3g(rZ4)THr9(-{P{b(UGYO_l)J1>l*c3CfNLL(Y<9F555$P#^0k!O*lBW+TWe0AV_ zJGAr41m^dO-TYqCFG31OB|2zx35sVa?#sK31jzxKBY`UpNO^7?B)n=>EmU-+(n$Dy z@R==wD6eRF?!8_iCs0H2|lTR8f zS-=&EMRTQbQ5^^=yy~>Zg)7zou)7vdkSXX>-2HT(&E-xd3~-Fj4j$?NtvdPxTS+8= zZ?1TvNw;|srV>dWxr2Efodh=^9$j-u0H_IZVP^wkSV`E@nXYEMpNG((D(DC5J+ zBGYb&UXX7SY2pJiP!X1z7``Qur23agWI6_0C5>=s0vwTr#*8pLzzx>RMf_Gq^C3784UJl>J6sIJaYba_pumN&nAf@V9}Tq0f}b>~P{R`hfYr%0Ah1mI8;wmS zB=h2E4a1FKj2;<{u08?93*aH)T9q62g8qbgLH}f&4Sh-;OE9qzV(=jQDFco$qp68( 
zMErCVbMDtXC7=ZH%%=yg9CwPxb`JAVF?9p6Z1Dd}S}9EMFa&rKXmDd^tUdCgfalN} zPxBX34f9uH4^|Zi!CS-tZJ)6lJ(HfQ&X#c^Gz(BSK^)3*G)3UE)eBt4ort(~W#3gL z9=$F`qR_4?p|8o^L}xsz1m$0fp92$thr)bnO7zSWo{7SpC&&S$J8`&1@4muaX>Iye zhTUGfjs;9r@)?2}H;l_`P$Pl1DO_^6i*pXJFO(E**i~|y(i${2AXAFQJ}1N*iE$$# zb+=KPS&_KvE~9vJl)3}vU6cKjJVD&muR_B7qAX~CJhKc+^h?-Y7bZ(S3lg^@Mhy)H zDiRW&nhv;vjIg5_j6@py8unTDtI z+ALr;?T34Ug081?As%-d8eTihjor&{SQ$kziX(o5h|gr7$t zO$ZuV&Ig!w=qhxj!SUgsH4%UYsIP=?24KnXj)L{QRRa(h=*J^;2`;McKO$m+Ez?gCss>t?k-QURb+?Y|q-N$wncd7v+s| z{c}70@KrnkKtsCbvMyE}dbnIyd_yZqD4|RJm*>?b{>bHJfSLg9sXX+*D&NHMRzvfa)X)otku-Gjez?(XOq{zgpn1h3;`YA3<93 ziS_!#9jEJ#D7LFgb<;aD-iCEn%B~sY+EA zb*ELJ{Z(0o%5soYi&U0-Doamn{#kc@rchfkPZFh`K*;mXB9L~f!%-zbI=2S5wV}ZZ z3&fO)Zn6CE!dxn&0HfS}imgF@2iT-pdk|g)ZE(;s4Gw!bgfCs z4ZZ@rE#6R?QR9tq;GGz>k4x=zc@Zd%RcPEnlplrzPS83J!)Y6A75`$mN*V4HABIEZ zP(KC6hX<-5VZ0Nvo^}F~t7>-!JIeBr%!Rn%(jC}|7(sSu(1TQ*!U*l;{-XSc+=eLo zEeQ?|riTwjkfBKDXMHIG&%Jq?s|e^<@)VZIRgvK;*XzI$ITIlHBACunwQxNyOnP4V zN<7t$PUC|Kp+Z+0v>^D+dW#}~Y%ZReH9!0S2}(aB3_YUv6HiP2v@f@@u@odFc_u>ZPMBj5 zJ%bXEUeQJY?RA|4{Th&M`cTrYf;Lm&@ZZ2Na;>v^s+P_(t>`oaQK4{r zE*Y7up=H9N>&y&mv(25l;)C5%t4b)Pc)o6QMKjv>ovwrQc5-Wu4feoKetff@I0C*ewkJI{PgT!Hon`ZNMV`hT0@x3^n1?;$?y^*O}v za?4d)ZW_5=EfbdZ#a0F~;`v%67^*o!AV$=uouX~Drq0aN6x~0c;h68k9PPMLIZj*q z4S`QaLhl#{m=%bNK&5zZP1plWY2-^s5CL@4)F~&<4UE^%;V9ZSF66t$a;c&9v{94@ z_L8XyvULz^gTIE;e0msYR5L$dG-wRQr?S(&4vIpvwjR?60JQ3N(luh; zXEFrYAd4-CG!Evr7meESe|B}AWmq5Jj#PVxxN+85n@b%)0{bx~hGz)cwaWldcLMrn zzBVmx_Ywz2Z*@0^TdV?~GEul#@A?di|H6v?g}6?d5N|sP#(Q3EKxmctxidE06)Fgk zD~ClIYoKSkdnVTJfi-Eoa-xQ3*om2N(vWvQ_C4>^%@8`fs?MKxs1?vXMRkV zYkn+c$_o0UZ~vGDqd}66vLLqdcxJ6jlUX#?l(`V=wz0B9m_ZY6_&O15_@F6g!)g%+ zRKK}X^$d+i+h0?4+RJM|tEsw94rymZQ+45N-`Ww|I8EReAgVkY0o(N2O(LoT@r5*i zsNnTtTc!LEZ&H6w2r2O!bwDW{=ZPHk4^B3$HB1~f($DK41zlM2Ff=t%{`jh$n%VK6 z6c7^IrF(N)MdCNajsVuj3l`WlC!wQ(=G3VaelUntq;#yP1lLtRXQG-F--Yb21M7~9Ay?CA*Qm{|QA+)@9?9_1=1$a+MN<^14 z(9}sC=V3X7r8?|&OG@89jz`j?N z&^g-MyjXCCWkqT)^4Mn;Oab{cEAZz_%XBJE%E%IH9vw@;ZML5AB|z6iT(84Jlz?{B 
zjkeU0R6XYYKHp%c&a%Y#Y^hscP-favcc)`CoGxyd1jS>0lNy;;W+Wv8ePYE_5Z(>|A6Aiy|eD z+6pD`cI1V{i<}~af?%x9<5WGzp6Pvzh|dJA<>MI9ELW)J=NQpRI+?RYM^J_4++F90 zE&!2H00`K)`=q7I{2YXdSz=yy<{B_oSh=! zB^Cz%>Wm;9i$g#Xkz1m=;3W!IQ|3h(VnySp41SK_xb<-ar-bJSjt+9^1WvmraOk1u z1kPyB2^>4eGp&hWyAI)`do~C{r_kFoEkY+gjmbMi+dhlVV;DLbBntQYZY%0Mqz(Sr z9}cWcr-LXd9;QmQC7s658DO3jv(Yjbhm43c22DxDEo?@k>df6kw$EO!=4SzIyse*3 z9+DKJCeL-q%k$_U!a?ilpePjGaDF}_?P(k1@nUkiBpidhx6Uqh9a_F41bOQdbb^GJ z;bKiOt^$`t0p^FcFElh>CWRTxfpv*K&Im2h9HdB|tx$A=h^qswc_K4(oU>K=I|LT^#lkh|UD|$v19_o2o!-MMR&&tl zFvJ4k;W{1liTFLD8}xs4@O)T|*C8PK3`CIKfOM9&b45HlXBcWVc%x{BM@^q0Ni&}VX;2@X zrh^oBV2M@*l055Ma5&4eI^(G`9x2HVXo`h-iTjTOgx`LBA0P*BF@c&<%s1X1vxmd^ z+Yh`w5MIy5&iOWxyUDDyjZ zgOqiqNE1#kFIvXtwzKnu7{3&nhh=R>CGT<1Yn1ftRS20L5{EOIHg=!Qi&fH+uAgb& z9mkBW@dm`~Iv+_CWFL~o5WwlAg97(0Hp>5RZHR0|33+pe;&_#j?pbhcB5N|bqmg3@kXRiFkM23bsIe^{yhp)xBK$d)t||6C4gMShH~K#-bS~Z9V;LIq-*f5D;S>G8 zIfB=_l=xbTtkr8VTFRXe^k{T8UB2~n4Vtq~uz@c+w*{ArmQz&YvL@ zg3QuEa`f!(K5>MV4JI zB9=R*e)BY1&VC_=g<7L@tF~6)_@99*t>dpXftgRz8yzir1oUqM7uOw4*#jr z0|hlWD$GLJ000P;Nkl>m|{5-f)pVbX%pi^3X9aDDO96?tcfV? 
z_Q!dg*}0tG{`Tc}&%O7z=R0%fH*??4x%cyX?s@}Dk>_f>Qz&OOseWx zz(m`cY&#nm4b1-+F&3)2Mp`uWnI7?+g`Na4&Y5IypLIdKEew5dSD#TXj>C7 z9XPKaS$zq(NklePB@k7u18xPD10U$aB`*V;fX%=b5jk9eJWK-ST4mv~K9*Cz0&W(O zgH;1WRX+x-2BgfX|Fsuzzlc1Oa-PNki-Bu^sbx;@G;3K~XdtRuV;0d0)b?`fE5P@F z2SubagU{$p;C8EG_omeb=6J4vsA`*tw3Rt$6tD)kx|h=qSpj;lh`gJF8sEo(JApa9 zoc1^1Y7u!UPe8hXHW67_;`m2^$AQTuZtczkY;3CiA6ToF6^2a;@ z0oQRi+_S#}xJpEJQ(&p;xxnv$DW1b`0<%O!v;JiDln=%}U`Ekl*eC4Yz-(Zv=kOR{ zqpDtzCm>1f2j+xo;Rb?o3|I(k_Z&VS_@%1W<`KwYV0JaZ=mF(8a3%19=kN@xxa1DV zS-=fcsYK)mumCrnlGLrLIzNw}tk#}Y;wpslD)4#Wq^F{EpQ?_?8<3#@We2XWE~yUz zEAj}W>Y)I4b34}&xeqr45r+ktO;4mFF&pYvuR zLqT_2+bgNt^7zTBdYJ>BuMKjAs(v&A$wupNeRoNHJp#$bN>1Vi-jZtRYr7-@sl3(z zM-%%-(fd$#v1?hXIt5q+%mgOjI`(RSgTQOR)4&E1*_BcLehPdkvHK|znO9XnRJ8@T z1LyqK7qtV+MdXpd`TPWsbYtoQ##ujDv4N=ShpoVD9^fr@0!u{X9|7~cAGk2F`!gc) zXrapxRlOAF2;Dr0+cX2us_N{3S>Nc{H@DD*y202?X%CkEXe_WPAe1dt1;jp6zoxSD zMCyP?%x^4hZMAZvry9+LwU8FF4!;LWB`oC^0oo)-a9)l{O)4A^t4%ehm_)Osk+xp+ z?3-9b3%Mge8s80&#-73hu|{8GfTS8!b!rAc8j5Biivp%~Rm%5&#j~$)h5F2ZY0XOc z{zpCg#uN?6gn(&{k3h=n{3-`>MsF9KR;GaM;?(}GVK%TIRlY59f`0ZFma~wzJo`qa z{2Ye@zO&r9x|Rdtn}AuLa-Lp`K=K9T>449_t!O}w^|u~f0c{BQ{Ert6$jhF6skWkR z*tI>tv+om;zZMOMZ^@#j0Qp)TAdTC|p~?Yi&jN^zf$vN)iJhUx+0(R;X@T;#7&k$` z(s~28HWx(%vYS)Ay3ey|!f9Jc2hf6B^`TOF4`>PL>`+l4*3$G%aGV<=b2j&Tp8cyU zVHxv9WNV&V_kHQh@(tF{50ycKGHsA$%1&Tf-eEu=`dL(U8qN(TY5LP+bvUVKx>5ZZWT<=Ba9Pe2v)vSzn^%zAC=P41n13+dk*AIjXuezRG~L ztRA(kTAX`i(%I`jKvTKNC`Fa3wgPPfXw=rEW+9fq|A4P}4xPgQRxK!sY@yG#5NQn? 
zK;||94|@(>qpH_ND1m{PZpJ^n`x6U45ut0V$;5Vu|yb5bvbykKp<_?H02-$*v0BIKRq^h1- zMKDeS9>PThB99h=MG3eZzuW0dV7sb*unJ&|#`$simdNPN4G@F!9BzEV7b9>I@F!c{ zpr|k|#C5JN^c?QOtqdz>IqTTurD|U_ zDLlYzoI7Z#TGms*4I*-&0H3IPfG?Ifr3)8!V{NH!mVtDg)THzKLbjYeaC>MBJ$f3*SF$gtKid2Tn}XtGtiUg>9~+b zpX%kb{lJwXvZu_wT1!PB$^?jIsumYpWLcT=sss2wusREiu|c+PQGN#SMqqK8z1pF| z=FJ6u&Z(X&Ngc+8&3h1dCe5jH)|0Rl7xuMWIKNKdo4|c(Pv;96SaKxJtNj-G7FeYlLjp(-Fq46A0H3vgcvjjD+zULMWsu7}1-Kt+wVb6mkN4UD$^HwsF4z6#sDDMJ0fG2HxK5vU46lGP?vkgiEZXBWk=U`orTgaduH(L3&wW8nO w4&uCIUa;TxJoXGkMMXtLMMXtLMGZ0iA7PZrs}BSc - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_settings_default.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_settings_default.xml deleted file mode 100644 index e19589a97e..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_settings_default.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_settings_pressed.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_settings_pressed.xml deleted file mode 100644 index c4af2a042d..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_settings_pressed.xml +++ /dev/null @@ -1,13 +0,0 @@ - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter.xml deleted file mode 100644 index 4f9826d3ae..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter_default.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter_default.xml deleted file mode 100644 index 234ca014a7..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter_default.xml 
+++ /dev/null @@ -1,17 +0,0 @@ - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter_pressed.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter_pressed.xml deleted file mode 100644 index accc7acedb..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_shutter_pressed.xml +++ /dev/null @@ -1,17 +0,0 @@ - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_switch.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_switch.xml deleted file mode 100644 index 691e8c2e97..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/btn_switch.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/ic_launcher_background.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/ic_launcher_background.xml deleted file mode 100644 index 0d025f9bf6..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/drawable/ic_launcher_background.xml +++ /dev/null @@ -1,170 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout-land/ocr_activity_main.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout-land/ocr_activity_main.xml deleted file mode 100644 index b30f35edf7..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout-land/ocr_activity_main.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_activity_main.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_activity_main.xml deleted file mode 100644 index b30f35edf7..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_activity_main.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - diff --git 
a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_camera_page.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_camera_page.xml deleted file mode 100644 index 6f31c2c7e4..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_camera_page.xml +++ /dev/null @@ -1,160 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_result_page.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_result_page.xml deleted file mode 100644 index 958a859401..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_result_page.xml +++ /dev/null @@ -1,160 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_result_page_item.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_result_page_item.xml deleted file mode 100644 index 6a2b09ebff..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/layout/ocr_result_page_item.xml +++ /dev/null @@ -1,26 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml deleted file mode 100644 index eca70cfe52..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml deleted file mode 100644 index 
eca70cfe52..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-hdpi/ic_launcher.png deleted file mode 100644 index 898f3ed59ac9f3248734a00e5902736c9367d455..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2963 zcmV;E3vBd>P)a+K}1d8+^p? z!e{m!F(8(%L-Or7x3OYORF&;mRAm8a^;km%J=s!AdNyc=+ezQqUM;oHYO18U%`T}O zHf$ra^L^sklEoIeAKmbOvX~v2@Y|vHs<^3JwwH?D$4l*XnPNs zMOqozmbkT?^lZ?$DjQ9%E0x+GsV=1PwZ&39Y}iI-$Fb3d%nsk+qrN@cV=OmQMEdF% z)iHMl(4Yu=cIkixWXtwMIV=>BvDSrHg8?)+vLJKozy*}$iE>&gGGonlG0cJhG&DRv ztzkg-AO(q)B7~G^EwE#tK@nqmJ}!(Bqtf z=eN{I?X#P!Xx=uL)D9cAk=b!~&@H~6S)=a?R4fDdP{-5E5X_!5&FwFJ^7&W2WS z;CnxBCOsSU^v-%(vad;MPukr;&+ciI+F`>sGCPiqHe`1A1|N0p^<|#<+iECwOG@y7 zBF$;;0YAhxtqK7O0SW;M0SW;ckbsQ#9QTYyC*g`2j%bA%1Zh^g9=9l*Cy!I^{_p2$PP2>j_D2AybM$NwY}iJ(ZH9O3 zlM8g4+dw;}V{dlY2EM^Z-Q(AmcmO|Ub1&3EFTS>iuHC#rcNo$wkB3@5c#lSunxsQ) zaA7tLFV3Oxk}X2`9qVL6?4fcq?f>Yk0E0IEcm0~^P5ovLLV$&D9ibbZTOt4ivg_<= zu^#q8tYJktl(egXwj4c3u6N&}S3mj_9pv5y{gQvL;&nM}TeNE{4K3O%_QAdpCAswa z`Ev>!oQREY9uPqL)g(QPVc1U`Q3An`+x_7g8edZ^0zdcpXNv7^!ZsgV{ugB){w+5&3-Wlp}yI7?tN)6*ST)-XSL4g8_rtDVlw+a zE+K|#(tV!KfQE22d-}7B(mLkHukIp4?na@q?%@4Kb%u!@F-ww?o?tn_Ohb zPi3Do`yL?Y$rDPYtEV;|250yzpS^rZT*TflAZ&YqC;by2Ul7NTZHKmC)9NA6Vv+>C%^1XhNlp5*!7zxTTKfHTPhe?@XbH=VzWEuCcmX z@L_&qCB;=(Xi;-D&DvT)kGOiMQ0&YQTezdH&j4D;U@#9&WiZClJThS7w)OHH^fIT| z+jn{&5bhMbynmM$P<0U*%ksp0WUy)=J!n9~WJ&YNn$e3{jMFOW6n~uqMHg+M3FY|#>(q)ZF;RS(xqTh>S1Ez_jfFig z#ivbPnZ26mv{5wdB5SFYrUNM5D?g-OsiZZK?hPof9gqf&7m!5-C=d>yOsw<)(t*G@h5zIY2saaEx|99pU%^#gvdI(Qqf>)zFjf zN}5zm9~oT`PmH~EF012{9eT8?4piYolF(86uiGy`^r#V4yu7SA-c zjm})#d$(Kx2|Yn~i19Fr<)Gs+1XaUIJs~G>kg>3 zkQ$CqUj*cb1ORzHKmZ`Ab2^0!}Qkq&-DC(S~W*1GV 
zw9}L-zX}y4ZLblxEO1qhqE9Q-IY{NmR+w+RDpB;$@R(PRjCP|D$yJ+BvI$!mIbb<+GQ3MGKxUdIY{N`DOv%} zWA){tEw8M2f!r&ugC6C5AMVXM=w7ej#c_{G;Obab=fD={ut@71RLCd*b?Y1+R_HMR zqYNuWxFqU^Yq9YB)SmxVgNKR;UMH207l5qNItP~xUO*YTsayf1g`)yAJoRV6f2$Fh z|A1cNgyW)@1ZJ!8eBC7gN$MOgAgg|zqX4pYgkw{E4wcr09u#3tt$JW@xgr2dT0piE zfSguooznr3CR>T88cu6RII0io!Z)mN2S3C%toVr+P`0PTJ>8yo4OoHX161h;q+jRY zs$2o2lgirxY2o-j$>c;3w)BT<1fb;PVV(V`cL*zHj5+On;kX@;0)6rF-I?1)gyZtM6}?#ji{u+_Jz`IW9a=87nIA3aK2~3iFMS zzYP&fCXLEibCzR_6R~#sKN@)HB>);Za`ud*QCaKG8jEwqgoknK7rwW`Cq?RYYE5r+ zh-YUqJ082>*;EG`_lhV^vHEM7d+5Y#e$d^rC*jx{U%h3B^nU%7N|*y`o4g{@w;KP-89>&W#h zTBB2vTk*S|My+4jYTPKdk6yR3b?nAfcd`FeC@gttYuGBEl9wuf8`rOD9VP6`bhNxR znvXql-3ssVUSXfvcf^2L5R-^4E-s=g|M$Wm!?BMl!51d{AS*7Ggjwh^YsbK?6jgCA5T=(9$oK{{z$fCe9x5IJ^J=002ov JPDHLkV1g@XpTGbB diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png deleted file mode 100644 index dffca3601eba7bf5f409bdd520820e2eb5122c75..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4905 zcmV+^6V~jBP)sCJ+Khgs=qzz9*aFfTF@MBLc!81jy1$_D*`qMnYCeSOOSS zh~l6kD7e75FgOnvP=_arGNJ+k0uBt2?%a3It*Y+o?&`L?*#fV=?@xECZq+^KuXD~l z_tdQ>JOSF%q}x5h@>Id>gloHZ!fr_@%N)Qad* zI}<}@Poh`#X29>b50CkB%{yWf?z(t0rQf48W{j1a($$IrZ9{N{@#9Wqx}%DM^fL-m z`X#_s9{BwX>^};}KMtudHpmMyRCq34!+|XCtnqeli6}6}7JiE;H+GAtDViHuQ~X9` zP0^{y>Ov~ufreT-w7!yx_c;QOV>|0UxJK{lqSx`7cx`b!OLV*;Ez4q9Y_XdB$PKk4 z+Aq(kmz%WbOV3IpYsa0#_Vd?)>*2Lc zn) zvVw}USbx|rlL2LMl<$^rb@TnK-;J83fd3GKh6#=C5WlXv83lKz{0$(8x1g-%;q}$b z1=&8M<_eQZO4eJk#nshu9TsZZ11Z~hVkpt8oA4831ZP3Fj3C~EG*%gSnciYD-cpkI zj{J=o1Bg-kJrjfz${Js8D?vh>vJwR{=4)c@ZtTqt#tHRR<9b9ew~kVG6oc8(lNE=Pu>)F6HIf=`kIH3oJBkSO2;+SnG--LDU5kx zC0($63w`LN)znoR#GhW@M5n&8!EGBnj_usF!G5qm>{qhQ`sdB#K+CoQF7f-se z?#7!W#vF7jw48A-)Ulxz@0b)?7iKWQI+fE6Ud#Le4H#? 
z*wIeM>mtaY-X;WO^yfR4Adp*W)N+A4Yv~TqOy)a5g8AjAEfJ4acRWELKhbNNKrc!( z&!ze1YQkhsw=A3()t7B^pu2=1)CJq>k}s1bv-{fV>=i+J^=8Lh=Pn_L(@77X+QqLi zSM!u0YfVL$I)-o^+D$g^8iKevTQlfM$k z8A}@MLX0cd>SIdp0%mtcJaTy&g94$WW9QB?a!}a+T)Rd$eDM!(fgHCnNCsx!svv{S z@9-MjC~sfoKOK+dN>{)_sV(mjhof{qxwvX-7Df1DQTI(g)o z>s6XRhgIhE&g6I!q!Sxz>EW}#SnudH5WeBSekYPp`9~Vp)1-G^r@B46=-SWs(Z;X8 z02evPKG%G)Nf*Dpl|HNSeWdw0`U#|(mpohWGktDRF;Bo`A2K9T}=|{(p(X*E>(aYDag2maC6ay^+ zk7K(%-yfyPJKv6-`qy{#2oNV$%o|*T^A7!TivIn?ahqEKj{ka& z1#*R?@}3aHxtTmO=~U-w(|Xu(B2EmI8B50EvnOk9*GGbcJZK_}E{D#X@`(&j@%hg` zvgc+#V--FuV!3MbUy#-AgE($~;1gULUsw`94gkTgN-nwH+_TiyxD=9t>#{5GHSR=+VC|3HUj>p$m zF=5TOh#WCVpZxG0Mfs)VLU~bclwVS}a)Tud>)$I3M@i?-ZEb;CNQ$OT?W!i>WPgI2K-%bDAV3iV{YFpxIA_D~#F;z7mA_2ToA0 zz;J#$$gz?H{f~tykIYwsN^&ofDHEcc3HtMs_ksmo_H~%=S!trXzdzzq@XJ@P(yd>A zNh?17fF3z>nk9kWDu3|gPt>$~7yTPdOfi9U)o%B9hiOkpO1&hgnGv)+?=lcH(3zlF z)1$73Anp4*+{T@4Fog)rOQR%n2^~~bNRNp!ZBKCK-@noL+ER9Y8^~8Se*UT3c%b7TLtsqf14?X2rJH|pTWGz8-n&h;14Ov z#z`fWWiO*ed){^1em`8ly%A*0PxH#fdX?ndqyYz250dgaflgvo+ zJV{-K7`Kl9diHm3hJcly zengd6QU#LyA&GQLke(wb%#d-6v?HDD3F1f!>{yWg5#|xN?9J0WD7v z;l~T-X%q||!6msgyeyyoVe>kdc~D4&(TwHYfu@{&z(qUzHQHR6u}wE)#*5x&(o-7O zw@7jXJiKu=?N?bq2i6qRnT;Fhz}ixmnKagt?l)w-)BzP^3@k~*Wp97@gTqNpbZPR zy$S@S*a*rO5riY0Ud8DORwP?Adna(v!QOi8<4{14v_(t!#gLwrT(JX4+=L_$A%|pc zXmt?{(xut$cSLlVo(30Y+4jMCjtGY2uwS_m`dG?inGHD{f(#luthNkXB!$a+a>Yn- zK~O4(yi`tCXd{2}Q7v*n=1Z+W<4npgXvmO$@_f~4uO9n2kmNBzD-1S*B*<|l$eA1@ z#7YnNRI?n@&u)dVc}PLoFRSt;=(FF*KZU}pY9KTJIT}LH;AkK9+f+gq?~2G z5#)j#B*jLMG&xp+>KqBOk%JavBS>X$J^3kS)@II(S5WsDjsv%=Is#fvo%C=}VJ79C zu4XlR`eZez2+jdtZkwl~W8jW?O+mCNa{m8IZH0?IgmNQbXlLF4NHs~k~IN5KqX9?a!NuC1W) zYsz_4m;p2B(rNZ|bq7KTK$6gs(A^{fuF@Y|C$u<+ zeYYY3Gn!;AyU4%y;QbOj@OvR}OAX~1e60jYkYi7fGch)Tw9J(lK@#LJf(#;pbZHir zB&II7NTQ;~GF=lByQEr3##lyCO%LAbWBIf<~=H3(^R#^&aTfo7d6DH>o+Z>qt5T4kD_BN0|i~wM{;) zQDk{ivKxY=^BgNdF34d7nZyJ+lfx0Dp`+JSH331CES`Ogv=4}5y2Zs^=PLgRUr*8)xq~v8}M$U zLOie%h{Y~;4ui@DJqJtzG0(xF97ij3CmS@3983s@mls%CJveFs=+cwd>4yDCfvm&e 
z!5#1cb>BZeo;3I6^_Foju7YH-rfKy08n55>!E;8!9e--mI{HXM9UTG5-bio}4&^qi zE~isoTuo;*ZeZWBo`Vxk8!8zvL!O6k1VIoUEds_IbStzRBxm^3Gm}w=_OY=YZzMUw zCMRKGc;U#1X^+ec$Xs%Pdmk&k3F4CX?~8#O4uI@BY`Kmq!J0Uv+5@a9tSpblLOV))hr-m%u%E*xX4>hBnb`e#B{kyo18?4;4dFUw7M^53Rybu z824~aV-c4}JY7hR>xV*sAg3fy6mLS7LnaNbD2_RfLpjc^aO!{=GM5BGo|C6yB@D9o z>0^ok{idSKZKI>_xtZixNop4pgLk193Gf?Ao}Iaq1y@!>f+5tPYW8ZSJw77VrMS#< zkU%RzE|Nf;cya`#HnR*FQxeQ`<~;c>Y2!DH$r^KWEyp=Wij2g!i9-MbcG4!}i^_bU5@kB8)I8_7rlg4C4#@0J#r1#qtCFoLQJrO9E% zt`s&x4TB&q*Dj{y&(q&hhKJ${y!SHMP)2fle^N(DLRef11H>ps$3G)mFl*0{%0f#} zK?dh~_$b?`;>l7qyL_2N&lj^qc}_^Fh@jk*X2^mq@ZAj7%2fh^%)qQAA zZ3@z-Q#;=6kf<1C_wHkrQ^se@o}KxQJaxedR`bDn4a5ufwojD_f5pWfSc3vWaa8IF z!+Z?HAa-6lxNq{aCuDPGysez_-`RL=-eMvHI(P2D`bHVO)$w1e0^WP&R`mBpOFQKR>_w07I2s zIwmM1dOoD+-D@HOzvDhQc0abkw){E0*){N5cul3$g6n-PcZs4>q4bV;KlnN~%kbn}!V8maBKN?~PDN77Zj6xT>KxccMrJYVYoo)adu8>W% zmv*U9KCo@D{=sCEstjFGl{%?R9Bd_S;`C@G{FNG~X;+5Z0h*dJ1r|5g4wB8=?S#Zy zt3sAsXM@aL)nWAyCYz08&uXYp$}38nkeVvA0^C`|ts22ve2Y2>mf~J~_Til&y|FUz z%#l)O^+i>bDr7NsoiC}@GN^5^{=sAkPSF?VF#7ysBZm@DnF?;le_~|Un-B}Itc2u|IlX``0V1M3jKlcCTY73+_+5_^1 zO|_7<%PEyPhbqxCEnFv#uom}FdO$lY%`OKi#h<5Co8ZPBFZA{I!|wAx!c?aisEfxs z?T$*AUTc9D8_Hpt%L37MoudCVml+QIa-Q{X>F$I{4t=051yd2KXJy7g2ho;dPy9%m z&|3%hK)bgG?)N=_y3^l5BAU(HpEX16sc+%jjdr-wd5e*w`^js6LDPj(u<}q7%axih zoQB@MKIp*y%l0*noe!-3>L8Nvz`X|#;P=}%;m-Yg;Pd%Hg6jXkc0~S4=WWP7_Qlvb zG1>9)E0=~O9SWcSdXd@th$;|?3QV+Z@1bR;tdb%M2ko%(GTA+u#e@F7$5Mb+;mB`4 z!xVgv{Jp95%Y!hpT7-)jrQ~&IJFY@h`L?H{0L^~?0CJaZ z{tZjr)sT1m=#VQw^-Fg;S$l@ofMbuY0uykS+-JWJI=h~`ci}FY$50ATJ+%wA zO77DqVS>075^y6_kJfo$5r(}BH#(lkaYNw(n&Hbh&XQd-lYhgIk-UdHhZ4HzOR6cX9O(7$kLq}D}u9EB; z-dhHFDZZ<8Lc2GP(}(AKLrJ-Oau&a1s?6Nk^&FO z6KSRZhEqx_SQs6S0+Eca!Fb^G1gONmI zC+HbyhfVOuc?OI&h7uoNn}=`c_>iW5NO1q-GUX8K1^!Zxzl z4XfveR)GIBSo>}=cI+IH9~|U>#(X~teA-&84{aZTo0BMk;yjBqEL^gX=_9kDnP=}a z`+sm4^17nldnZj&U`51GznG$gf}Fz|OlbvM2~cNtN6bbO;LjW>4doDpXIHr_#-WEK zTp3oTSyarnG|L?64R(Lh#u7IM@+CF;0?j-dAKR%u-gp$bMThf`Y=V%QniZFqb4;b% 
z+^sU^c~$y+58W}2ds$fqbXadxS)oD}YcBF8+Kmro`dqK7bh9_jZo>N(2|7ZqH?6u% zs@LZQps|*E)s_+u&N{X0R(-hsYauy#KI0bVpUP;&tcc8vw<4D;UKP1mLj0?AU!cHb ztdAKWi}A~qZL?OzGg+1b@q^keUNsrViJ`HuE@E!RO5*b9*&nDxR@U?Q6pMIaj1kMY qJl2nQa+aK&iDQb84*TpHAJ>1BQ$$nT?9A!_0000+Hy9+Dw zQlg?UKB$_cZ8RBMYcyI%jkQf{#wz1Xr!PxQ>w~B~cKP~!=iIw{_rdOp7tZhwZ1+g(AXy-HL10DFmbXNx@L~ z3H0wQYEpsnp{iIyzhEeKgc((i$;}oAoqHl}Yb`&gx~}ISy|wl# zwdwQ;nvEgzkAnwYj%g}=Nide26RJwsNTUEE)Q2P-5}7cQ3Z84R%7rdvN4sQKhOlPcRnSrOp+WGP}nNJgfkDx!pMkypKGe90p51ezT#4MxAxQ zN3CC+fuRy0nP8u@+)%h}@FHZ>vWFTTCD?*bPf|6Oz4#LAYDsH*sO<_ z+8Vve2|wE19JrkK!TNc*tzkb>2=OxIfDS8-yiLEA$m0k(kQf0ZJlj+Q&+pg*@-o6x zTdEi#&vL>m?`;jX+>v0bbWnM`S<~tiA>-z6^m&Xo6y=iH&}dMDp40vqOvn?CbR0P3 z0YX_`z8klIalWefMaf}lN@-MvK>)C@OTMQsvEFV1j6zbmglN3)tDNw{&IYft@#yp|U;GYg&z^)Rt7d@u#0Bpe zimnOEmq&Tef~aWH7SjqERa#-iBMX%jZKUfNcy71bp|`IOKD_d0nA~D<-XkQV*jewl zx|K$GjP@M*^t)>e04FWS7-Uwy|!6q{ICob5gfvYaErq&g;Btk^VqnotOu zSN-|V;a*P<^rDbv9KD!YExR|ex)jop)as*$VeKa$K-3I_~rZ#$8n0D;V;;rwan!I2{& zEnl34toAlI^wpPe zlye)Ao4ycY%W~JdLaI0e(MHvF%G1SkH=uyAXf{=!ABS!n#lZ@o8CZ4XFmw8#1n{&R zVs(YP+3GCIkwRjs%TCiYQa(?iP=b^m$jib}=-N*{ggXx&44S-zukU>W+LOO#ZOZ!~ zOnukpUM6x&FsRNVXIChVTfbhB(rD_SHz|4}839cXjAmbiVtspfigR#uEFjIMj@si>Ore+Oei$<1cCarcfF2@0*j682U1A9rp; zlE=d6(}XYz#@Cd03QHCwxdi0=G&$N_{=Yy1XfbK~!v(L-Fa7gxu<_$VaOSVq1CpmY z8$Ujb&-~r%UfZSfpfHyQ7GTlb5>~#R>JqSaSxPVhD7~ea?b-3_j}BnQxCvh0zmvuF zfymQ6C7Oj$o(rpg(e8EsF8b6fI~#$e4S@tKotNPf@Ro97lv&dmNB}MOzKDHx{Td^7 z^e>kK&H&X>w(nxk__|+v<^;uhpfq|w0oCgN2n*&Uy98ur#zdLa9sUH2!{g=78$;%} z1L1P#zaX{-%}ARM>G(3`OF*1abzPV`HC~?1g-^B_&(OXN<=~`T0!1J)ouwb`hnx4h z9=m{>-*my^gYQ9FLp5Z*znzJYxJcY)*bL{8bEG_x3mc;?*yV2q=Kg#a+Xvy`pEue zJ2#<55|A&7Ku(lOR2IUxb#E82l~|riL@t>>J=|1!XP{(Gfq7D*RSSuh3Wmux1H9O5 zbzVzIvg#nSb+dS_bpfB9xub!%!Jvc0T8>$5O?a$?#5xXzQ6&nfaS6~B@Yl=oyt`5J zUi|^Lo>^h?bXpN!k$b{#I*o}Gg+L0KqjiNap+>{bdB$Wh1B{gdNt&z zkU*wl;*p0Tp96`fH`Pew34JvBLf)EFl)AaU3W$CXzIJ5}*_hmnyplOlgkJ%5dN1-^ zfYFOQ7f|g*o(nK@@|F3Nh4!=hOBWWfJjm^}QhYrdl{|g|c5+Shdb>Od$s<#GvjwI% 
znqg*ZJ*3tdIBXmlNOJbhCP>{}#ZfQ82y=FCgS0Is7aB~A{A+vOWk<4kG8-CsBA>N) z2Ro)Vo9)zRim|LCBI$`F-!JxDQG~E+nVNaMkGbGoHB3M|cbfqm?Jyjr6ln%D z61dqAY5B-YX2WN|HS&_#uo&dO1ZLdVcx6-*l>@yGiUd^twKIQ z1myy3dN1;B0z4enBibGcLp_=&v^1A84wc`CetouQG9=$!N7f##SDg2(;-$ z`!;UT3E!5cpgGLm)#4Fpf{Qj}^JF&E4%N%lmmNV4&oVB`hy6ytSLkp=a!l^3{cMD2 zTZ1ifMFW4}K)*?$c>mDR24g)rEZIEGUiM-d`ALieTX6^VNp)73C?Y9z`9d?=c(?d1 zs~_K-`cOc>&%IHK9z-;#Xp`TMv(d*wB}E%mPIu_y`4;N)(a6iqDI;Sfv%{G`Tq?Y? z`XY5qua{3ZRrAk6vM-O$&0Shch^Vh+#oUI{16*NgkrFgmFX!!x!YeN2Yr^QVW|_o)XG(ZcBN)a|R?) zB#;P8w$4loZCthCwyD)Kv~>DA|AHfFa+EnB3aXYkonv5irz&0+e_1c`|f ziIC%^3DMCrgrvlo!j#n640IkHIfLEfbrQs9Mtu8!_VBgvQKZl*M~Z$T%?|zlVT_2; lV%Z2*hu);6rydA(}wUDXPCF_W1vnaRBK zeoR6LNsxyaZGA2++G?*?dRwg0Dq5+E#aFEgnub(`IsNLD^CGWJ)s74L)DOcaT_gD&woh@MDDT7paS^E*rkp>8F->o#K*x;hPkb-{g{@G1-RXg&d5PhrJUf$gT>-Kc2+T~(?$>*Yu zT4h`0W>J$pZ%Azsi;{nVW%G=At*)awy8+_t6`#e`RGh(2zZ43)n*13}cE8;I5R%*` z|5tXk`=>gMs>q*$@(4m8?`JI1Q?{ zRHAd+JgRmHP9yV))rP7q3IO??4XSoJ$5!Su*=~JDub(K$fM<8yf*a-K*Qz zPelO^(`|+V_|-0Wk_vz*qdO0>?1mS)wM$Y29FC;)bEP-uAW0uG0ct9EO#m6#%K0RZ z39?+K6Wk5gE*|+^5I8uFyX{ALNYa2Nz%T`Hn@(}pU9*C57Xtylz}>iUsV2Z#2;ejg zaNoZ2a>iW@1kiDtzFVLPa8^~&DQ^ARm5e)008Ic*fO8jsh19y~Ki*W3-Qpae2p0nv zo(NXL_4n_CukY&uHM^BPt?*wD_pyjn&Gy=Rcfp3fUR68tMLx;5n(a64-U;9T#U52V zit5Q{QE!`~T|s99zY=X$w0cfmaNYW#0DU9B1CnnlE=a4Z9-s@!Y^>p_bSr_8-_-*O#n>*O#n>*O#n>*O#n@Ra~B|fQ*l9(%QQf9xcJEvaY~>ll!7d& zeMy*!>i>NLUU=_aXnXb`eD~hF-~w+IsQDzK^0wEj+D$`WSMKSA3v0K*aIW*wzx){v z|Lq;P{lJ5=b}1e+^O;s(t?biT$yLHOtC&t(07^{x))^Qyf&6nz%;wDIf6##eu8#&sKFHx$9)9f0Z%(CUS$4kJ%h zh7xEzhK3iU_R;u@KbYx|2=~79C&+BFEBd6;PpcBt&P}D2M4-D$&W5VeCtg1)xQ^3! 
z9dwsT*;DBzpVRTKQar!Iz)wS)Y_}P!pfNfWp?4YK(O3Tre#~%m=I?&-Fr?${tJVhS z>=lrTBvW+|8iS#2`i=IfwE<-R;44R%@X>{!`|u$=e(U6DgfD8a!sD+U6_7w8>_2iC zX4F|kjj91=H`?IFhx(x5cTdB<7oUfx-gpfTz4Im<`TO4(Xq$f9`@-{Je(C_+`S?TZ z4vcpQ8~0gw-iMFABs?!xhr3^RjtMxadO=JCss=`ts28z5FLd@+WjRbPjd{sS);z$b0hGtE^P}he^1i z7>H-yd;^|7eoS~C1QmcUcehUNIDmRU&%AkT#6+Jh?!%J56dPSF5W|cS2~^FD7Wvd} zT-c21)vi6B=%lT`_GJe6+|LDhTUPB z>Kqr7@|jIF1GGeZq0h@xpIiwP1yjb9Y*zKO!2wZMbhJU|{xvrEbS+BPy11i`MdHh_ zU@6%x@Ok(Gv{}~ZjMb!kP=K2@70hm|8K6>-+veseAW{OYUZ4qdx&3t8|MsoFVo&7r zBR|p`^0RB9Ym&QOBA13Klxzr>w7U5`YSn4T7nW@sCeFfg|s|3n!5j{|JLH@6H|aVdjq+q(_^fRXaK3P8tZdo9e@(iRu< zt#-^$ANe`N*~%uK05m~D0gxI2h64{X!b14LJ-fp52WMNa-_Ungz>n!?42H)aRu9tf zZn@BbcY(EZVhL~!%>xXh%jx{h69NHlePI7Nbyew@+aBx-lTRSu!x_l?#;y+Fs_qPn zFzyAQVd36CK07Sp-tGSwzO%a%W;so;wyOnR9>!fGhokSm2Wxk>z$}*;zO!cs^F5s7 zdN4|kx0C?4Z8H;L+zUX*9sl^`u!*Ba_}GaL;N;-QdrRble38%L9&`MolaSM3!@FQJ z6G4Z0_?!g@Oi9v1(0V6LNg6>3G$lEgO-Tm6-~7mZF&SDOz2J<8TOPaz5~@oX5^WXm zRgCN}thFfSJHcV(r^j|mGB%U)4;_7J+>jr_V@F?x)tyaH)Y%AYx|-ou6lC4*?Vr!2 zJS|H}beRSgvSlfiJk7T%A+RjP#kOg-=>Ybx$D05Lj~|1XcHQh<^OqD2_9kucVwoaqihgiFwGD}j~1T8KAq z9 z0*J_$7eGipRXI8<3eY7Ipjr$(pS5fpOv=;6o~r=0)r#cH3Lrr~6QEWsz)#GN7h+$5Xou}0dN}v_c^boY%{;YZ{WV+0(M1QNN9kM;!AOnLO zA!aO<$`pxu4!x90Kzr3RkuIy=J+gW&=9H=qA z_U>+&-|S@9p4AWyTLkr1J{JXz;e*%scI*>vDKlk)jL}tnO0kitDO+6 z?2}J&RYIn-a{R1}qm0E@ZB`_oFkdWy1o&B&jg?@V^{!r@`-SP05aqg;X(mq$fxs-TLGNGl11do^z)ej zbyh|4sl+n@Iva%o$n^8W0w|C#6u>A?ev|-N<5GZdoFLuJoL?^%Ksv}8B7j1W6%fFy zNPbv=Zjk_D@+X75dvA_6E6 zFN6iKm8nL!k^)EsSvqW^!UD*VZ;KXSB0MP{62Yt>fJB5F5ujW(!es*ZyvoB1VF6kp z*=dv~|NIJ2T%dOv2k0&0@pc1G%QTb_ih|Yb=$T%62%3bDw82d2XhH;WDF$Wp8)|TS zO9Yk>O2SA)vS<#MrV(i-iw4q$z#0HWxD;ejKcAgz2+A3z)@+3bosdkEd0g z;D&1#CpZiz#?%|L1R`t^3D6uAKsmytNfdzqGC|f*0VK$e7Qk*e$z8qXvXKiA`1=hV zmpdyx!B&1`%>9K46G0ec(a5T#01`o#KmdgZm-_e-0c6Mz|AmPOGO9|Ba#>%@WZZ2W z>Ho;wdKvvm*|hl5+kCX*InGgW8c#HK{=|ok`9yjeW-XboyKLmQg9WCdk*LNJcD!Wm8!M{^|rzMI;*ms)i5}x+Az2Z&!25I4rWwWL}BX? 
zEOKufEUd2?%)sM9ARn2w5R42L+weM@-Ge!fsOt>oIm=qnPh6z`_Ydz*&dt4=I7*o{ zE1hu`!$e9>O-f74pc5eSr(Br2T9<$6_jJqiuh$jk6-OgwWnppRih^SC?_wkr78Flg zxdOMJdh#qTEon9)Lx{AD zp})x??JVrlV(c?%q&{ae4u}ilB*0A^Hwr0^^>G9BT>K=*lpq(QLcEr=q$MqBNlRMN c(!@yr22-Ey)4s~&`~Uy|07*qoM6N<$g6%nSQUCw| diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png deleted file mode 100644 index 14ed0af35023e4f1901cf03487b6c524257b8483..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6895 zcmVBruHaWfboaZ^`J@5OTb59uN+UwfO z>5DKPj6xxy*f-15A^38Hcw8gS)fY>m7X^~)>WdY`i-Y7Ev5tB;lGU`#+aci!MOUUM zD}qsF_F|N>IHn{!fdYTV_wX|;<46$x9(d2I{>ArDOEMG+AD^=P{ywF-GrY99`C;pd zTVmI*ebJ{Z?*lK5{2OnL{2bsnz#klb&V^vTF8LL3idsEt+KcA+ISDVmw89n=b3!uh}YH8Am2dcyFwO zP>3sYL|70%XiHU}0Zo+(MxFf$fG{c^GK8Lk0nm!?MOUlH=$7@wQ=P+?afrb30+O<` ziTG*r2zL#G;JREn?w(KwKTW>kAG@~nvD;BDbNA6Sw3X7nOleNtO`EFE_iw7?Nk@V% z2nn}DI|Z-=FUSS{e!iMKGH%z#^FftGb+nGAxybACovek#YjQ#vb&d*p+t1kJZ`xQz z;u|ZlH|p$>-hl#GilOt>$n{u0Xl)T;>j-tlI@@Z?Wzp-=)#G34?74swCQ~ERfdKmc zFhPnTvx5a7>%ShCv+=IbEiP%zhTLzjnoMn+{p#7s56cR+1Ip9!b!Tb z`Sm7~BP+1z^;S0iG7&)FAn@&x7D5ZD8A|Rn^8#NH904lXb|d*p^Im_M3cx}s7!4)T z9gHH`t8+}w++;htxjC@gx{~KPlVjj*{S_ks3$9(+#6u-Jl&IAP3pu!CJwK#M5t6c_ z>9wdD74a&~(E(Zk#1U@ZTtm|Z&dTxVSzAiRZr?zO5>r03qKN!s*CrAGLWn8vUzShH zLj>)tEVfOD(e%jX+M_)bim*#E5_p?Gy16VcdB?_AS3UnYnfh>x4oMP&MNjS{^B>++6>|-QpN0X@X6L&Y0v_nr&QpJ?Nedk76e$t+1QRS1iuh%{F%%f!H-mR|< zQLG8Eng=h6w*&uot15mDdp?pMw_z>mzOGmllD0RJTU#1Lm&egEdG8hyS)~+JzIUCL zOasw+)T%|5zrIFI%imD16;(cBT?v`6d!z2=P1Pi}_cC zaY){_eM2i&Osq}6Oy>Y2JfPjfx74>{k`N|n!sM^n$$Li~8z=DouS%NFPq=6oaadk$ z0*u&FPkPm9z)j6IfM-M)d8(pgV+4M-S4t-d{CpIET*U$q-ZNqpnS{w$epknMM*J)< zPm6>bel7I#uL*$fN%fSIg0yd#CHM7kuV;h_C^iY@0i^Gty9+J2aLrPcO&e_I4V!m|%QLzX;!0D_phPA9;f z54Vuq!_U%`L{EsIT^4|j0x3HRvX(Vc4%<2x@Oh2+Dn;)>o2t)Xj~&>w&Vc`00uyVP z+rjjLt~xt1(^VjmUESy@cLz5nC)L@%fx;yxhQ-ro#ptR%A^-9B0u$XgK)sha_CY+|f}c==vHJ 
zIsE14R^;ECC&mE-m5-zZK z+8{Cl>U!wJC$s|y>+%=$e8oRsp!aOoBrJ@MF;SPkbU$$FNuOD87#(v%q_;vE<)g{{ z)}HI>svC+uv;Os$twg|H_&AuO>#CKsTo>rM<9BT$m9M@;K7t9+k|;62$@KkG-xKZ2 zhe^_oMi>opdhOmo+KXR&YGro*f{q}Ep3j$aj{uxYnw$E)-`r`v*$LKBT)@uM9ye4J z-Q#1bNUOU9;6>Q;!8^3)TN3u@@%O2>^UtqNkTbvkW<`=Kz-yfT?N{=`iBIXo`W%cP zOF@78`!8CjaFJ~gEr7rbg{*#HA!~+a`8W%{Bz>w?4Y=;y{O2FrCCt!4 zuy^g+qyHvTAKvPoK+M_<8JLnR5|X`g3r*75jg0vjI+5}2Tc>@aBLzSo8U5@X@4sm^ z5-ujt+fn`dMM}KeB4Jx*2>uVv&wPi8j_zvT3~}C%Z`$&>zV&72aX)=W3XlNt!|X?Q zQm^Au32^rJ-)S6xb54f}0OiA!vY*2j%^E_@&@x*=87F{e-s!CjZ|nOe1f`XR>1IGiFlvUuJSK*t=o+=Yf5Tc5TadL2IQF() zEi;A4K7Fc758(rGN!uFr7=1be_I@-cIEM1amN~NnsQVQ zGnAj7{i)NE&jag-b#>GhG`pj=Hqeb+VmN|mT#uW%u2aZ9WP0=nqgD1a!xX1#>7~!l<@*A zoYvP%oqLK3P?~FShX9z1Sqj6ovlDNLrBCj+nMZO-0B}XA0IJ;6%pJ)C?Fk@Zmdxqz ztUAO8CbdHVQ=%<(ai;xq23`ZNh1c{dOsDraC(;Gp_x{_&8?%}28UgCOUzsT>BkT#_$;_WV*qs7k zaPyN$mvj4DM~Poi24V76Q+NQ14?o+kc?17edH8v_RvLR<5W!E8Nw&XzRMg*N-BY$S zuzP*nCBWq5k(6tj0?eD4;4Tw{lUUiyM?|NRtpotF6fZvOQYu;~fC>eGYcU+!A^_gI z>|g&+Jh5H^5!z*f#wXumUx4XTZuC;;xMdO!D9;DmFW!WFarO)uTvuikAf~*Cy!Q2% z?KVMgd~=fYTB|S$Fu1;)-b?J?fAZ6hBmmb%3fCA#XxAj1GG?%S0g^}b05|kYcetUL z-fe4Y`Q-Vtqy|P!>5)U^_~}z_aa-{kcrCnU&C4&rJ`sE|B!wvbkd_OtElu>j6jNVj3Vxd?2fw$+FBYCS|S$=CYSc<5Xi_2*; z&gOy)`=+1ggA3j5q=$gF`8aHR>b`OQ}eQ6h8^930& zTfz6uT#6in{r9oABIe_L$ArY#I_=r^EJ;?q_OB~WfagCwZZ1HRKmdgU5x6DEkfO}< zfwzyo4LP-t+{?-ekO2Z@S_?o$$g;aAA0l1(9&md- z<=AWj7QQA=_Jw~#d#mJ4?b#K9JJqf<0gnCn1538001ANs_@tzj2-yZ49YM<%;c8eY z$FZH)D*9o-^{baHqyo6OF>A<%3Ni|8q&>{r+d^jT-r}%~5L31_lEnvhk3OrL;pn_Wlg^IkA4rJe+-a^UwY7R5qH&49$;zI8q6 zuFa?QWFa#_X%0VCHo0|kEkwel#20?HhOE_Boonzd$ROVHrqv>s49lswR{|TU1x4L9 zYWUdAHK)eyY$D^fHyXs|f^6qRnrJT@3q;P}(?aHg7lc1M1q}7Ow>ObxkL;#qWh{6p zNoJ@q2lV_2;LW5yv5(xor2$M!4PBBnq0SsoCnSIMQwPW-xK9!YXN?9Ewl1gu%s7*t+Bg35~wxOdVL z_!J6maK$|`wmvrlW(J|R4Qp6SZiZ11h`rAlpa;f+xk}ztOG1=6^mika+17v_cwJcm znb@*{glqHQ_Z$<{mdK^Ro{!{5S13qeX|4t2CTLg$Yx3A^XhS&(#Cr%31fKxLk>AE+jwroWIAJqGD8O53ik6ycRr{+uucnefYQ1B=j?lwCZCL0Z!rfHSi)rM z13-u*5X=u3)NR;&OIH(34)$~;+?LI^bTx53U>L*(G1V#y+YdHhk;R@Ll=i?+OkCd- 
z%3*SEKUbcW_h90>pZQtm|g{tib$ zTp&#%&A4L)t+45A(Dt7dVJl9s;bIyEC|u)|eC+Xd1+WujnF-*8d}{%+%uSDM1z{$R z&7_>g#s<0G`%Nz|CMXD((fWe2kIJa1h~| z1dux=-=+ZA>r1lqv|jhme3Ej-a^{v(vpkqY`fO7a6BRX#kuLv&l7`Q~y7ROYB*UHn z+5!+@oj?G`=>;nRoTL}fw?`M#BtWKv2$vOLIJmo103=_5DFBm)B`<7DKe~FO@{*5NG})#;LV$p z^ny_Ujoc~u*wc9ddR8e}^0QYE$@Iz9$PLF)hny$v0ZvsH#-G7`E%D3)bN6Cny)?Oo z+qSv+;8rB2z(RmV8v@wL?N9-lEd{Wj+o1w%wGhA#`MdzbHr2Go)TqJbTt%3<(;lIm zAUDzU378K1rVR-b78b-Utqt;cXu%;L^r5#m;S(UOxMfca@Vp&7^2Kf$-2R72FCZ2X z4Uz3AJnS1&!MHIBQ6xl$8R)*9=6bq&fnGYy#$XFui~gt_LO97NkaamPlJi zG}q~I`=rPHvkwCoH&ISlZaVxMHavs*`M}$I$W4lzSC%}s2RCQw@i<@HvgZtV*b$z$ z1usHku}*8?kXySDgM-1OS3 zUTf%8r$G=$z>}u%up?*XVrolC&vhjv5k$Ci$41h-vY7O&P;e-=MkR~*S`E2p?^e2R z2iI-Qp)^O8l4dnAv4*)FoLKDvZ9bYE?D@AANMDDx52qZkTzGY)>9HjOKPle;xH&j= z@eBOKOmjv`Hyzps*NFnc=^TJ|TSRUrK%GPVdOzN?a*|%a6f$NpF_~t|=CiIQ=k0*a z_gF9s&CV^f?WRfhqJP7Z2i@Zm5rN+@gx^9pm|1YoJ~}B;5wdmmL}=@&iPu5z8@0Jc zAb{iaf=vM&M7XvE5Rxy|@!k$I=PsOZhtM{&ZTGnpnJdqF)xt#!N9$N6F zgblJ1XdAJum&oim79o@gW2kW(w3Y;Pl=9zrpi`& z!mJaI$>Fh;R0Qh?H=tA~fP;NIicACUUhq}tw&EHtE`c(si%&^rOkR(5#=6rsU|XEx(9YvlOxt7`7r?j;Y@Ha zPS9~Uq=Rp`VM6r6xi!r4g~#X|fyA-jV9L%Fxb&&yzc@|W8V$kHtq`T!J->k$fwT9f zIY8D*dwEf&fqFE>)T?2)4Pu@N7f&9Xf6RBr>&*6g&&!c~>&O}H zr#}qk$lyMl5QDrSl9VKmNn_^Ee2iK3e)M7{i32${3oSk1TC7gGkDd~w?cAO{}c+|2tHX7 zU#BJGcQlcR%3^u|EI#sS6Kjh|H*En;OH2Zj6;&!Hp+#ASkepSggI6tnD`?^Do&Mky z_(gS3!Fy7-66*lojXxVy`EzxYFjw%47oscmr^CW}fN#x@ih)QBU|84q*gJzJCZ~13 zcV=bGip38P%u7EKDP8$aq&)5O$o!1&t}Dv=F{)U027y0E7G!>hpM_^Fehd{2TmRyarwi zugRJiU+!L#tDSf;g80yf8j!fq&|tdLATY2y^~;e|A@Du?49j3d&XV1QyT&!b+bIYy pii9&6o*bz{@b60mWOsVP{|BB8eXZ|AYE1wD002ovPDHLkV1li`I!yoo diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png deleted file mode 100644 index b0907cac3bfd8fbfdc46e1108247f0a1055387ec..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6387 
zcma($WmFVQySpr~^b#u_OG=0|(kva)DP1B+cP_AmARxJ*NC=Wrg0zUl5(`L)gp{N- z(%_OG?|Z*r_s2c=$2@ap&UtF)$(eXP9W_!SdLjS-K&qjxY;ZTH{xb;h@8E{&N(%r$ z+p3|gU=%dFmq%!1q&9_NsUvvk-GvvZjaIJ%uU(o!Ypc=Wv%E8e<<)SFdRM{tz(T@!nKT{;0jT2A&dgKu3 zk|GDUX<&73+f+CnZza0G4g29@hmNkl+2wP#$0yi6=u-4CD#*a8LxJLG9KlkveQ7v} z>E#)-tL=xh89y&5li1I!>Zzc!_i6V~nKP^5-+!69FtnX*f=*tr+cf&UpZtLBY|wv< zJ6r*Z5374 zi$7+B3A@szy#|*$Tb~kkzc_N~h3;oe8q95K$w@e#5FRGcF}wXTR}t#^!OnNc>Z52w zu23YrlIQY7UrLLcFSW5ctMBzwrTz=X-m{1Y!*LWUbO~;u&&q8Lu;wlGFqO2h4olL; z{rpPfr}7f=Z)eZhFw1_ITpft-VzPF1CHv-W>u;OCBJBEOEn$HmTpFjX=xN6-H5#V{ zn6Si;q3V*@lFMd>H8;M}vOp8McQcJ}^bBfV`1xb0g0`9ZZa9(wb+L_RGO6wD&I8ouM<}YVDFU ztMSz*yMDz3AkS0YO)3_lYDarEUyj?A#9s@-ln${-1Op^nD7zREi=%4Hy%V?=YS7G`L@>`3kHM4eAD%)t@F};|C zfj?B^Kox-WuPMuDp2=LPZU3Obgnl7{dD>|>*A`fn-0|^8uAHJz;<)tkTXA8lI&dHt&xG(4Il=e~QNN6o9YD7H{TR?17eM>#Z8#Y@_=7fZ?HkZX8i|mEGs5mR`uBi^ zzFh5AG^3EMyvpx(a*)!eOI1?nPTn?v0Ly$)KlQ16Xfrzh+}+Ua_I!5XU@ciwrAZ>O z<7!MU$n6`x${EB6YH$hWOMuSEw+72Lb~rgO*Yp26LGdNp*;^;HAD@(SAr(Dk;j7w! zQ>!M4rxUFYn7E?v7)2q)2rJ2%PY>A>-1O7bY~nt&n)jYnG$(iR#hvlih1p}c)I+|I zy^C;=uIJImfY zL~pm6t6Zw8FiOIY<1>EBS(<5`Cv8DBcZEpTCQ{@@-|2$Bhi;6H?Pofq1Z%b2@)&at zUA{9iaqi62D1|=T{xTe3Czr|z52P;M7EB|V-ss{qspYc0Cj~hUUURef8?i5H?e;kA z<~qW5`JIc(rCLz_oJ~>x8O2IVR%>+7%}`TBSQt%i+m+4tV?z0(?5cf&1v8cNlz7Lg z%ZS>-e!({r)+sH_1+QJvE5BqOgmfK_$X*P0*x6beoRN|0FV zBu+T9^1E5}1I>g&wC|Bn^{(R$!_A@+E4<}3n|QMU=H|GuQZRAZ+zSZ}SS{MNj&mi0 zRY+fp&8IQn-}zGeIVj+qntrIP-IpXF?2xAoyT|i)X+@HL$+|t{#ZAvBrd?L!=9aLy z%@CY;X7U41O6VpHq<1UBk2vi~afo_h1Xrb{vQ%cE|Fvi8EjFCP^~ zabJnB#=NPyBD*BaNSQW*VI+TbEmlu2&HD<4U_UQNUR_`K~u~XWideSoLc(k)vEtG^CT* zG`Zdarw^M&6C=~oi^6W#WL!BMe{E&Gg9Arbg2gg;cO^sJ#+L$ zWBP!R+lcV(p-B#aK<&Ly>?*3fngF)TwSRSmGJ!zET{Brabip#AUPyChm}S9IFG!l{ z%+I_?Cl?zVm9nbGSU`Ksi%z1{vEPpxnv}!StZLIR4yl9y>GM~KIIbNdVs|xsuCpX=J#rE`8<@v*FO%Lb)=#c`~s7W#9EDhRI!G*VBK(y z5D`)jJo4o1={q}Kg%YGhdH~@PGate(xi{(OiQn~MMSZM;!kHNh*1-e<+YS5-j3b?2 zq7SYPWMn1a!^Gqxr4d1gZ5G`QQ(&4Ag*OcnWO}~9rz5xeE3Ycol5cj$@jggn@8x2* 
z)UpG-U2|Av7a)Hi=b^@SNp#`PEDfswF$nyx&rD*+4SF}`_U48`=1VnBn}aEm{Funk zSWQuC>r8yUkd_D(dKEqo`7i}}{#+a?O4 zDIg~&^q#d5-Ji>``G%gDDzV<~+=*qePTy_lbVjK?!d`>ygnhxwtyL65_G4A=A}{Dh zq;iS@h|Y-wJdeGj1b{KBTkst|klERM7*Hwy#ZO<~Q$5~GzC~WjZHz>=z3~>oAVbbv zzmgOw2JQ#Kv)GT9dwrXGJKz5(Jw%&rYPjfi;TI|dyVJrvaZ*ivGRT;i>R6}8B>7*j zbJi0%9UfLcYKp+TU9qXLSp`rm`)3(g6YOdHa4cv2Y)-JCPZ&g1Z*%F~T@dw@_HA~- zxeq6NeOi{(yh(ziMZ)4yIfDP6nhTg;)$=9N_-{KO!ZB@c@e$(SVH`%0b3YF`lgX)? zmPOF$H%(2yD*LrQ;d*vDgW=s=2h+1RYg?DCXa2gXNT~W+Hu+pBZ$bO8IlS+nqXw^| zBM2iS@v_S^5P@J5V0gw2hamKs7Wro(xWlv)U$%_D)AA{;Mb;l$7?FOK*2{U?f_M(W z4#aOFFlOC*Grkxzi#w)?qgNP48e=dJ*`EYNKfLm6BlZ-j@VMi+{0T>$Y6e%gC|6;v z4=~J;U-H`Rv(<}l7sEXpm?7;(jXl{O>aLca zP;<5GjkKb?74YTOqJAtFKzq|v(-+j{(@?GPIKVS95tsog!>*S60XwAsnYHqG)dW<#@2UIte}({hi5+*r;^rQeDpKps%Ql|LRink z=CR6^g!&1h1Ks5JplDey{0{E~MNPgvQNeH21%lrCFFh~_7#;b73>@zaFo0B}hXo(J z#OVP*a2!ZeK|x0LfazsE0=vAP5xpQ58{e}Xtzn5B`l%b)PM2PI{UmZ`}XbW%4eE=4-VAbQ|zojxNh6BnLDzTlx-stKQP0|=pi5R7qw0g}ivih_z$ zN`Pc6h9K3P5vFz^s^};EaGwq5yEdpH4Um!3Lju85e*w5hg)|yEkihSklp#pqhWjij zaK_T%_)PG>g`7N9$25qwhR3WB{&pp8G2;J-#qe6%xdFHO2AeceqW`Q#`J1X4*a>V4 z;Y4EVTMA!^vxOA;$ZDCt!CPots~0yn*Erio(G!n)@W*|^D_=Wy;f*k=tF~9Zmr)dn zCzfODoJ@UXXs>1NP-A4#YmmhGXavn<+z_gJ`>cZaGo@Iz2J)=M7{{ zJ;n45y6T86%gls;?`*1bFl=sXf1H<+2AiBU`}H6YM=+eFPoz%Sg=s>Dva{ls1mJO? 
zTWP*i(U7Ec^3%Z$g`f%l##*mSt_wOa-d&(0A0@(ms#pY$P8SX-ZAVg)> zpsk00`SNH__*AQ#=>~|-wScS`e>RBCs6NsQ18sz`Q({qI(fOQUY10Mt%YO^v{>w>TEBSR zi>oS_n(}3A8W+^iWG~}cr3Bv#s3W>CFUJm0ejS>=V^X>!UmDV@|xH@hWB5yhc zuXagN9&cY%tMFc@?PqIxYmy+OSGU`O5gvK2Yaic7tFAiaz`*T*dLafG4tz~<{L=*n z1iRA9k6#TYhCWcSFW6P4&4yOea4q&Fy6Mbkfl&!{&@KmDXMWs7;2Q2bRU~gBtDs>o zNeUgzt#lWV4oq=C=5{Id0)=a+u5HaCtDZwXnX5u!bO%{LbXF-L40}KeG4lG*uU{E_AOMMd4ch=Q9&rc=;3fB`I@EFBuF!XcuT783*FH`4zO zxZ=AOG#fzwnh^u6!|A7Fqf5u{$IesB&EF?V9g5dyhcmbVh)|M3^!U*}qJEYbGFaK2 z#0I`dWniJzl~+;sJs^jty%7`^Yv#{r+=Q<#CleH22pEWpQ)lwX9b5uv064&fPlS+b zqZM<&o~(2`QgUJ$O29zuo%|4(uP+zAeibd;jfc(zz|+6+9EUrZ?#^|ymX-knV0Dsz zFn=Bg(*p-JjWR}+{_C#CZ~dR&on|-C9&{&ij%~0x9gtgIMPCkr_rc{WE_}pL*bCnZ z3d?M3AYq3)iUS7jPOFD3m9DVG)E&SJ1*`YXzZQib9R(``({n~0aGXEhgZnJU3vy*N zlEAeqef_?@nqICTH{?wuZFw#7F{`&i?NLpf<7G2noyziDxMHBmK=Z&P8jf>~^fSVF zFmD1h)DVg7D8erkb}OkfElv2i`s#7j5-;7~&l>SlgLRqNM90B`oFJ!3Z!I+~g7^$B zkD<7Y^U2QID5DVT!a*uS%0aL5KAD#Lk5^|WCC!!OQcFyxCl$386q*ohKGP#?pNL0_ zG0d|NfxU%N?);5-{u0rA@S7+4>7&sDwppXmJaj`?8D#?9@k90l(a-Vg>E`q1zXh9B zEsyo)21!OKE@yf_^P?a!d>O%I$~z&Bg| z{KuO5lVh07O|keMJh@ks$3EfHm`nFk6qNS&_PxPbKN1c~Ds8?;y>OzV;B0$XVQ=LQx12PJ2~x!&?qm%Tl)eivoas}<)&`&84*`tT{?ou45c+RPjX;imIsuwmXJs;5Klbii3#Q0kSLKcW+Y@xKcRce+GJ-RTlpMp(c)D`xrv zd|#_rj!Bm<&cad=Pq($+uKOY#CGCK-8EXOLAo{LJ2l({+_%87YR(e2EErULI*gm@X z*m6LuczdHTQHH`3=)x;unt9KH-4duW3nu}xk&Cu4-DS4wjNG}S$tO5H_$l1*S3Go6 z0HH1rN4WcDUK${}+a@ICZ(ZC#*`6h6EK7)q2OePook_w)c5%-9AxwoT6E*>!XDxpM zy_C$yP!`aN2TiCVLn_z`_E((J%LUYuw%2%(GBL3Cve+5zmepidD|^#$=@2Wfp!?NR zUpV2SwaMg68}9+`X#n-Ust|TK-Qk@HXu7dM*@>KO~@YA_S!geT; zxLp>TbIo9^WI=ZuT?ErRN;LqRSZX$7)+{MdSSiDnSdSwQ+6Yqb#nF393O_Ow-rRZD z1MtC55vP=~4kwe+$#2C8b3Q6*<^!T_D^X($HS$*Ns2(pd5~m<_QgfsetRt77rwh}yjg#yx`@p|%;RnzvAN8~6i5D;EQg*azSU-+F9W;M>-%sM=r4J zY%}@{t+!2883WSGMgw_85U#I}O75Rr0Q_D5;Du8|l@ zHWBq-r2&(pezi>6+daPx-qwVIQ3A6$h}GxIH72G*;HeRgyXKy?Uf!HvVg$M3Vs?lo j7HB*8-{6~e<}KKy%g|C8?m&3=nE}vH(NX@WXdCq(XawjJ diff --git 
a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png deleted file mode 100644 index d8ae03154975f397f8ed1b84f2d4bf9783ecfa26..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10413 zcmV;eC{ovnP){+^kJY@_qlWNt)byXXcl4&di)UgOL4U zf7l=Phy7uH*dML-fsqKMr;DlfM>yz|;&bpF`{OQzgo8jbktkySeg~64fbWuHz_H+% zO2F)JwJEE@HLSkR79_Z#oHbogc3dx%o7^AeCk{b5(&1F_9NvTf!DryJ`XFJT+JS0q z&?sCD-y=8K2W2PRhjJ3<`jzFS2UeBViE9@x1RKUQCZdv7kl1SX?3WZMS(_}*GPxT+MhW0P|fyhZ+Qq30&o zK&_A(Oze8$+U<`PdXPq;v4_f|Urm8qVAY042UnGp45})9cTiQyEh4N`WieG?WwHFJ zL%SQEJASBPNL8tfyeEVAm>Ttneh$6^dT@7TL)6K`4dZuI$Q8$@YC7*NxE8o3xHh;( z)oY%paC7#DbzBq#z7eX{hBSaAFX=&XZgM%%7vkI`tW*yCO_Yg=`yqnAa-v2eeE;?> zc{iKw z56$?22D^!CP)@={l~{!+p^?NV4J00s5s~K!m``K3Z^mK!w_^!uRBfLTqF!aWIQ-yF z+-+mFw$C)OYiVHDrh2UxX&Im_YA#t%&~JYj4^H@@?c?sN*|d{1z)fXCWK#h&a-j`x zMSwIVr!Zx+>*mUE)45>nPAFTm4uSn)0ywG_n3eP}spMCtk;WQXTc!Xa#?G<8~9?@D4_J^SH8;MHSdkm@M;{c4Zl4~|K=yFf32q2}KbIxDWFpb1y zO+OA&=Iq3=s^1(B1GFU0ED0TN)1GUEzJjf&cITr}~_843H9IFf?D zpy-;D=W+{Ha$5$7>!~TGM>3^{(aM!hTwS-Zu6}T3B@Ohtm!x|WXwD0DS$2Sg4MHki zT4wy)C@!)S)O94Q^ENX$IJLgcuiK`aOAMYnR<7i>43I*17(|~2Z^{a28-tFl06j}G z1E(L_b%g+AG(2{IghMo@X493&wrmJ$)etG%R?khj1IO;za&76!!+2C}`5mZmW7T)d zdc5TLAso7|4x4fu(6j?P@#13#aX@*#Nyh;YpF8maDO(w~k+R(hKe!7&`(pji{+WqG zRNJD}1i%xZuq*IN{U@la2#gbNVFCfAchs zIJDcO;{ZH`Z=Jz5RkkxH?-ZOri>KGuU75U|b7#sb@!GV{ltwd6tl0 z`-tj|)YKcR-o#ogdg%auyuQ|?Hi%I3R1^-|ZB z3w@dmquBHyVR{7VswXIVTX$?MPH4+9kb2qjlDK$t-RcV{VoZD69&BtHN{89>gQ~qP zJ3uX1wj2^zXGt+iUU`JHjaZ|tY;IN^;K@-L=fQS>Y@uwVEi&RUN?2Y*+sNids}(cC z+40kwrYD*P3GD#2c-goFwX_(F;ug=ctyz2p&FRs8BZP#KW)rz1wGkz3b++zpGX3NIKL+e&!v|_Kf@T~~axF4tuT$cD=XZI()UWvicEV_jFqjbw^Y;_9AkJsqs?mSQ_V zHd!_~?Uk)r`5Rg=yAOj%Y^~TwjIt7{g{Gt00kYMyk+w^ZgMfMuZBvVP>lJ}>TFiaQ z6}$vw71{x^*|Ko~^_rD(w0N!+0&330f%Q3TNHV+~AX_dQo92j#JW0ofEat`()+cpU zNK-<*Wh>c%oF}ld7(cPM7T>>P3+`N++2#S7TwjYH+FeDL-}5iew@%rhE!V8XXvx!0 
zTFweF>(f3j`6XB-!?_??289+P$hL!oDad&d`knUqYw_}zU&NQL{fPhk`)_>p#vk~F zOaH-9ClAxr#e^P5nv&DV0je~`L#5{FGh$URTHx9AYn@Acj8H9 z-fn2Xa=Bbhm#_bhv)?!+_&C~>bovC&J9ipS=gMNVj42zRq^}*vKi$01ti15vyd!%p zUA9JO)5+CkcwA~i2(aSSaRpH~0l2>#}`U$mAt<;*`UUpCUF!4<_g zFf*C<$Rf;^y{H)XiCNlB=(vxmae|1Pqx`~~S}Rm0li_pUevNx<%Eh8q90Q566YDZZYFMh0VeMrAMOVe1 z|Lz;ye`{f@1!x?J0yCotz`^}fMr`Fm4fEt{bxGcZ@CDfQlmg-(RljEY}^PEkElrDm9b@vQz3{qdC=2bx32OI6ixaob7Peg<(shE$A37*Y0*ydf7hWB3l zfOPA%yE6dnF4t(NpuypoFMj$Fe(uB} zYGE`j2L$`WNWctZJGzc_^Y7cZ=&iGKe5Qp4N#!&iijDjXjTz(3xiMo>J=mmazv7G# zF};w)79FkiA@1zpCm-spe1PcGSD#bY2j6kZTSF>x2d*b>5aJ1Q0i#dXZr;STA6&qX z?AfNYN-*H~;g8?zcE?0p{`DpSKBZ+x+2NX#R$#Yh=T4y^j8P-g+?ON+%kpw5Ksi!b zOAq(oLt>AA{_iWD?hG2?wJ$%XV>2K8a2fw~=WnZlqj?=Lg8tUGU(+#}_pV&l`FXI2 z2R{CgjGSMfif5%=Dvs=1Gg5Q<1A2u%ogU0AeaR=a7WglGq9Gm z05rN_()Itp2xw&&&f%Gd_t?ff9{`jo#qQFme-Q@S8}7!~yjOSWsy>00CD&oc8BE zFMG|E_M?KjbKQ9%c|x42azM)$4)-h1zrz4(v;}}*K(PA#cWCU;R^U~Jl3;7>rw{Cu!{8QN zl(B*ZEn!VUSbEKv??13(3(hAM`|DqSwpn--f-*wJC6w9N`i?w)2q&I8VbU?i)Rp5$ zpRbmO?ySVUW0vO8F+m{!u@5;7*qFB&61$hYbWjGt9T07-U^P?#05ata{Vwd{2a}a; z(QWDK-j|R#Z<>+y4)Emu^ECb8n$m7_4%f@(9^8ck*T(DwCIkV5Cej$Fy(m5INbk)B z81_|%Sz$1T#tN3wg#Zy2eKhpDFrV~OEAFZrs~>OtfgjpaWmJ8GEc7e5$ z<-7`0<%3Bl$~A83zX=m=j13)K`E?&RU1#)%u;U-p*j;=g6-ytEUsw>Kreg^;rRu)?wAO})#2n1X6G=;eY zbpY#7JLDu;AE2T%dC;~}?3TFl3JMDHXKYCH0n`pX@o;Z)fS+3mpgvpH+sc<*x z1F}9*_-oA}DzIg@@Ei1s?3sQ04(rg@i;xN56+FJ0yx!{~|Zn%b_xqcb^P%5t(dMXW@Ug}*T&pN4~-o|+0Y3PH&pF}W=|bT0Q%e706_}svCls?Dd?;u zzf`BxSd7-LQcApTHC}%70KMPb((ph|^QvQq=sA_wK%P6L#o@{e=S=Dp9Q*VlcFK&` z3z4}2a!ZM6K#x2yjjU$pQYbW-n|+%|^QNhAEZ%^{+o;|Dp_Dctk{ReEnaG1N7!M zUvln?NB+f`^cqb${^jex;SpPlIV(gVl3I2ghz8NCZ=kUwM+yh%k@0;{mh_r60fM<7 zQyUMG(-U4kq8@)Rcpf7Gs5P<|e4I7+Y4)N_=QfSdz}A0i8M z<9|WJh7HjV5X(eFBM0>$=J8u=0pwnoia*!0$bca|pm_&(<4!rrxI=n8_RLDeAtY}2 z=*KHo>(0ZuLTbvfXLb_qK-^8I+%| zUdG%Cl=sFd>;Oyj@<24U&RhVc(aBVo=p`QzCVUthI@4N3$j=WxTE)7Iqpe%ok|sRnzE-FFFLy4v@Ojy zAh^N;M6&#AA&{i2o>0u#PM074u4E9~0hJ6dw^~A0!+7s~xzzXy*t&$}*`nH~ad24Swg^YQW%SiNd)(;TZ&v!xo_w?$uA?IrfP_|`m 
zEQFQk^)0w$mv+7L-8Z=N`c!^^cB=rCZUjVG+>M2OQ>B-YZ>N5giD0_7nBKcn9Z(nY zVT8K$EKGZqvp|-)wRvDgk=|8G?b5E#u3g0gVLJp(fT}bAG6o{JwYgv&4v1g=CLIIv zMIDs;tm=7)QDC4e`P->SW@4!&?~R8=%fD+wwQ%fNlz;`*m_7f4lZg zPs+CxK;6mf8GGySjQUzZnze5S&OQAymYz5)_&eH^bn*y2)>B%~UnfXQkL<$*XJ5rj zUfj!-MX2_vYu16CIG-E`Qa)zv+b&q$i!-$Vw2cR#ICW+4KtvPw2|#OCVb?j+tDrN5 z?)7#T8bCM2K|x)hC)UY#!K_emE(FoWtx~UdHXaJ8k-wu&kn8+J-4;A-Q@)_j>(YJY zg?Mu97A%3iAvFK5B_WJYJ=Uk;DLX5%Z$S!1DXUc!tzD^_ios5qQXIOg3I}f~YCb`# zRk6GpUA2J+pg4XtgGkD)Rv#BBbDlJQ4i`ZC2o9iC;vkyV;Ys8tPL2MM0+eN;g~p)} z0w6LgK%2DyWB@z>N{>Q5fDD62D?moT1F($VrU{S^crr8~0`~=JA&cjHO4_~;Wq@Nr zWEemQNj!S?^ny4@yn0cIMFA2Bk;MTr5FUPj42OpoAS2;v4v+wNsNimoCijJ&noYkkmt8oOdws$f#{!w*f?U)Jch8E3A=KN%$ z+~TWqXo1Kw0L2&$j}jo#@V*79M#G~7Xtyqagu%lBw2>bmUGSvS8y4j#ei=rgkL1%f z@7Ap&y`32$qxTGRKt41A?~MHXhN9HfKQK2YxA^)%Jnqcg06k8QB}t7j8Xmm>352H! zplw$Td3)1=B;S71raVS|C4XCE+i!)Y)YsxC zwr{1D2jEFPc?7RGyqCV#udVzd$BRCC0H?lu6o-;y!s{o=UxTz0REZZH+>J9|JAt3s zzmvYE+Eq#889~}zMJ*4&lX>bSjy`sXzE)_;9zIn!*Yltns(4batkeI%Q%T*?_v-l- zwzrm3eQo2^eRVjbFzZgQkn!Qr)?Qv-9>(^*n!7QC+Pie_+=cw@9hkfB2xJx-vh}yA zTVn@TmEvJ#1=R8YJWubbp>9m4%JS)VG&LMlUV!KB-HunhxDSsc$As6z%h&U3vo;k{ zO$HcWI*2C`VCj2X3Q12&RYlshwMk%k0G`!-Fx?$J^uSaSsW%wXr8mn$ z;~AVgF)0R8iD^b{(GvruXp?%J)1xrGDF!ki=FyCE)MFsSVjfM6Au&)Wu}Bi=^k|QH z6l$achszhr(CFcFXd8EPGdXzH1jvCdyxFM(++21qTCwm28srMxgw9+m)jJWN4erJ$ zfHVLZMJ&MMe#UxB{gzxExlj?R><7D^?>gd zIsvP#Th0rRf$)HO7NyhMYMKBt93Bp!1R5YW1IR#lv;!2+Z+#M@Fq;1OKH8?<-rZ>% zn<;qKH8R~3_2@bhB`p7*PXFr}owme&VS;Ayb&TsY1IP$?02pEJib{@y9PbYJ9-F0^9DWM#x0cd9E8d{Nhwu7<=K>8+N^$ZNE0c0dR zf&mgRx77?FBjITdP&~i&$sz#7EWzl}kQ~~U7Pda>u@Fr0w?{q5-~J?^euK+yOKh+@ zK-wS@FtV&4AYl`uO#r1C4No(GOn|2epc(>Df)>{$ZJ_HW%?-am+He4COHWJ0KH7U^ zJ}zBh%m57^@+5I(e{q>?{I1NR0BKHp2%Oha0+beGG(36%GGJC+2~b6`N$@BEs@DQg zX1pBgOSE*}Efmy$I&DJ>^}KXhp?36ES5Hqr^0%LO&a^z*cv>b}Ee=pNt0)6z*0lp< zSV{&gYQPJSfhidrK-D||#TlBCfycn$tyX}D>xy2C#ZNx60osnWp*w3+F|xu#VTHJL zgq)pW3H*WRxp}YA%HipiSp^_NAR?fQ+R6uz;rTqg02z_b!w-<*@IW1C1t<%~d{$u5 
ztf~K`ZN{~oH)~6)SfAzrbq8wx0#N79V@ObTnO>*{L{8A*)}e#1H3DaS0kwz1l{q{-VIh)6$u;94s{*9U z5~XMZ$oNb`HGoXWBy0kx#3Xo{0hGz&9?~NdEngrPj~y9BU6+T4KW#fJ1kU3zQ!wON-a=10NQ87wwb%6LRQHnNzVok~O}hUVsF`(;T3r*TuC}N0kXv5o)1FlPiM+Bqt}hut8}4Q~S}Hl}cCEA^@pEl%fTo9TnOE z5;!qR0U`~r9Ux&7qZFX$wE$!QJWT-AasYwrihB-=rayj^whh-tom(<6q$B9d zZUq^P7R@|EduBNavK9kK0a0o+4?xA*0Wx4#9hQ{S4v_F!bx8Vx+?{3s83>O8AUKu; z7R5-2!lIdB=SZ6jp>5M1b)#+7g073t3W?bexF?D1dr=>Y&`=aP=RG=KRF>NSOQy95 zK)et|<53k_05UKoLpwl*rDX5|WCT1=*3s1jpuM#X5*RF;GwnaH88>Ycu5CP3rYl6q zMjop1khimkM{gLVb|XErK`9BJ!`9JjPoHdbLU(bm z;eEj(uqd?P&>oz1`XpVG5SEpLMGg41O+(c*@m(RvVTLqR$Rvb$EPmC{;Fw=5eU(@q zfM-E*{{K4m?)@;dfs>DWA9{;2*ESMcghxGlkqgj#6g@N7fPjz(bJITSk)MJkc}X&3 zx1n||Scj*RSZZ`#x$)as6IUTgi=&nY;DLm932`IpiqozPb@`WM;c2AddJtCz%c<}x zlTT7LK>|GFFhd$DOoH+&LAOZEBO#raL9xrfVDKn#VxV-BG6@wi5acWy8uM^nb<*3C zF2kbP(>^3_>j4H&AJ*e?wdPcXIU#bR%Y(SN^(B7;+qG*q9Lts!hUfDDKvSRB0+0c->J*@QZ2-mV0!U8Bd1526=;cl}bkQ8tzni+Ng#wO^Uu3(L_tPcUJ2^F{|sY8r}6)1CKU{y0Ag40i>Wq#8V$DMynRd zXk`mr#M7(*DR#7h*J;LQ680?4Yz~kS`8@mp>4Aq_pJ?eknRs%@Ca6=I+r!mym(~ss zA4IM+m~%${$kj2BJP&es;J(Eua`v~}s5PX5=yquq0SGoEfnRZ&amirK05UQetT{mO z+VYs?G@CFn3XA4Hby++zco~HU>eLzaW&yLSEe#Z!GbVCj-N~NF)fFHbEb;NWAI%Ow z1wNeH15|rvqs0JH3^oD)2Bu^v0V+y2DU+}Xpi&+1NE_($Rg19bsnD~MPM#C!sK1x% zAX=wf-MX~Km`A83YRASRU?Q&vfoLGi&p=!xesa=!(en8>x#^F@M!Hf~mK6a~LS$G< zhHij_&#Ef{sw!;`4kW-spbWV@OXl1ZKNeC#V@a6X;(mxdSet;y4)0u*1N9VQ6mnIhyQEZyBO%Gb%x{I6!oXH>p9h>Ks5dJOCM%k^un0ed6UHP%Pb8m@^LR*1I5nOkq_hdUc^+S%FHIjIFJs_SQx=R!_ z{|}V3f?1%o4b%2-m&4)?76nK(Cekx8+8iL`lEGk!m8tc$a$f-|$Uu0~PAo}G2sF?{mwdqxbK&cGQ$%gni}UaT%W z>{iFH*vN(TF1pf6baWg*dmhXpN!;AVi65PqEqZ491+;wOpOAS+8#RZ)#91aeU3opr zM1U0TES(RaEFAz5U^3zeEO9c{qvEDbq@;7OZ2q63IpG(?4?U1W%5uNL;yAjv45nq} z!0F2Bz~yd^b&Rz}5@xDhSt1nNKIG>}ewB_*u5Bn$utQM)S>h>^Dn$#P{*b_Qi}v2A zWlB&7DvMeu3e}jpavVlt4oQvyTVrcNloqGbjn8N#ujME$ULBYWcGoQFO`)jyw?y-1 zd?*fmxYA*8|JiWuY&?g$Do4)Z__4Bjv$8v>bkFVZm;oftBGK_9@@pl%lXjej!A!LC zh#}9ohCi{{ZQ-mp-B&KY>P}({57N+{xyjh8FctPfr+T!$Mn30oz09XHQwIB^dljb1 
z$^SVOsXW(wZ+)uVGjE;TvtW(PvtX@k@RmZ^+(Uch12(V6o&_nG{11DO9u@4h`w=yp@yLR7+-F_P_1>{dzv%Vc z{4?EWO|R#D_cC>41Q@6rEpfZPY}Qsw(iu+VtM zk?VfLxt-`8D*o)6RH0G0sdlU^c5qq%Bu%TN3R6ec{q<$PcmS#o?ctDy1vk>p({m{8 zE>kOk6c$U>a;ZxBKlm)ODnpQ`%TPxJEO2ZmdS9GBJEt$ZhK?H0Xj&UPI5rAX2R88L z$%0cK7N~Y(7NHkw?B3M1K;whO01!A0WE#NW=*IvFVBhg)$LPV1*_EBco1N2*U4tE( zRtl2?YqWMOIBn0yR9sp7qyVcUb1gnBpzXq7P*oT9KOgqljw+zIvtzojb2zbcN;KS) z9hz1SlqysTupC)~JF~`b&#VTY6#sW--*Hp{MHLo1Fn0-5nsA9VKvNapXEcv<*FF9Z XdJ+W}DiIkV00000NkvXXu0mjfKBlg6 diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png deleted file mode 100644 index 2c18de9e66108411737e910f5c1972476f03ddbf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 9128 zcmb`NcT^K!5btji2)!5SAPPuNq)Ls56s4*38hVo^(nUfO6%ZAH(6N9hNR=iCp@USV zNUs_|I-wKc#ou}5-}laWIcKxU$(_yIot@8o_s%{sGSH@@=As4w(CO-E-X`sF|29fE z>HYT9T?zm$_~>e0H4dIw&!!4C9vSZxNlr9*d^_s#H!1R~WS_6MVYz@X@%G!e zXHz-tb|VivQj`iFZDUWNj>i`*9rwT8VC9f`)ww2)D0tG&WBFX^J|oMigqUy#_eV)Q z<3?;pz6pkr(;Z)thNWZ3Tu^XIU(m2~K2{iFEAS`~Gy5VW_tC>i*Cl0kv`b9xtW+!e zPD_a1*)E4YGCWy+8(ZVrP7}Y9URLg*>8E8fyY^0u;VQCkoBQJ<_5zdXl(d!zb~b;b z)6|dkG)>oK`*erN6Q98nTc z*T4b)onLqyA@?UYxy_MYQjd+D&|e(Pm(0oT&BjWQ4@?kFIoB**?M#(;rSUW9SnG<- zSt-|WaL6iG_P3uZd9eIpr{TtNWC*$Hh2Qz?uBS}bIbRfO#e{zRE!IEy&YexD%F}@N zL-y@k#YdI*GK@^S9Mw$gu9^2z1mSnEkrdxz+MPN|ZNhhS)_oYvhM)cLTYGn3J-&{3 z*gO%dE$+F=!pgEJp;TQOxUvmXY0MZXd)l&aIQ@q%&TOO4FwrA~ak$>;=zXV4zzr%` z=0~OcyNxrVAu`L~2ctf1)jOUXrl5QhI{u_3cR4;2>t?n_c`o(TMz?xA14+Wh$Va%BY0&2$WKO9mM2sYf3h-OCY*=ZOJ$Ngw)1D_iorRZXHQZi4&2K7qT927nQC0Lrg3 z(#lL522bDvLQQ|!4#s}u&v;Yf6v=QytSm1*VR`JzNHPFHGlJ!`WMgHC3lNnE^`=*0 zy?^9tJWsJlLSn+d=%5(DNQYCcv%)omexK}hyZmUHWQF=7JRFKXB_b-*?UD4{x!=dVwazRjll3YN!e1GQ6{ViI{ zhkd)N+MWKT`q_V0)j;tA_oAca{;nI(Y$Pb7t7Zgb7)DUREOEf@igE4Q;TqcgkX-wd zJ;8G+7!?>DALr#bk)GNchOvQs{BBN~iU1F0&RMR&ou$CHl>C|ZrZ@PkAenI@K>Al% 
zQ7|N8uxRTq4vM*lnm?oa%}HLn-3G$yJC_b75?=65k%LM)%(H@{N`65=i4pdO>Mz+= zLeav25B?f086=X6O6;%!2@%ZP1|;Nvbnj_2aSc+8ZOx$k{x3Drh^ zc*UWh!@lFm$>1}Uo>u2rUqXSar;=W-2Mqo41Pl(rQD;>HWC;@e#W@Z29HUt(caNqC zC&6BqG(7E8;B^rX*m6|Ejm>-6L>RWQs{?%J*!{N&Cn3FMX$DmBS8~(Emio*Dj(^J_ zk~mE@d*561epZk|Er>78iC#q_4Sp0Y3GD6B@JKKrmyoJG4WGBh)HqTZZw>kH>(OJH zlp#iE)N?g*Z@4^*MV+s+H!!1LJlIN*`JxC#o-v0{2|BS}}kDUMqX8%d%;Zo1pF*{G_rVrzNd`M2ya!T0DJTesuRVwL9u7n&PS ze_~l@1G?`(riUCq#<3T)^gi`sw~pk^JSP})C#_iBKTD*{^N7d0$A0wJ3#IRYe;0q4 zA*$YJb_LE1lo-`!M^fB~U00SLiLywh>%-_CXgSb{ju=7v+FzB+78O;y>TeZvRv&RoWxTLP?d+9Zi&Ypua2+{3 z?&P=TOQKt{%~L~p0$j8^;iia9j_>fKovkcwq%sUQ@nh>Z!)%cfJ0$;z4CPrz6I0OU z@+^ZT$qbq`@V*LyaM7l>CZ1ZQo!IplAN5a81(Tt~ztAbYc(d{@u2@?f2YdnGcoX!#60Ixw-Nvix#$k1X*NJg)beTLqL8^6*<{2f@@ns|Q}RjZ!$JIHK8NbS8xrmu#@ z6ulfiVr7xxNb~dV#acSrSX_pQm;bUeyjdV!{OZy#M4(A` zwu81?V`O!?oZ`D{REMi+x!1hB*6Cy(I?k8T%kET=uKQWo39E}=ca$my=uHTEyP8y z54Nz1YH*)(w%#ztIo^C*PQOjte`Hel~gpFN_jZaXoFZnUzuu<)94E6T<5ZU?s4>c zpU3Uo@d?+!hgYmVil!6X(ly;KNm*OwbI8{z3v|%I_4HT>Nt&7^q0@@SPXaA`iAvAR zSr*v1muELwpeL3wqu$P7L5q4m)-N%|J6fE`4!V+xyrOkr+X2!LT$k#tFYksHJH=n z3F!I2Qe4B5pnFmAer;+($yQcgD*uHlDurPx@2dd)1-RjhQe(5`*~SLS`q|S9v+`3~ zQ>IMi+hcTX^%}_YWT=}koWlGSwSH~mOvRNJ&Sfrc>H__ux(6*kTUubhdoQN>V2}J< zR)ymBx4g=I%zlp1J+QjI7joltSLskIt}qG%d@lfB@0(d>+A&l+Glwv&La86NxDmfT zNv>`p7eT?@iBSF8R6M^wCx1D;HRt!F#6s8>2mF;&B-MF;2m~@G4CaiZ!p=4aG-$V0 zYR+PtSNvY$YwW0OPYxL-i+8&!G0&s(?(IcQ&Iv2 z0Nx*-7_~pZT6#2L-so8nF7QMgH5}#22w+dCGMyllm->HAO8q%eYuJ_BHB7343cyG+ zgo9$W05T7{CPl`Zw^P=q+#rx_`T2%M zMCeCJLfZT%fI{csusPnQ7Xv@XSzVNmPU{iX2w134>~=VfgQ82*rq^p^97wA647vgT`a# z85e!NpbSl#8uA*dnopv4RMby4F4MY{UFn^r{Li3l%Ume;QtBh5?8wCixw0*zSQ${* z6)@M`djm|Nz;H2K_j1ACvx90`pqKN#`9b8Cd=@J|$6R{ZYc5yw){(D1GtABWH=Zy` z-HxQuV(8LOB`UjI4iAOJ34LY@KVEmPb@XIC)FfA6m5B&*8T*hQyR{mweAL1#*kA9n z;O}eZUE%DcD;yjrQM!F!8~hPzPrCH2Fvr-ItjJE$$pV*gv9>ye(q2lsB=uQP$h%X% zlekK6q~fP4niGy&O9mR~_I;)G@;?e;L8#rja{}{3_rR(d$+fAsX?PiFx`2ashkOGP zw9A><#);kE3G}H}!W&WxH1$sg*P@*n!{=#L{PK)y~GHI;RsgpA$#8cpY~ 
zct*9kjG$l!k{*0T43n={dVV!idt6Zw;lPW%!2K;#E>?J>D|V%r^A`&*)MdYZJT>jL z*;x5TTDFevc8OARtqyN`Wyt;0MTTO-DDG|wtNxUqM1$~ye0&&wUtZ&eqI0=0|Y{WT*|Ia1An)J!bjzf9y3P874R^|FamuD zD47YqkS6Zsd3^fEq_zq1i3zN7fM#ldxb7Z@0Y;<&n|qFI`e8q;TO3t$s`geh?U*oK zp&F$0CKJFD-a%BYO^4KA!5J4T1f9rK@Izkpt4qui#^S_s8AE_pvL7$dKQ z*TXfMJYx+MCq$g?pCj@15ZQdjbAm~v`@A?MCg`$$;e!iKvcv423 z^QOF{_mgOGh3-cDZ={Gyr z_&&UYqVw>f(5K`SHp~Mm5XB0N9$~=XOXd$uQNj=bO95ChnZX9K@n&#T?vXPDfqt07xJZVvBuujM>H*4hP6HvbJ~#$K=z-vNQnRCryVz5?3YqR02@1#K{#%aX?h4VQ45b zcmM<+1V?|eCnx}P7(IWh<1mpP1d4*Z4r1WAfB;C4dhrfKPC^**Pz;nD$YOJ0I9i3T zdQ`v*UjtnCM$WL`J8L<$;~1_X+Oyzj(IKG(tLOn!YS8Vny{ z@>lc1XCA-~hhrD7h1@0O)T))gw+GcvsVwxcnaCv{EQzu|qcwKGyiwb`TTP(}njGXHh$KxOryTWq$B1F6I8!hh2O<$rL^FOXZoKME=~3M&0eN93bd- zfpL<(mU)+asMc@#Mvb?Ws^Rw;E;iny$Mb$bu)1ovt0lOm4f(~cAmY<65o0ePN*$EX zrmHUhGI1J_t=@d`{#mmFd?eV^Q&jw>g^;Pf)7JHdLzQB*87{77?Kto0xMvGjC=&M5EOW+c zXpXOY6|Uf)0am19ZLde+hX5J6c11*#mSinvk^A4NWc#m5P)?v~|Bppv*0~T;-^rI9{w3{`~5)bC}`nF?zGx z#@S`#(Q@kl-1Fmze)A@u^#@9=c>MA>$*eslP^G`Zvb5N|sKK{mQ*V?4eX_x+nT?*N zalRRl;P=w1HG57g+d^AJQCZh4&g{?mbJZuj*>jJpGL#!`*C>{MRd4-HML#+BNUG#EHx5`rs8QUMda13u9eMG(lKCYTHCS2gO0L&PIU zkkI-^jv5$aR|blKRsJ6xJ^?au7%A7>eD6+l!ALkEL&*RPl442Nll#UeUv)cn5=YV~ zP)$eQ=SZYMG+hSAy@o*c95}KXP7(~*M%`ovFuZos#RM5t0XkRn?DdjD!7zh+HMGoz6C^Gk*}xdzg{VaE0-2L4An_I# z_)DVjA|u=a+{fkuUkWg+!HA~@f87&ENbQ{u_}}LPin9T}}BZ5K1W#~XT5z0gcc+cy7@$?+tH6Ta*1qVBL@ zBwd%m=LAwRv8~~Cx3MfLmwax@N%=M`ciGYizcDPi#Qug{`#^)V(iZGpR*3ayNFiWv zCT;%Yg?Tn;SO3Pvyu6Dolgt$Pq@8;O(nD{uHM<__6!t9UUP@K#N73GQB){T~9Hpci z<4P6T>Kb;ktBMTne4`e~@)E&sIdENQj5G9OYu`7~bvsRTeRl1z?i^aI{)?VNlekCC zXJKVy+B;Z0|Abe1cpfcW)93y`*4%NW#+1!-OVtut{#3Q5fvBQ-b<*gu4x4f6pmz-x)Q8wc+4G^!kGq??b_{28Zdu9+dS0=wgR`1Va^@f*j96v zE?=;Q{AtjKXi>F3-EkrPfL<`s@S z(Cl$t|NBt^_k;7j{U(%~9iLt{7g5yFfhq?^mE$`_Z>W$9l{seeXUdzmz8$X$3_fz0 zNc_d*naeGkU7&S83}C%)Owd-QTjWCq)4F3puS?Y*tOH3*JX`9t7=HyB%;}BFw)~fX zP3M8Ef?E#|5Tf;EuVktd)#&vh7trJcyxkI{{O|eok{tE^hzi3_4LW$*rN)J?Qmy@$ z@GmJ)5nOLC0(h_C(Ayd(aO3hP5pxuMsRZfvoFgBCNNrsu!(1gLl_W1XDWi)1KiM4& 
z4TFIN4Z44?71-@F^TGn<^DjNF#jfDTD;qdJ36mB3{oK$>kk1T9x32)H^4{v<&J$?GFZQeeKn zog^e?9JHCkaVAg{99*Xytpn)yWZ-y+!;hT(I=Fwaat_Fckc87LJ*r7!)y;@7k^fUK zxl{eySNWG_U%a8X+L`q+Pwk<%iyJN!iw;Q%=1>$p(4~A8CwtPS13^pt$BA_79TEm3 z!hx@gB4KmstaCTszUdc8*ch3y0f@{;*awP0cxYg(J0u?XLQsFzBA;#(`vHd`I*lBM z;(99!j{626=)R8+$DgEz-MfuzaGI&_b*%9#-BUQaw^>IHgp<=gob@UA0r`@#>-qw0 zpfFP4HZ?#}t^J2jFG?J|6<^ALo3?t>Oz5`IuInteCESw+$NTFo3L77A?}>NbqA$vz z-v81kRTwtLT8^1Hkf#X&iRsn`fKmr-Mu&N{*qwp;$qBXyT}BAQ@L;wB^UWEXX)3_b zh&*ke8czIhFd!IxCi_N!jnrKGIQpfPR2xJo1%*JNF^PvDwB;>G~7@ zQVZ23Q}9_P0C|)?QPY(DS0!&Y!!b^`S|XCy zKNy*Kil!;HIXgI}+mn{ko*V0S7_|JPJm`{p{nOe9Vi^>B;a*toh zNY>_;v-=$AgIA44ebwp@a!75wJN7K9j;+SW z8uoQjVUb03=55d=@#Y_9`Fs=Ut|9xs?0ce>@0mn&q+oSJdb^!tTO8;mb$%l));(4- zKPebA@3lPn z@G1otTd9DCo-AAllf-ruy4anJn=H{RXLG>6j;g|@m(&__Lzek=U-sRZzRO1lOrtOJ zm+5k9slTfFKsku7%a$T6ENphjA3uy9eG=kh6ii90n}D&mc!E$-XY)ycsx6qljq9PY zpDzzbG!`4}xmvrE+7f*Jx351b!!}L5XmvDjt;&0$*g9U$nbVZwscA2!5>S?vG~K*d zPzXIIrnkt|yfEO5^dk>cVc0*&Hh$%zYA8nPL(Hwwk?vVuZpJ+&#LxCsujZ^dalGUq zk8X*2y(traI^+1KZEu-(_j%t<)w?tI>hVd#CUfisw!-|mSM{#>X=67C83>oRW^)Nc z_@hYvV5!q}p#c+`qTV9*kqk5GkA6Z;&)MXHw7m;gzS)ito45k#Ejt_oX>5cfTLfXUX@_N^+#UicK@ zbUwcCAj!Nyi??H{sraN8NiTB?aleSuG-iy_c^*{zg2xn*m1e+7rBnP~o!PuP9z$Gcf(C!4f_G&|`v9JI zHr460gE4qwW4yYiYMyx4c#(d_<1JDCcBZLe=D9DE4fC#q8)2D2Dpnaszf0h1)i*7) zxyKd8y*&dyiKySsH2Uj5(~gfdkoWmaI$)6ycN3CquawfZ+R8$$x+k;L>%Fd*;XYy0 zkq~3{maC~f(~h3ZUsXWo-EodvK!+KO{DW8g|IOnpPq%l@9Ky`Dd0%sz0@6$Ox`Aei I20H400LcNok^lez diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png deleted file mode 100644 index beed3cdd2c32af5114a7dc70b9ef5b698eb8797e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 15132 zcmZvDWmr_-8||54h>`B@4yC)hOQZ#cM!EzfhmdZRPLWXQlpaz*O1gvrk&^D_^84TW z@jlOq4`=WFp4extwb#3MjEilFPELs0YL1Js)Fn* zzr}qsbfZ_wbNOa4S@vf>;bE~>+%RD!>v%IFV#WTd^7(B=#T|Xno7mV6xS4f=u6692 
zQq~7{i;;}Y46D{(Y+R?~SpnS3W=+e#JKDJX-SSUi>9(#}mwE5Tv-r0dn5ZY||9_k1 zWM~Q&Gt=O&6oAqZ3T;9&9$g)JWBOFs0NWF6vYJZJ24_?zn}`jXIHjr$^?F69z!2p< zy%t?XyTRP;!zMXPY^&6kR$$J?UW%?3bCC4XDqr@?ukqAzCEf6lUi%~QE1bZLYf8h# zNIFjy{z&gk+iBasaZQZklPN%Bhl~H-pewWJX`t_4w;I)?=gcrEWq1%u$-pwhg=Fn& zj3nJfbY`j%G4F^8@$CZRg?Lweh*w;b>{2YdOIAi*x9?W^yUNovn|q?NJ#6TPeU_fVowC-#v9#b~gYH6zAw5m28>MUeJ4Tj* znIVgljj#XhW$ zhiz?z_2X4xbgPrk6@%1I-IDPigjXj6D_rk=N!MHKhrgxgN|sX9wAG{r8mKBc5uYx! zD6;oWKPFPVaeKY+;_tfGk8dnA3*mxhD6c6ylsqfXvWFU-T3PF_*(Y_!aR4ycp@UiK zL{0B(1-*H{F=ezF{RJj(g)4PzJx50@A1Bg2>XU|TM&*KjHze0G!vbN}?9#L0`)Mh& zSDg1vm!sTu701b=n&--{Q{n2DpuDb{%No!D^gwg^bAW&J!~L20v4&-T0QrdY*80B?ozklkW% z0rk7=VB9&#oB_RdT&RhUD^ z<%mehua9i+?=)hn7$VmdJdx(xObB8b; zd)9+r z`yz+r{dSM5hDz=4ys1#(+WoWqC+KtBRNG8x2R zkNK+s#C-E*)s>kZCpyIRfB`}hQ6FwUXyKlgYs)!v{kjY>{yEe5^Qr5JEe^d*zcU@; zK#oE%1w&_PZ%A@P#G}S>`1qbU0tkHPO<2-5_Uhe0Y6$FovD9c;Ov~qVD?l$$zpcmn z8BGk}4~3UeEkzOUc<9FqtY1TqoY%qGS&?kSM=O3g}NY85}H(VQS~6J6eJsX=%$ zf%etV-q-i9X(#Qm$6xDNs6>@0-*1b4*6TC?1v|R@FkpbQLy%N<#0-I&1swvEMn?Y( zQKWmqz2#a=uq>R|^cdhnkaB3z*DB@@Q=Jpj%9EBXLuo{WDl~W0E}qH^aARnpD#`Dn zAO=+iepMRRSE1j%9nTDc{=3ACQK(De^37Zvsl54F9`aO8G+M-hmV$3r9l|3HavVov z=cO%-IOVsvo}L%}Jm> zX9gR60KV3P&h$KA;XH%c12K@uFzJy5i9S6?U7BKXLk4&WhD>E$HbfP_Ojp5OF9rfm zT$`)n#dWaGB<22Cl)AZ@Gv7i0;!*>IUJv7##H1X4+Wx!Jki<;jka&jGH6W2$nzJ4> z6yD|%yOMzcBZj~}DSWA5Qj5Q$P>edSrrCzs=X;k&irN=Q9KBAfO4RZ>klxjm*H%`2m5c(y7Pw zcP@DyYA!WftG!MB6T>V!I>_ym+&LEFyikRHI`-j@U5hGl(;JWZbO|orN^1|6{D4+0 z>5k@1pQ`!&UM0WB;(#4ds`}Zu6)B_YebI)X)jZRhJn}_frc0jF4SFi~JHS=t;knPP z&yEu(+8%qK>YIlcGahTfF6Ze^7edgT$J`6#2qm|n26OTFDY|d8s~3hl zpLtuXp@mq2GW8<6|E)D{#yU2)#iuPY!=|5Hmo-<*yo(QYr$3HQqx#%vtHjS|I7NiRxC6lDQq< zTXIalFx_Ncd(TZ(!iRaFymyh~tc4h-VJo_vaMKP(y_b-@V9j{@6aA&=*?g2r3#HBa z-Q(IP$--;P*a%%PO{^%D$`G{5nl&>sUgEN|s^PG}Jh>ISvD%;O|psp}p`-pKAK?pbIHTV?a9?u}(q*GCDRrVm> z0lC9`wd;C96R!Yg%?DnK2`W*_@jf%9IPnwdr@BgGxWS)z)J>cDasy)mt3Y7)p=txP zM)#~H^+!85n&7b%$l{U`iUrdD?1+BT#+yClM)OQek##8!6GFE0paMGl~ znJT5wR_VzqeBv^?U47rJ0!hXwG=8QSN^}EyUNDp2J?(D#FGFgCo^@;lRCMe2zczB^ 
zM%9XHn3ccHp;wqZ^Uy8mD<>D6R1W$5gqQ>%@AfWuiX0~?SIt2=9&6BS)f-v(V+-C6 zBfbm+ypV$sk2v=A1#JUeO~Sbved*o%-1Huvn%MCF?%m%fP5;xCPP|-(b1@laO;e4- zd6?k_0KN;j`6NXEVgi#X0MXBw38O@O`lZ=y4(f@Vx@QT9*Vpgk{{$@lzYwyh%?NrN zGtU^kn)F6?fKBPA{djTaw^L#(7F&HK0b>+C#os)3 zXBq#MC^QE6lzK^4733pD>UE36G;-{`GpU&0a|`(V-vTwp@G~>2EL6F$*&3YMPp-<3 z$pGu8`_-xR9b-}m{9;+irLXejrTbK_!ep%zGnh;U{^iGo^_=F2)RW>Gnr99OXB*dm zfO+ugGg0L-0>cKR_lG&~a#|_x2{kD1`&ncdCyi6M^Lm931EU`O+-XCCFYRAnjs5f6 zUa^V+z|fk5UB$rN`lRE$u7^I~$Cjw-;Cp6f)HA(2LU;};f)pd4T8-D?I2up+3G(m$&;vg0~+JOD};L`gqqk*eJg+xpbq{T}SE4${0xj>in~=ldQi1rE&?>CiYw2 z#vg0Xtv2hPZfP@t{cR}nkn`imMzN%Ni-Y?Fuhn*~A(k1`mx6vQI)vLRy&;WKU0n}B z@ZJ|)Fn=>TPu!<>B>2~#eYSLuW5D_)A)V?!{Y4XguE!i#eiyl1d{uE|RTBFea zM(g%RB^85qT#!n$qYwxcyR1CEXmt{nlJiLD0Zs8{OI%+d`MxVXSwT?e&2t6`t3 za4o!LrCv}!1now|E(qC6Hf>E@-0qF^3NbW7_qjxU<9CDT$8j)VXDt{8H;2Pzmw@Nb zJ}1NB7;d^GlLw5^EU`sTe0n9Pg~GmQIXwnxEAeh@zS%X#f?&FG!fvUXW1I^%m4Huq zFb9-|D>sEz%pg}Dy}4S#5$%jBg@1FfhQKlNSk?MlP{oDv8s=i*#C%7KTfKRpT((!vAA*0?h5%4doY~|3yq_DA32&6T2RHbNq-AItD)b&W z5)Ng>T|a!hlRxqb6(lwy3n#TR>Q{5$zoTQ(7Yp23btrx0L6lb;lMIld_ZsBm;X65W zhL~-DK~O*?iR1lG`e>ZDti=^0@Hu{22rk-ri$|Mhlfjx zz}x1wtNp{S65T4sftJev1F_{RMAe{B#a1+VB3lE#HN&bH7Rc8 z9d*c27p;2oA4ZYZSk)abazBuwEu8=L?5J?TG~{R3V8o868I?F z#Lt>o_|ohZd7psYl9Vtz6-np(@R&^Q6yKF@# zKK_Phwv=G^eE6%t(B0N4(**az{Z$|8Nab8SLz)m@0bPk@Wo;!3I&BJu}Fl z{}e^!Iy||DQ~DlD9=@%{OB>I8fpV4ZTC})4v8^-k&+wR4`hMI|wtCe3@xtk*M_gV& zT7}a{1ERd3c8RiWPPBvInQ4k+GPxSExF}CJt9v>(EoD>AsA|3ioYaprn4PVQ}7|zFbK2=iyU{SL8K#I2+N-*;IUC zGNwTD;XDPHkYcjzxc(jT?|J#?A9c3l*&Jc_`dkI4Rs7QC{PM6ty6TzkxCMvgm=@WZ zf59SoAflkydVV7?TYoT5`U(N`-HxGa2z_V)YRIz`HRRE3`12J1-lEtmojvMCPtH+1 z)V=IiqG9TR@`K%FOk2#6!1{1OD;*%xRAYo%)EDc|<)I;%EXi}?^()_B6K`pYE*`4Sg)tmZ&*^v8jAGJgK-rh(nO znii&AGyPojK+Ee9+EI?hH-rm&m>=`lAO7{E>D1JKm7n{&r&z%Cwi})WQZ*k0bJ6u=B0Pn1}ek~+ch_lXwn zuc_uu@YRZb$iGWq5BG|g|^Wd_oh(t2hEHAQ>~0CE_L3eNN1(NZ={TZ z*Q&K4gY{whUfZO+x8Pi73^^HTU(N+4u|z~}-7IGjQufEje1K4zazaTk96zyU#Oomt z{bZ_BZ#I(ren>G~3QNkj-ElHS()&+TCR+bjq4vO-*_o`jyU7mwVd?J!edfIxKubK~ 
znqmum7Gd^m1|fh?4|kW$?Yo6*!cTvq_fNlm%+Olmz3Wf^I(4mQ zO~z#3)9fPojD(VbPK-c6xq)}DM$borMa#X!P?x0&SBqzQG-BST1On6bd~bfeDWpmL zg;dMkgsT6muQ^9L>bR6T?+9!G07EA3XvMR&Q}8^MSfgNeA zEzFXFyts}my(yK#E3|dx>wH+PW-82HFn_p_ z{;sH%Izw2f?je+3ZGMKbJJ%-MUk6I$Q3lW`X#vZ{OC+X9zuDb|vQX4W2a2z2W*Oj)w$<7+lPbGYqEE4!Y z5j4*J(;o`UAc^wryi7M1qZAX{UySopT5y$cT@|8wdo0j-F+*z55(QN4-0X9E2(%0w z->Pj3_BQrPW?JjaUyorsqkqgQ;wow+pkug_qLB3byas`FE+^x`c+_Iv!A2o)GczmY zAV6d5;m~?7FDJ}pHp;5ORZwuDRq(s2BNghbg+aq0nsM$z_3LiUp~h}O&p9WQTkF%8 zM=j%0_<0RSBT*koU?wS=bWkoexJwQclztyKASoPa^=_gN4ebgz`-%PQ4pC%-=4Vq0 zfe#O}LUsDlrtPI4qXRa|3{g~nzfS$+u@EI(83`y$`zM*F4ZrP)V>J3FyYXx}ZGKDg zcnAHvt{Rs*n3G9nWAYgvN_?47{`Qg%8)$u7L&yUCg=`X~0xo?Nm zOT?BaawiXVZT^N9@PB8m9mlRme!pMhW#CUp&O)q1Ff49V5&%z22#hJ2F`M#8APaP0 z$_Rp4aJOUiQWa7(@mp|%WL)nG$d&Zv_rF<$bdOHX?n0#JYw}R-L?73ZR{Dh~d)_hC zut16KfP{BGRQ-I6p%4Q2bsb~&j&!tu<3}y`>iw3ht$>i661@OYn_Xr&XV#5d@S|oP zA@W{))lxW_UJQXd+s5{jYwPj)u*;o$QivH&LtwNF#bMPtindqcy_Sg_0jNOW`lS26z`VMFkJaH+Sv!=ug__rdCdmKpW)`?T6Ob{o>w!vsy+D z-B>}mgAw_|pUbN&6M&;nPF~<=LStpG+Z5n5r71uf?m?gQ-F4dx9x_V$5%CbECK$Gw zzJ2<^i95T446#0C`xOGneN913e!;7o!R%C)^uMCe0=Tn<*P?H{k7Z&~3QPz=NJW=T zj3CEU61-h1U6W|>zbw|;d_CCnt>k5|J0cEO>N_La+8&pSKU3E{M-On-Vw%ehQ{LlX zxIB8%LF!fTxKT!H6<|d62Qh9ehYjV*#xl%&Z~JpAI7ZChyU6I`b9k!^*geM*&r!)0 z`P_*C_$(P{7dfN3zXX2lZVtYo4StL|JW2|=e>3xO1G$K#=;n=dYTEcI0n01mkFdT* zZlxjCcP7Y5aQ>oPVpawo8YKRl#hc>oIaxO{*fKmVk?3H*sQ8bIy$$PNS zm^QUJj;!T<|8X&Tmhjigq?%e(ppMY%uLMndna;mU(!hA{kXVc%0H6AUgIMB;Y2q3as&sY398#kE0 zW83CIlm!|%OO&SzQ41d zS$iN9BrRi!79O=xyI?ngbQV~+RpO` zgt2WYwEdm=V<3qZ)gKkzTAP9Zf$LsE<)l0?cLpV{+UkiYYIQGnS~Bad;H{xUx0IA93P!Z$Ub zRs}&&XlPF1+UESgi+B-d`JNY2Bfq~xE9@Kpnx?;#;mg;m75vQ*?*d4Tztw|nTLS^Y zH-`iqEf>b-r);F3Q~_D`cZH$BGWu)siXg~pRDs3)1|az7kgqJm2#$NR_{p2Y23-4BY)ULyBEa^$KdzDc9uq0^ACB~H-gaD=Y4z@9VVD}V$kHmZY*Zd--RR|Y0w6WlPWsSq`9?!a)pOu312EGz zk4m+W%p>D^0mr(5WfHSjGm4$@-XbLhSU&;M=<@H`iuaG1?)qq49eVAA5|f{k5V){} z8uBYG8s*=a?&=i4q?=aPx<^%phdi8kO`X$JJFg~83BLUMcYF-+MJbGo^^{rW9Z@->vG69q4q3;`%j1PYG2lz1;eHLUAMDldZP&8yIZ=zAT!_W^5Gh_b#n%EiU 
zZ%Fin+oCFPL;K`A8?8xGtUp%fnKU^o)jCC>R2*P%Cfi#_LmHjMEJxhmc}|a?*)R;# zbyHfgLFFpb00`ZaHUnRQmT#aiiK}x0gu+pd23%n_RUjE4QhiC3{(j_k)DA`~jo|p# z#u5J(u73}=8;tpFvdM1RcA}^T|4=?G_T`x+6LdEhUm=K9erRBQI z%4?gf+wXzRB%6mX!*t}t3Kv1nsQ~!hZbTr0bFyUkaDfV!snDh2##9g(Hhul2EW747 zgi;TxQ%{3b>Mc4N=|y#vIG(4HW=>NnpTpmFun$Rj02m`#o`ex0ONfET z4F{r7@emkC;R~!#dbkG?-M#lhIS+y-buu?tP{T}iowTIQI|Q3D*0|PFM=K&Z8(ngl zIFhy237n_38l?NRLR4+dQiB2V$&rEkfgtk?a6l=H7ExIM41_<)P%KaggZNGFqMZAL zMY&tS8=|yPYSZZFA&!dSI@Tu^@(_*Fml5a%4cZC)7jK+63+eEuZ3PCX_~(AjQOo`= zNPnlQ)GVKn42^BzfT?X|&6O%hoWj^?UbjQVlhMl_0`x{xa=q49T>Mx-$^2R5#O^pn z>2!Sz?&CdJ65j%GFWASd4pIV3tzxpdURHySx^q=6dVRBZ3a7`JP?PSBjkcQPh@?pe)x&( zA66UTKY_1wx3-Ur8yZU zi(!nn?u&oDM9#cLFP7RGZ@liCG@JKro%!fz2GqHc@fk04klM@5*ths6nRZJ%lI|p) ztyuO1VIcggf?H~xX6i7k&p4~V9`G>zjntUEflyoQ^SD~$lBIr*#v)di`!hHHzZ~Wd zJ-QNEBRBq)fz4l2#_xXm8YV8KB%v!-2Is(P`1=|D+zIhS-F?ZUgd{4ZvFP};cKr74 zvi0T|HHv$hL!f3guj8b`g!f?>1v>B0gS~UEbJ?|HOB?fc^jFhtGDY1pfHBHP3X70`g0Pl;1%{(WPrw) zLA={hi)#y_&B|CHDe{&@tUa4*`Gx7EV=fZARJ1+2VgS0L3UZC@{Wc`R>bF^Y|J_=) z6@zu_xnjZE0yN`sSuL5S5%*$tR?_Sn;IN zk+q_-5?}{FkQtG0br0boxa+}qf_r@ocNJU^!H6bY#l--XDfxMU;d>>l#G-kxw=U|n z4oX{wIsAKre7G+PF-;OsE5di0T5MG_-(T zhUl%sTLJ_I(vT32H{#nS1y2{d~Bk*>z;1fMDT#15#7$-u6_Yo!o9QuS!|5#-{ zC0)T!;?6@2clqJa$)sMARqIYV;r+ zk0)L=B>56L%h)=EE^|VE0=oK*K#|t8- zuPFs$^fLQzLGuZ2ZmXe@id)*N@}ZDUnL1)Z8A52hime?+&Bx7u|5)K3ImXEMUQge< zM`(Zo{DDFnt^k6F1jF&@18xC^>12aHE)&2k zs@Nwb?4XI^>w*cbU-d#dTM%R#VlaWL2MW8>deH&l@xZNi1uJB>M`h5y{I|JcKhaAgcz;0;FDw2<~EhliI5igwCTS&^FLFZSoB$eD>H zD10LcRu|WoR}}rm2%pHJGsgh+eOu9q0~qG^b(v)v%8_%bfYg<>q0IYcTAhF-kNC49 zGRJPK;g!YDNi0#B-0xu-ox&gG{wQ(DTXtXWgzKH6KjnvR?85x$A$ZN+G0#8>XkFb9 z9zWb_5-`)TxAZ%jIz@ik!2)usZWY?tyjjOd<;04s^5^fjU8zy`7I$70NYN82zW6h| z$X=NbEUMsfM*!<{`)e40n^{H-)`KJX!(mZdv-cC!9L+JvSVnSO(VKcNP;t?UGtk!b zSPgVYsnD9ejE;FGyPg{6YW6R5Q$rGiy%J(H)2LXP4eT;Slga?wulT3;iy&;Ia=@Rj z!U(jtPyK}8ZWprMhYw6rMgQS66{Y=o_anEEOn1Vj*{8icX-1vaY{+vNoJDFj0{pO( zMG_NH%h3QMU|oF!Z9ocohL5ayn*Z36RiYk>2PU&{vAU1j? 
zkRdJ8tizF;3llfJ+zh|bK4_O(7pI-9w^Y4gTB0F9sU?J)5ad=AE{p>o;579Jw#@~5OWbag~+3Mnyph?f@wbwu8 z=fB{(_w#nycZtQsdzOuJ=!+1W3GvhPtLJ9m8OpCA&1MCEcLm9=MUSexJUgvMnqDuz zd3!`HT>912mxR#8IDT6FH+LT`QmrCDq@~pdJ?clm$SLSgUD~0uNXRqN&U+KZqw7Df zzDBzgap!mUAGRk7ciu7Jh?&{>=jdQn1ag0rfaz2*?e8k)dfhWih%4+tNn18&)E9RC<4z zeXoG((fW36d;|?kq_y=zW+bjMr=HBC9G6~Oz67sXY9iWf{^(T=lY^M^#K>_LyRTd# zP2auGUqc^`u^ubR5w4Vs@kxf)dChil)2=KRi>a|4o@pNTPdUTmaKG~`#_vwS6!#k6 z{+4VvCc;c#xdy8hCDR;Cl~`TpA&O_}1i*3^LT54QK|MZcr> z_WFbw0$>}L+Ody2Uo6A7WL7!Jjsi|{&4b%5B5BgX4~e|uY}|YIqYsLi98Q<{`IYRM zg6GJnsy+;=)vhXW#}ZcT6Xz)uFQxpe`U{DB-KsDH#Ubr*#odC)p9`{S*v9t${JC%W zNwRP4qvDI=x+u!)g-*90R-vYQbpgwWYEHiCSSi3znGDt6hfK_&?&t8e#l%}MMpBFl zxE>$Q97^qR@(KeM*(xar8JyGv7=1lKpu)}4U@!(Ggn@EP+h#cPr~OUH-`QqXhlhNd zjl-d^u9-i0$Gp!aVs!#8LeIRnr-PZYrSHxBwm7LpU-rGj%`%3{jJ$YGlC;!ih7QtL z?Zt!uX4Po`%PTiH$H>#58o08=3zvG`f%ntyD#+pAjuhI>e65GIil-1!j zY|&2)#*BgVwZTom3H=~rSH4u71~5Evh9-a_APuJ-&g8=GsZ%XZ`qc>;Jya=i6~{(4 zze`0_$3fz?k)M$&6Q&2k9O@)|ms0J}WX+PQI!AD_7a~rK?MmT=*{6>HgTC8@7F?wW zQvP*i_&d*0XyEkG>uvdgHGS``HxH~dcZ(_r(SdxGqHQ%PTNR$W9pbwF`p%+Ykchrg zd;ZKP$e_{BKpcRu)<0Yc9BtI9zz>QDE10>pjI*RY^gW>ul4rjnPF^nE9*z_fjWPsx z;rz(NO!21+*w8E;HQ$iEs5?KQdY&WrS6@)|)f2@QGGUNb`pZ9QAe|~5VNk^MzNK=| z;9mAK2uc9Z4dpSjUqcHr9b7A0l!Z0R|#ihlchp@I~KLoS?6Doh)_ zu=K%3UGOn9lpxZdn;Jp5l_rCG^PfI$I}&ztJSpaMC0Dy0lkx;${plYda`3~ne*P2} z9ns|~NVrt6b{V?dJkGZr?$|N@3Us`o=$|_;^#S3=1iixlG*FRl!;~WTtHWQYrv4vi zfe1%Iyo&Usa1;vcWijV9f7lG3%s-7n>1JhqP#>q+%Q)cm8&5xe%t7J#7D4;Pq!ZrW z*g^ioamw?yQzmW9rs}H{8t5HMq^f8a;yr5&UFlvWAEjU8sr=MHK{6`(@8X=pB5QW2 z)rThuRkfKID&7*$00)V;uz|kjA&u<%qJ(-ftQI~Y0{FUqmAQ!dX>BIlbU4uR1a+&@ zkmj#sFi6@RVdl;od8!Nb$k?GwV+%UZN9AD$I^SFxGhyZiYBo6^FlHMmi!Ic%74vOR zTbAhK$tdDL$9G>b!@nzjgEd46*Yv8FuSvFht22=+*rv|+4$3b zZ!3S9Pw}ln%eG1#?EZ^BG{yxDUxw|9&~c^5s(?Zdx-((jv z13BIiNg7v<)1Ffv6D%?fSr_TBhX^49!*M=iw(6`RQc?jsR0}$}pNjkz<6%^oMiYn`-l$ug_5e zS1DRhObQInw-Hk}ce)nOJZ9INf!2B`WzZ4KR@X3E!~FpiZ)K(=-8Jv@E0_O7vHoC^ z*mjWnD^9@x&n<51a}BtoDA5<;<}xSCC+OaWNZ$ME3m&cIdTfwC4Zm$M?e4xF(O$|$ 
zrSzuPFiN2WDjj&+{!K)`jnAnWe@$`zFB!7C_VUHc>G-^C$sIK&2Yo??dG8%0cY(-P z1rmXM{)O0gYP&rAn2vYb`0|l9nE3ECc_<5>4C^-IkP5A?DipVEh9TOz&DpiYx%6@C z#Dno^dc`iX8XU-yP(<05{clKW%B~$F$=^>896~*gwp&*&IxfA9fhpjF$7_{qs|GRM zLX+R8N{JxU6-9q%_r?JeOsI^WN_t7?pj&xEkHMow{;zu80jt}tvI zFD>(I?F<}NeZm5#`PrYw0M)P3Kz3*VPJFh2r$Th$n@AOsr`1dhA9WkD|k=MnY0PQDYtoFoJo3AVzoQ(6}uJ5 zwBXm2)hE`7bwu6b&XTa}cPj9p2ZnQpcF_$!1-P{a=mYqW?0lIKJ;w@^$6in|X0*YF`$DQZHSS134zF#>yPW_`4AM znjWs@7CMvwH&w=voOp3Nmp*fLCy%HIhrP5`8tIG_zpnAcnl=|XlAwc5huL$3P(55h z>c_yBe?U^0$VIy65!`OulJGuDnbnWNi(Y(X%(q+=wc|?Q2Wu_JnDJ&$*`0Aw!ZUIi zLNC5ADY4@dQNnc>jc?!5JbOc?nNQyEX>`M5$mfqT$&v=S?+6QQU0tZYtev?)e4p?- zY{z1l6g8L;7w5*j(|auG#MUb~C2FLD6F18@z+LutDU_~ID;*L^^u`B!#;k#f{-zo9?Ko4_oPY}^K;S}Z+?xf&NYM^|v z*pkvo9N^|^q7*<0z0x+Hj+W+}ccPQ$H(-$H-?fpVpC<>uExt9k+(1qEU9M}vo%HvX0RkxaW5 z=KK>pm4^BzfJRm1U%B1g>RZ@jDfLn$`jQ>x1y$v|mymsRDCL?c!YkXHKGa-HgE^c< z&YfRD-oQYl9&jEJOV>1l30cc7hM{sP6OEbF4?M=-nqywL<U9Y?sIr@s$(G5wcSm@dzPD$+RR=zaQD*X%5`4WL^3uN+b)z#*3hP*#P%bC@!UE zZ>`)nYW}1sbTh`W{0WJAY;H1vzX&xGt4PFK9HgIS)leN-3# diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/arrays.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/arrays.xml deleted file mode 100644 index c7cf123788..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/arrays.xml +++ /dev/null @@ -1,39 +0,0 @@ - - - - 1 threads - 2 threads - 4 threads - 8 threads - - - 1 - 2 - 4 - 8 - - - HIGH(only big cores) - LOW(only LITTLE cores) - FULL(all cores) - NO_BIND(depends on system) - RAND_HIGH - RAND_LOW - - - LITE_POWER_HIGH - LITE_POWER_LOW - LITE_POWER_FULL - LITE_POWER_NO_BIND - LITE_POWER_RAND_HIGH - LITE_POWER_RAND_LOW - - - true - false - - - true - false - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/colors.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/colors.xml deleted file mode 100644 index f8ec1f0c3b..0000000000 --- 
a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/colors.xml +++ /dev/null @@ -1,22 +0,0 @@ - - - #008577 - #00574B - #D81B60 - #FF000000 - #00000000 - #00000000 - #FFFFFFFF - - #000000 - #3B85F5 - #F5A623 - #FFFFFF - - #EEEEEE - - #3B85F5 - #333333 - #E5E5E5 - #3b85f5 - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/dimens.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/dimens.xml deleted file mode 100644 index 2df89499da..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/dimens.xml +++ /dev/null @@ -1,17 +0,0 @@ - - - 26dp - 36dp - 34dp - 60dp - 16dp - 67dp - 67dp - 56dp - 56dp - 46dp - 46dp - 32dp - 24dp - 16dp - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/strings.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/strings.xml deleted file mode 100644 index b5c396f5f7..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/strings.xml +++ /dev/null @@ -1,51 +0,0 @@ - - - EasyEdge - - EasyEdge - EasyEdge - EasyEdge - EasyEdge - EasyEdge - - CHOOSE_INSTALLED_MODEL_KEY - MODEL_DIR_KEY - LABEL_PATH_KEY - CPU_THREAD_NUM_KEY - CPU_POWER_MODE_KEY - SCORE_THRESHOLD_KEY - ENABLE_LITE_FP16_MODE_KEY - - 2 - LITE_POWER_HIGH - 0.4 - 0.1 - 0.25 - true - - - models/picodet_s_320_coco_lcnet - labels/coco_label_list.txt - - models - labels/ppocr_keys_v1.txt - - models/MobileNetV1_x0_25_infer - labels/imagenet1k_label_list.txt - - models/scrfd_500m_bnkps_shape320x320_pd - - models/human_pp_humansegv1_lite_192x192_inference_model - - 拍照识别 - 实时识别 - < - 模型名称 - 识别结果 - 序号 - 名称 - 置信度 - 阈值控制 - 重新识别 - 保存结果 - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/styles.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/styles.xml deleted file mode 100644 index 67c1475944..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/styles.xml +++ /dev/null @@ -1,70 +0,0 @@ - - - - - - 
- - - - - - - - - - - - - - - - - diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/values.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/values.xml deleted file mode 100644 index 156146d9ad..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/values/values.xml +++ /dev/null @@ -1,17 +0,0 @@ - - - 120dp - 46px - - 126px - 136px - - 46px - - 36px - - 15dp - - 15dp - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/xml/ocr_settings.xml b/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/xml/ocr_settings.xml deleted file mode 100644 index 692b74b4cd..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/app/src/main/res/xml/ocr_settings.xml +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - \ No newline at end of file diff --git a/examples/vision/ocr/PP-OCRv3/android/build.gradle b/examples/vision/ocr/PP-OCRv3/android/build.gradle deleted file mode 100644 index d8d678b3ff..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/build.gradle +++ /dev/null @@ -1,37 +0,0 @@ -// Top-level build file where you can add configuration options common to all sub-projects/modules. 
-//plugins { -// id 'com.android.application' version '7.2.2' apply false -// id 'com.android.library' version '7.2.2' apply false -//} -// -//task clean(type: Delete) { -// delete rootProject.buildDir -//} - -buildscript { - repositories { - google() - jcenter() - // mavenCentral() - - } - dependencies { - classpath 'com.android.tools.build:gradle:7.2.2' - - // NOTE: Do not place your application dependencies here; they belong - // in the individual module build.gradle files - } -} - -allprojects { - repositories { - google() - jcenter() - // mavenCentral() - - } -} - -task clean(type: Delete) { - delete rootProject.buildDir -} diff --git a/examples/vision/ocr/PP-OCRv3/android/gradle.properties b/examples/vision/ocr/PP-OCRv3/android/gradle.properties deleted file mode 100644 index ae995d47cc..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/gradle.properties +++ /dev/null @@ -1,13 +0,0 @@ -# Project-wide Gradle settings. -# IDE (e.g. Android Studio) users: -# Gradle settings configured through the IDE *will override* -# any settings specified in this file. -# For more details on how to configure your build environment visit -# http://www.gradle.org/docs/current/userguide/build_environment.html -# Specifies the JVM arguments used for the daemon process. -# The setting is particularly useful for tweaking memory settings. -org.gradle.jvmargs=-Xmx3096m -# When configured, Gradle will run in incubating parallel mode. -# This option should only be used with decoupled projects. 
More details, visit -# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects -# org.gradle.parallel=true diff --git a/examples/vision/ocr/PP-OCRv3/android/gradle/wrapper/gradle-wrapper.jar b/examples/vision/ocr/PP-OCRv3/android/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index e708b1c023ec8b20f512888fe07c5bd3ff77bb8f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 59203 zcma&O1CT9Y(k9%tZQHhO+qUh#ZQHhO+qmuS+qP|E@9xZO?0h@l{(r>DQ>P;GjjD{w zH}lENr;dU&FbEU?00aa80D$0M0RRB{U*7-#kbjS|qAG&4l5%47zyJ#WrfA#1$1Ctx zf&Z_d{GW=lf^w2#qRJ|CvSJUi(^E3iv~=^Z(zH}F)3Z%V3`@+rNB7gTVU{Bb~90p|f+0(v;nz01EG7yDMX9@S~__vVgv%rS$+?IH+oZ03D5zYrv|^ zC1J)SruYHmCki$jLBlTaE5&dFG9-kq3!^i>^UQL`%gn6)jz54$WDmeYdsBE9;PqZ_ zoGd=P4+|(-u4U1dbAVQrFWoNgNd;0nrghPFbQrJctO>nwDdI`Q^i0XJDUYm|T|RWc zZ3^Qgo_Qk$%Fvjj-G}1NB#ZJqIkh;kX%V{THPqOyiq)d)0+(r9o(qKlSp*hmK#iIY zA^)Vr$-Hz<#SF=0@tL@;dCQsm`V9s1vYNq}K1B)!XSK?=I1)tX+bUV52$YQu*0%fnWEukW>mxkz+%3-S!oguE8u#MGzST8_Dy^#U?fA@S#K$S@9msUiX!gd_ow>08w5)nX{-KxqMOo7d?k2&?Vf z&diGDtZr(0cwPe9z9FAUSD9KC)7(n^lMWuayCfxzy8EZsns%OEblHFSzP=cL6}?J| z0U$H!4S_TVjj<`6dy^2j`V`)mC;cB%* z8{>_%E1^FH!*{>4a7*C1v>~1*@TMcLK{7nEQ!_igZC}ikJ$*<$yHy>7)oy79A~#xE zWavoJOIOC$5b6*q*F_qN1>2#MY)AXVyr$6x4b=$x^*aqF*L?vmj>Mgv+|ITnw_BoW zO?jwHvNy^prH{9$rrik1#fhyU^MpFqF2fYEt(;4`Q&XWOGDH8k6M=%@fics4ajI;st# zCU^r1CK&|jzUhRMv;+W~6N;u<;#DI6cCw-otsc@IsN3MoSD^O`eNflIoR~l4*&-%RBYk@gb^|-JXs&~KuSEmMxB}xSb z@K76cXD=Y|=I&SNC2E+>Zg?R6E%DGCH5J1nU!A|@eX9oS(WPaMm==k2s_ueCqdZw| z&hqHp)47`c{BgwgvY2{xz%OIkY1xDwkw!<0veB#yF4ZKJyabhyyVS`gZepcFIk%e2 zTcrmt2@-8`7i-@5Nz>oQWFuMC_KlroCl(PLSodswHqJ3fn<;gxg9=}~3x_L3P`9Sn zChIf}8vCHvTriz~T2~FamRi?rh?>3bX1j}%bLH+uFX+p&+^aXbOK7clZxdU~6Uxgy z8R=obwO4dL%pmVo*Ktf=lH6hnlz_5k3cG;m8lgaPp~?eD!Yn2kf)tU6PF{kLyn|oI@eQ`F z3IF7~Blqg8-uwUuWZScRKn%c2_}dXB6Dx_&xR*n9M9LXasJhtZdr$vBY!rP{c@=)& z#!?L$2UrkvClwQO>U*fSMs67oSj2mxiJ$t;E|>q%Kh_GzzWWO&3;ufU%2z%ucBU8H 
z3WIwr$n)cfCXR&>tyB7BcSInK>=ByZA%;cVEJhcg<#6N{aZC4>K41XF>ZgjG`z_u& zGY?;Ad?-sgiOnI`oppF1o1Gurqbi*;#x2>+SSV6|1^G@ooVy@fg?wyf@0Y!UZ4!}nGuLeC^l)6pwkh|oRY`s1Pm$>zZ3u-83T|9 zGaKJIV3_x+u1>cRibsaJpJqhcm%?0-L;2 zitBrdRxNmb0OO2J%Y&Ym(6*`_P3&&5Bw157{o7LFguvxC$4&zTy#U=W*l&(Q2MNO} zfaUwYm{XtILD$3864IA_nn34oVa_g^FRuHL5wdUd)+W-p-iWCKe8m_cMHk+=? zeKX)M?Dt(|{r5t7IenkAXo%&EXIb-i^w+0CX0D=xApC=|Xy(`xy+QG^UyFe z+#J6h_&T5i#sV)hj3D4WN%z;2+jJcZxcI3*CHXGmOF3^)JD5j&wfX)e?-|V0GPuA+ zQFot%aEqGNJJHn$!_}#PaAvQ^{3-Ye7b}rWwrUmX53(|~i0v{}G_sI9uDch_brX&6 zWl5Ndj-AYg(W9CGfQf<6!YmY>Ey)+uYd_JNXH=>|`OH-CDCmcH(0%iD_aLlNHKH z7bcW-^5+QV$jK?R*)wZ>r9t}loM@XN&M-Pw=F#xn(;u3!(3SXXY^@=aoj70;_=QE9 zGghsG3ekq#N||u{4We_25U=y#T*S{4I{++Ku)> zQ!DZW;pVcn>b;&g2;YE#+V`v*Bl&Y-i@X6D*OpNA{G@JAXho&aOk(_j^weW{#3X5Y z%$q_wpb07EYPdmyH(1^09i$ca{O<}7) zRWncXdSPgBE%BM#by!E>tdnc$8RwUJg1*x($6$}ae$e9Knj8gvVZe#bLi!<+&BkFj zg@nOpDneyc+hU9P-;jmOSMN|*H#>^Ez#?;%C3hg_65leSUm;iz)UkW)jX#p)e&S&M z1|a?wDzV5NVnlhRBCd_;F87wp>6c<&nkgvC+!@KGiIqWY4l}=&1w7|r6{oBN8xyzh zG$b#2=RJp_iq6)#t5%yLkKx(0@D=C3w+oiXtSuaQ%I1WIb-eiE$d~!)b@|4XLy!CZ z9p=t=%3ad@Ep+<9003D2KZ5VyP~_n$=;~r&YUg5UZ0KVD&tR1DHy9x)qWtKJp#Kq# zP*8p#W(8JJ_*h_3W}FlvRam?<4Z+-H77^$Lvi+#vmhL9J zJ<1SV45xi;SrO2f=-OB(7#iNA5)x1uNC-yNxUw|!00vcW2PufRm>e~toH;M0Q85MQLWd?3O{i8H+5VkR@l9Dg-ma ze2fZ%>G(u5(k9EHj2L6!;(KZ8%8|*-1V|B#EagbF(rc+5iL_5;Eu)L4Z-V;0HfK4d z*{utLse_rvHZeQ>V5H=f78M3Ntg1BPxFCVD{HbNA6?9*^YIq;B-DJd{Ca2L#)qWP? zvX^NhFmX?CTWw&Ns}lgs;r3i+Bq@y}Ul+U%pzOS0Fcv9~aB(0!>GT0)NO?p=25LjN z2bh>6RhgqD7bQj#k-KOm@JLgMa6>%-ok1WpOe)FS^XOU{c?d5shG(lIn3GiVBxmg`u%-j=)^v&pX1JecJics3&jvPI)mDut52? 
z3jEA)DM%}BYbxxKrizVYwq?(P&19EXlwD9^-6J+4!}9{ywR9Gk42jjAURAF&EO|~N z)?s>$Da@ikI4|^z0e{r`J8zIs>SpM~Vn^{3fArRu;?+43>lD+^XtUcY1HidJwnR6+ z!;oG2=B6Z_=M%*{z-RaHc(n|1RTKQdNjjV!Pn9lFt^4w|AeN06*j}ZyhqZ^!-=cyGP_ShV1rGxkx8t zB;8`h!S{LD%ot``700d0@Grql(DTt4Awgmi+Yr0@#jbe=2#UkK%rv=OLqF)9D7D1j z!~McAwMYkeaL$~kI~90)5vBhBzWYc3Cj1WI0RS`z000R8-@ET0dA~*r(gSiCJmQMN&4%1D zyVNf0?}sBH8zNbBLn>~(W{d3%@kL_eQ6jEcR{l>C|JK z(R-fA!z|TTRG40|zv}7E@PqCAXP3n`;%|SCQ|ZS%ym$I{`}t3KPL&^l5`3>yah4*6 zifO#{VNz3)?ZL$be;NEaAk9b#{tV?V7 zP|wf5YA*1;s<)9A4~l3BHzG&HH`1xNr#%){4xZ!jq%o=7nN*wMuXlFV{HaiQLJ`5G zBhDi#D(m`Q1pLh@Tq+L;OwuC52RdW7b8}~60WCOK5iYMUad9}7aWBuILb({5=z~YF zt?*Jr5NG+WadM{mDL>GyiByCuR)hd zA=HM?J6l1Xv0Dl+LW@w$OTcEoOda^nFCw*Sy^I@$sSuneMl{4ys)|RY#9&NxW4S)9 zq|%83IpslTLoz~&vTo!Ga@?rj_kw{|k{nv+w&Ku?fyk4Ki4I?);M|5Axm)t+BaE)D zm(`AQ#k^DWrjbuXoJf2{Aj^KT zFb1zMSqxq|vceV+Mf-)$oPflsO$@*A0n0Z!R{&(xh8s}=;t(lIy zv$S8x>m;vQNHuRzoaOo?eiWFe{0;$s`Bc+Osz~}Van${u;g(su`3lJ^TEfo~nERfP z)?aFzpDgnLYiERsKPu|0tq4l2wT)Atr6Qb%m-AUn6HnCue*yWICp7TjW$@sO zm5rm4aTcPQ(rfi7a`xP7cKCFrJD}*&_~xgLyr^-bmsL}y;A5P|al8J3WUoBSjqu%v zxC;mK!g(7r6RRJ852Z~feoC&sD3(6}^5-uLK8o)9{8L_%%rItZK9C){UxB|;G>JbP zsRRtS4-3B*5c+K2kvmgZK8472%l>3cntWUOVHxB|{Ay~aOg5RN;{PJgeVD*H%ac+y!h#wi%o2bF2Ca8IyMyH{>4#{E_8u^@+l-+n=V}Sq?$O z{091@v%Bd*3pk0^2UtiF9Z+(a@wy6 zUdw8J*ze$K#=$48IBi1U%;hmhO>lu!uU;+RS}p&6@rQila7WftH->*A4=5W|Fmtze z)7E}jh@cbmr9iup^i%*(uF%LG&!+Fyl@LFA-}Ca#bxRfDJAiR2dt6644TaYw1Ma79 zt8&DYj31j^5WPNf5P&{)J?WlCe@<3u^78wnd(Ja4^a>{^Tw}W>|Cjt^If|7l^l)^Q zbz|7~CF(k_9~n|h;ysZ+jHzkXf(*O*@5m zLzUmbHp=x!Q|!9NVXyipZ3)^GuIG$k;D)EK!a5=8MFLI_lpf`HPKl=-Ww%z8H_0$j ztJ||IfFG1lE9nmQ0+jPQy zCBdKkjArH@K7jVcMNz);Q(Q^R{d5G?-kk;Uu_IXSyWB)~KGIizZL(^&qF;|1PI7!E zTP`%l)gpX|OFn&)M%txpQ2F!hdA~hX1Cm5)IrdljqzRg!f{mN%G~H1&oqe`5eJCIF zHdD7O;AX-{XEV(a`gBFJ9ews#CVS2y!&>Cm_dm3C8*n3MA*e67(WC?uP@8TXuMroq z{#w$%z@CBIkRM7?}Xib+>hRjy?%G!fiw8! 
z8(gB+8J~KOU}yO7UGm&1g_MDJ$IXS!`+*b*QW2x)9>K~Y*E&bYMnjl6h!{17_8d!%&9D`a7r&LKZjC<&XOvTRaKJ1 zUY@hl5^R&kZl3lU3njk`3dPzxj$2foOL26r(9zsVF3n_F#v)s5vv3@dgs|lP#eylq62{<-vczqP!RpVBTgI>@O6&sU>W|do17+#OzQ7o5A$ICH z?GqwqnK^n2%LR;$^oZM;)+>$X3s2n}2jZ7CdWIW0lnGK-b#EG01)P@aU`pg}th&J-TrU`tIpb5t((0eu|!u zQz+3ZiOQ^?RxxK4;zs=l8q!-n7X{@jSwK(iqNFiRColuEOg}!7cyZi`iBX4g1pNBj zAPzL?P^Ljhn;1$r8?bc=#n|Ed7wB&oHcw()&*k#SS#h}jO?ZB246EGItsz*;^&tzp zu^YJ0=lwsi`eP_pU8}6JA7MS;9pfD;DsSsLo~ogzMNP70@@;Fm8f0^;>$Z>~}GWRw!W5J3tNX*^2+1f3hz{~rIzJo z6W%J(H!g-eI_J1>0juX$X4Cl6i+3wbc~k146UIX&G22}WE>0ga#WLsn9tY(&29zBvH1$`iWtTe zG2jYl@P!P)eb<5DsR72BdI7-zP&cZNI{7q3e@?N8IKc4DE#UVr->|-ryuJXk^u^>4 z$3wE~=q390;XuOQP~TNoDR?#|NSPJ%sTMInA6*rJ%go|=YjGe!B>z6u$IhgQSwoV* zjy3F2#I>uK{42{&IqP59)Y(1*Z>>#W8rCf4_eVsH)`v!P#^;BgzKDR`ARGEZzkNX+ zJUQu=*-ol=Xqqt5=`=pA@BIn@6a9G8C{c&`i^(i+BxQO9?YZ3iu%$$da&Kb?2kCCo zo7t$UpSFWqmydXf@l3bVJ=%K?SSw)|?srhJ-1ZdFu*5QhL$~-IQS!K1s@XzAtv6*Y zl8@(5BlWYLt1yAWy?rMD&bwze8bC3-GfNH=p zynNFCdxyX?K&G(ZZ)afguQ2|r;XoV^=^(;Cku#qYn4Lus`UeKt6rAlFo_rU`|Rq z&G?~iWMBio<78of-2X(ZYHx~=U0Vz4btyXkctMKdc9UM!vYr~B-(>)(Hc|D zMzkN4!PBg%tZoh+=Gba!0++d193gbMk2&krfDgcbx0jI92cq?FFESVg0D$>F+bil} zY~$)|>1HZsX=5sAZ2WgPB5P=8X#TI+NQ(M~GqyVB53c6IdX=k>Wu@A0Svf5#?uHaF zsYn|koIi3$(%GZ2+G+7Fv^lHTb#5b8sAHSTnL^qWZLM<(1|9|QFw9pnRU{svj}_Al zL)b9>fN{QiA($8peNEJyy`(a{&uh-T4_kdZFIVsKKVM(?05}76EEz?#W za^fiZOAd14IJ4zLX-n7Lq0qlQ^lW8Cvz4UKkV9~P}>sq0?xD3vg+$4vLm~C(+ zM{-3Z#qnZ09bJ>}j?6ry^h+@PfaD7*jZxBEY4)UG&daWb??6)TP+|3#Z&?GL?1i+280CFsE|vIXQbm| zM}Pk!U`U5NsNbyKzkrul-DzwB{X?n3E6?TUHr{M&+R*2%yOiXdW-_2Yd6?38M9Vy^ z*lE%gA{wwoSR~vN0=no}tP2Ul5Gk5M(Xq`$nw#ndFk`tcpd5A=Idue`XZ!FS>Q zG^0w#>P4pPG+*NC9gLP4x2m=cKP}YuS!l^?sHSFftZy{4CoQrb_ z^20(NnG`wAhMI=eq)SsIE~&Gp9Ne0nD4%Xiu|0Fj1UFk?6avDqjdXz{O1nKao*46y zT8~iA%Exu=G#{x=KD;_C&M+Zx4+n`sHT>^>=-1YM;H<72k>$py1?F3#T1*ef9mLZw z5naLQr?n7K;2l+{_uIw*_1nsTn~I|kkCgrn;|G~##hM;9l7Jy$yJfmk+&}W@JeKcF zx@@Woiz8qdi|D%aH3XTx5*wDlbs?dC1_nrFpm^QbG@wM=i2?Zg;$VK!c^Dp8<}BTI 
zyRhAq@#%2pGV49*Y5_mV4+OICP|%I(dQ7x=6Ob}>EjnB_-_18*xrY?b%-yEDT(wrO z9RY2QT0`_OpGfMObKHV;QLVnrK%mc?$WAdIT`kJQT^n%GuzE7|9@k3ci5fYOh(287 zuIbg!GB3xLg$YN=n)^pHGB0jH+_iIiC=nUcD;G6LuJsjn2VI1cyZx=a?ShCsF==QK z;q~*m&}L<-cb+mDDXzvvrRsybcgQ;Vg21P(uLv5I+eGc7o7tc6`;OA9{soHFOz zT~2?>Ts}gprIX$wRBb4yE>ot<8+*Bv`qbSDv*VtRi|cyWS>)Fjs>fkNOH-+PX&4(~ z&)T8Zam2L6puQl?;5zg9h<}k4#|yH9czHw;1jw-pwBM*O2hUR6yvHATrI%^mvs9q_ z&ccT0>f#eDG<^WG^q@oVqlJrhxH)dcq2cty@l3~|5#UDdExyXUmLQ}f4#;6fI{f^t zDCsgIJ~0`af%YR%Ma5VQq-p21k`vaBu6WE?66+5=XUd%Ay%D$irN>5LhluRWt7 zov-=f>QbMk*G##&DTQyou$s7UqjjW@k6=!I@!k+S{pP8R(2=e@io;N8E`EOB;OGoI zw6Q+{X1_I{OO0HPpBz!X!@`5YQ2)t{+!?M_iH25X(d~-Zx~cXnS9z>u?+If|iNJbx zyFU2d1!ITX64D|lE0Z{dLRqL1Ajj=CCMfC4lD3&mYR_R_VZ>_7_~|<^o*%_&jevU+ zQ4|qzci=0}Jydw|LXLCrOl1_P6Xf@c0$ieK2^7@A9UbF{@V_0p%lqW|L?5k>bVM8|p5v&2g;~r>B8uo<4N+`B zH{J)h;SYiIVx@#jI&p-v3dwL5QNV1oxPr8J%ooezTnLW>i*3Isb49%5i!&ac_dEXv zvXmVUck^QHmyrF8>CGXijC_R-y(Qr{3Zt~EmW)-nC!tiH`wlw5D*W7Pip;T?&j%kX z6DkZX4&}iw>hE(boLyjOoupf6JpvBG8}jIh!!VhnD0>}KSMMo{1#uU6kiFcA04~|7 zVO8eI&x1`g4CZ<2cYUI(n#wz2MtVFHx47yE5eL~8bot~>EHbevSt}LLMQX?odD{Ux zJMnam{d)W4da{l7&y-JrgiU~qY3$~}_F#G7|MxT)e;G{U`In&?`j<5D->}cb{}{T(4DF0BOk-=1195KB-E*o@c?`>y#4=dMtYtSY=&L{!TAjFVcq0y@AH`vH! 
z$41+u!Ld&}F^COPgL(EE{0X7LY&%D7-(?!kjFF7=qw<;`V{nwWBq<)1QiGJgUc^Vz ztMUlq1bZqKn17|6x6iAHbWc~l1HcmAxr%$Puv!znW)!JiukwIrqQ00|H$Z)OmGG@= zv%A8*4cq}(?qn4rN6o`$Y))(MyXr8R<2S^J+v(wmFmtac!%VOfN?&(8Nr!T@kV`N; z*Q33V3t`^rN&aBiHet)18wy{*wi1=W!B%B-Q6}SCrUl$~Hl{@!95ydml@FK8P=u4s z4e*7gV2s=YxEvskw2Ju!2%{8h01rx-3`NCPc(O zH&J0VH5etNB2KY6k4R@2Wvl^Ck$MoR3=)|SEclT2ccJ!RI9Nuter7u9@;sWf-%um;GfI!=eEIQ2l2p_YWUd{|6EG ze{yO6;lMc>;2tPrsNdi@&1K6(1;|$xe8vLgiouj%QD%gYk`4p{Ktv9|j+!OF-P?@p z;}SV|oIK)iwlBs+`ROXkhd&NK zzo__r!B>tOXpBJMDcv!Mq54P+n4(@dijL^EpO1wdg~q+!DT3lB<>9AANSe!T1XgC=J^)IP0XEZ()_vpu!!3HQyJhwh?r`Ae%Yr~b% zO*NY9t9#qWa@GCPYOF9aron7thfWT`eujS4`t2uG6)~JRTI;f(ZuoRQwjZjp5Pg34 z)rp$)Kr?R+KdJ;IO;pM{$6|2y=k_siqvp%)2||cHTe|b5Ht8&A{wazGNca zX$Ol?H)E_R@SDi~4{d-|8nGFhZPW;Cts1;08TwUvLLv&_2$O6Vt=M)X;g%HUr$&06 zISZb(6)Q3%?;3r~*3~USIg=HcJhFtHhIV(siOwV&QkQe#J%H9&E21!C*d@ln3E@J* zVqRO^<)V^ky-R|%{(9`l-(JXq9J)1r$`uQ8a}$vr9E^nNiI*thK8=&UZ0dsFN_eSl z(q~lnD?EymWLsNa3|1{CRPW60>DSkY9YQ;$4o3W7Ms&@&lv9eH!tk~N&dhqX&>K@} zi1g~GqglxkZ5pEFkllJ)Ta1I^c&Bt6#r(QLQ02yHTaJB~- zCcE=5tmi`UA>@P=1LBfBiqk)HB4t8D?02;9eXj~kVPwv?m{5&!&TFYhu>3=_ zsGmYZ^mo*-j69-42y&Jj0cBLLEulNRZ9vXE)8~mt9C#;tZs;=#M=1*hebkS;7(aGf zcs7zH(I8Eui9UU4L--))yy`&d&$In&VA2?DAEss4LAPCLd>-$i?lpXvn!gu^JJ$(DoUlc6wE98VLZ*z`QGQov5l4Fm_h?V-;mHLYDVOwKz7>e4+%AzeO>P6v}ndPW| zM>m#6Tnp7K?0mbK=>gV}=@k*0Mr_PVAgGMu$j+pWxzq4MAa&jpCDU&-5eH27Iz>m^ zax1?*HhG%pJ((tkR(V(O(L%7v7L%!_X->IjS3H5kuXQT2!ow(;%FDE>16&3r){!ex zhf==oJ!}YU89C9@mfDq!P3S4yx$aGB?rbtVH?sHpg?J5C->!_FHM%Hl3#D4eplxzQ zRA+<@LD%LKSkTk2NyWCg7u=$%F#;SIL44~S_OGR}JqX}X+=bc@swpiClB`Zbz|f!4 z7Ysah7OkR8liXfI`}IIwtEoL}(URrGe;IM8%{>b1SsqXh)~w}P>yiFRaE>}rEnNkT z!HXZUtxUp1NmFm)Dm@-{FI^aRQqpSkz}ZSyKR%Y}YHNzBk)ZIp} zMtS=aMvkgWKm9&oTcU0?S|L~CDqA+sHpOxwnswF-fEG)cXCzUR?ps@tZa$=O)=L+5 zf%m58cq8g_o}3?Bhh+c!w4(7AjxwQ3>WnVi<{{38g7yFboo>q|+7qs<$8CPXUFAN< zG&}BHbbyQ5n|qqSr?U~GY{@GJ{(Jny{bMaOG{|IkUj7tj^9pa9|FB_<+KHLxSxR;@ 
zHpS$4V)PP+tx}22fWx(Ku9y+}Ap;VZqD0AZW4gCDTPCG=zgJmF{|x;(rvdM|2|9a}cex6xrMkERnkE;}jvU-kmzd%_J50$M`lIPCKf+^*zL=@LW`1SaEc%=m zQ+lT06Gw+wVwvQ9fZ~#qd430v2HndFsBa9WjD0P}K(rZYdAt^5WQIvb%D^Q|pkVE^ zte$&#~zmULFACGfS#g=2OLOnIf2Of-k!(BIHjs77nr!5Q1*I9 z1%?=~#Oss!rV~?-6Gm~BWJiA4mJ5TY&iPm_$)H1_rTltuU1F3I(qTQ^U$S>%$l z)Wx1}R?ij0idp@8w-p!Oz{&*W;v*IA;JFHA9%nUvVDy7Q8woheC#|8QuDZb-L_5@R zOqHwrh|mVL9b=+$nJxM`3eE{O$sCt$UK^2@L$R(r^-_+z?lOo+me-VW=Zw z-Bn>$4ovfWd%SPY`ab-u9{INc*k2h+yH%toDHIyqQ zO68=u`N}RIIs7lsn1D){)~%>ByF<>i@qFb<-axvu(Z+6t7v<^z&gm9McRB~BIaDn$ z#xSGT!rzgad8o>~kyj#h1?7g96tOcCJniQ+*#=b7wPio>|6a1Z?_(TS{)KrPe}(8j z!#&A=k(&Pj^F;r)CI=Z{LVu>uj!_W1q4b`N1}E(i%;BWjbEcnD=mv$FL$l?zS6bW!{$7j1GR5ocn94P2u{ z70tAAcpqtQo<@cXw~@i-@6B23;317|l~S>CB?hR5qJ%J3EFgyBdJd^fHZu7AzHF(BQ!tyAz^L0`X z23S4Fe{2X$W0$zu9gm%rg~A>ijaE#GlYlrF9$ds^QtaszE#4M(OLVP2O-;XdT(XIC zatwzF*)1c+t~c{L=fMG8Z=k5lv>U0;C{caN1NItnuSMp)6G3mbahu>E#sj&oy94KC zpH}8oEw{G@N3pvHhp{^-YaZeH;K+T_1AUv;IKD<=mv^&Ueegrb!yf`4VlRl$M?wsl zZyFol(2|_QM`e_2lYSABpKR{{NlxlDSYQNkS;J66aT#MSiTx~;tUmvs-b*CrR4w=f z8+0;*th6kfZ3|5!Icx3RV11sp=?`0Jy3Fs0N4GZQMN=8HmT6%x9@{Dza)k}UwL6JT zHRDh;%!XwXr6yuuy`4;Xsn0zlR$k%r%9abS1;_v?`HX_hI|+EibVnlyE@3aL5vhQq zlIG?tN^w@0(v9M*&L+{_+RQZw=o|&BRPGB>e5=ys7H`nc8nx)|-g;s7mRc7hg{GJC zAe^vCIJhajmm7C6g! 
zL&!WAQ~5d_5)00?w_*|*H>3$loHrvFbitw#WvLB!JASO?#5Ig5$Ys10n>e4|3d;tS zELJ0|R4n3Az(Fl3-r^QiV_C;)lQ1_CW{5bKS15U|E9?ZgLec@%kXr84>5jV2a5v=w z?pB1GPdxD$IQL4)G||B_lI+A=08MUFFR4MxfGOu07vfIm+j=z9tp~5i_6jb`tR>qV z$#`=BQ*jpCjm$F0+F)L%xRlnS%#&gro6PiRfu^l!EVan|r3y}AHJQOORGx4~ z&<)3=K-tx518DZyp%|!EqpU!+X3Et7n2AaC5(AtrkW>_57i}$eqs$rupubg0a1+WO zGHZKLN2L0D;ab%{_S1Plm|hx8R?O14*w*f&2&bB050n!R2by zw!@XOQx$SqZ5I<(Qu$V6g>o#A!JVwErWv#(Pjx=KeS0@hxr4?13zj#oWwPS(7Ro|v z>Mp@Kmxo79q|}!5qtX2-O@U&&@6s~!I&)1WQIl?lTnh6UdKT_1R640S4~f=_xoN3- zI+O)$R@RjV$F=>Ti7BlnG1-cFKCC(t|Qjm{SalS~V-tX#+2ekRhwmN zZr`8{QF6y~Z!D|{=1*2D-JUa<(1Z=;!Ei!KiRNH?o{p5o3crFF=_pX9O-YyJchr$~ zRC`+G+8kx~fD2k*ZIiiIGR<8r&M@3H?%JVOfE>)})7ScOd&?OjgAGT@WVNSCZ8N(p zuQG~76GE3%(%h1*vUXg$vH{ua0b`sQ4f0*y=u~lgyb^!#CcPJa2mkSEHGLsnO^kb$ zru5_l#nu=Y{rSMWiYx?nO{8I!gH+?wEj~UM?IrG}E|bRIBUM>UlY<`T1EHpRr36vv zBi&dG8oxS|J$!zoaq{+JpJy+O^W(nt*|#g32bd&K^w-t>!Vu9N!k9eA8r!Xc{utY> zg9aZ(D2E0gL#W0MdjwES-7~Wa8iubPrd?8-$C4BP?*wok&O8+ykOx{P=Izx+G~hM8 z*9?BYz!T8~dzcZr#ux8kS7u7r@A#DogBH8km8Ry4slyie^n|GrTbO|cLhpqgMdsjX zJ_LdmM#I&4LqqsOUIXK8gW;V0B(7^$y#h3h>J0k^WJfAMeYek%Y-Dcb_+0zPJez!GM zAmJ1u;*rK=FNM0Nf}Y!!P9c4)HIkMnq^b;JFd!S3?_Qi2G#LIQ)TF|iHl~WKK6JmK zbv7rPE6VkYr_%_BT}CK8h=?%pk@3cz(UrZ{@h40%XgThP*-Oeo`T0eq9 zA8BnWZKzCy5e&&_GEsU4*;_k}(8l_&al5K-V*BFM=O~;MgRkYsOs%9eOY6s6AtE*<7GQAR2ulC3RAJrG_P1iQK5Z~&B z&f8X<>yJV6)oDGIlS$Y*D^Rj(cszTy5c81a5IwBr`BtnC6_e`ArI8CaTX_%rx7;cn zR-0?J_LFg*?(#n~G8cXut(1nVF0Oka$A$1FGcERU<^ggx;p@CZc?3UB41RY+wLS`LWFNSs~YP zuw1@DNN3lTd|jDL7gjBsd9}wIw}4xT2+8dBQzI00m<@?c2L%>}QLfK5%r!a-iII`p zX@`VEUH)uj^$;7jVUYdADQ2k*!1O3WdfgF?OMtUXNpQ1}QINamBTKDuv19^{$`8A1 zeq%q*O0mi@(%sZU>Xdb0Ru96CFqk9-L3pzLVsMQ`Xpa~N6CR{9Rm2)A|CI21L(%GW zh&)Y$BNHa=FD+=mBw3{qTgw)j0b!Eahs!rZnpu)z!!E$*eXE~##yaXz`KE5(nQM`s zD!$vW9XH)iMxu9R>r$VlLk9oIR%HxpUiW=BK@4U)|1WNQ=mz9a z^!KkO=>GaJ!GBXm{KJj^;kh-MkUlEQ%lza`-G&}C5y1>La1sR6hT=d*NeCnuK%_LV zOXt$}iP6(YJKc9j-Fxq~*ItVUqljQ8?oaysB-EYtFQp9oxZ|5m0^Hq(qV!S+hq#g( z?|i*H2MIr^Kxgz+3vIljQ*Feejy6S4v~jKEPTF~Qhq!(ms5>NGtRgO5vfPPc4Z^AM 
zTj!`5xEreIN)vaNxa|q6qWdg>+T`Ol0Uz)ckXBXEGvPNEL3R8hB3=C5`@=SYgAju1 z!)UBr{2~=~xa{b8>x2@C7weRAEuatC)3pkRhT#pMPTpSbA|tan%U7NGMvzmF?c!V8 z=pEWxbdXbTAGtWTyI?Fml%lEr-^AE}w#l(<7OIw;ctw}imYax&vR4UYNJZK6P7ZOd zP87XfhnUHxCUHhM@b*NbTi#(-8|wcv%3BGNs#zRCVV(W?1Qj6^PPQa<{yaBwZ`+<`w|;rqUY_C z&AeyKwwf*q#OW-F()lir=T^<^wjK65Lif$puuU5+tk$;e_EJ;Lu+pH>=-8=PDhkBg z8cWt%@$Sc#C6F$Vd+0507;{OOyT7Hs%nKS88q-W!$f~9*WGBpHGgNp}=C*7!RiZ5s zn1L_DbKF@B8kwhDiLKRB@lsXVVLK|ph=w%_`#owlf@s@V(pa`GY$8h%;-#h@TsO|Y8V=n@*!Rog7<7Cid%apR|x zOjhHCyfbIt%+*PCveTEcuiDi%Wx;O;+K=W?OFUV%)%~6;gl?<0%)?snDDqIvkHF{ zyI02)+lI9ov42^hL>ZRrh*HhjF9B$A@=H94iaBESBF=eC_KT$8A@uB^6$~o?3Wm5t1OIaqF^~><2?4e3c&)@wKn9bD? zoeCs;H>b8DL^F&>Xw-xjZEUFFTv>JD^O#1E#)CMBaG4DX9bD(Wtc8Rzq}9soQ8`jf zeSnHOL}<+WVSKp4kkq&?SbETjq6yr@4%SAqOG=9E(3YeLG9dtV+8vmzq+6PFPk{L; z(&d++iu=^F%b+ea$i2UeTC{R*0Isk;vFK!no<;L+(`y`3&H-~VTdKROkdyowo1iqR zbVW(3`+(PQ2>TKY>N!jGmGo7oeoB8O|P_!Ic@ zZ^;3dnuXo;WJ?S+)%P>{Hcg!Jz#2SI(s&dY4QAy_vRlmOh)QHvs_7c&zkJCmJGVvV zX;Mtb>QE+xp`KyciG$Cn*0?AK%-a|=o!+7x&&yzHQOS>8=B*R=niSnta^Pxp1`=md z#;$pS$4WCT?mbiCYU?FcHGZ#)kHVJTTBt^%XE(Q};aaO=Zik0UgLcc0I(tUpt(>|& zcxB_|fxCF7>&~5eJ=Dpn&5Aj{A^cV^^}(7w#p;HG&Q)EaN~~EqrE1qKrMAc&WXIE;>@<&)5;gD2?={Xf@Mvn@OJKw=8Mgn z!JUFMwD+s==JpjhroT&d{$kQAy%+d`a*XxDEVxy3`NHzmITrE`o!;5ClXNPb4t*8P zzAivdr{j_v!=9!^?T3y?gzmqDWX6mkzhIzJ-3S{T5bcCFMr&RPDryMcdwbBuZbsgN zGrp@^i?rcfN7v0NKGzDPGE#4yszxu=I_`MI%Z|10nFjU-UjQXXA?k8Pk|OE<(?ae) zE%vG#eZAlj*E7_3dx#Zz4kMLj>H^;}33UAankJiDy5ZvEhrjr`!9eMD8COp}U*hP+ zF}KIYx@pkccIgyxFm#LNw~G&`;o&5)2`5aogs`1~7cMZQ7zj!%L4E`2yzlQN6REX20&O<9 zKV6fyr)TScJPPzNTC2gL+0x#=u>(({{D7j)c-%tvqls3#Y?Z1m zV5WUE)zdJ{$p>yX;^P!UcXP?UD~YM;IRa#Rs5~l+*$&nO(;Ers`G=0D!twR(0GF@c zHl9E5DQI}Oz74n zfKP>&$q0($T4y$6w(p=ERAFh+>n%iaeRA%!T%<^+pg?M)@ucY<&59$x9M#n+V&>}=nO9wCV{O~lg&v#+jcUj(tQ z`0u1YH)-`U$15a{pBkGyPL0THv1P|4e@pf@3IBZS4dVJPo#H>pWq%Lr0YS-SeWash z8R7=jb28KPMI|_lo#GEO|5B?N_e``H*23{~a!AmUJ+fb4HX-%QI@lSEUxKlGV7z7Q zSKw@-TR>@1RL%w{x}dW#k1NgW+q4yt2Xf1J62Bx*O^WG8OJ|FqI4&@d3_o8Id@*)4 
zYrk=>@!wv~mh7YWv*bZhxqSmFh2Xq)o=m;%n$I?GSz49l1$xRpPu_^N(vZ>*>Z<04 z2+rP70oM=NDysd!@fQdM2OcyT?3T^Eb@lIC-UG=Bw{BjQ&P`KCv$AcJ;?`vdZ4){d z&gkoUK{$!$$K`3*O-jyM1~p-7T*qb)Ys>Myt^;#1&a%O@x8A+E>! zY8=eD`ZG)LVagDLBeHg>=atOG?Kr%h4B%E6m@J^C+U|y)XX@f z8oyJDW|9g=<#f<{JRr{y#~euMnv)`7j=%cHWLc}ngjq~7k**6%4u>Px&W%4D94(r* z+akunK}O0DC2A%Xo9jyF;DobX?!1I(7%}@7F>i%&nk*LMO)bMGg2N+1iqtg+r(70q zF5{Msgsm5GS7DT`kBsjMvOrkx&|EU!{{~gL4d2MWrAT=KBQ-^zQCUq{5PD1orxlIL zq;CvlWx#f1NWvh`hg011I%?T_s!e38l*lWVt|~z-PO4~~1g)SrJ|>*tXh=QfXT)%( z+ex+inPvD&O4Ur;JGz>$sUOnWdpSLcm1X%aQDw4{dB!cnj`^muI$CJ2%p&-kULVCE z>$eMR36kN$wCPR+OFDM3-U(VOrp9k3)lI&YVFqd;Kpz~K)@Fa&FRw}L(SoD z9B4a+hQzZT-BnVltst&=kq6Y(f^S4hIGNKYBgMxGJ^;2yrO}P3;r)(-I-CZ)26Y6? z&rzHI_1GCvGkgy-t1E;r^3Le30|%$ebDRu2+gdLG)r=A~Qz`}~&L@aGJ{}vVs_GE* zVUjFnzHiXfKQbpv&bR&}l2bzIjAooB)=-XNcYmrGmBh(&iu@o!^hn0^#}m2yZZUK8 zufVm7Gq0y`Mj;9b>`c?&PZkU0j4>IL=UL&-Lp3j&47B5pAW4JceG{!XCA)kT<%2nqCxj<)uy6XR_uws~>_MEKPOpAQ!H zkn>FKh)<9DwwS*|Y(q?$^N!6(51O0 z^JM~Ax{AI1Oj$fs-S5d4T7Z_i1?{%0SsIuQ&r8#(JA=2iLcTN+?>wOL532%&dMYkT z*T5xepC+V6zxhS@vNbMoi|i)=rpli@R9~P!39tWbSSb904ekv7D#quKbgFEMTb48P zuq(VJ+&L8aWU(_FCD$3^uD!YM%O^K(dvy~Wm2hUuh6bD|#(I39Xt>N1Y{ZqXL`Fg6 zKQ?T2htHN!(Bx;tV2bfTtIj7e)liN-29s1kew>v(D^@)#v;}C4-G=7x#;-dM4yRWm zyY`cS21ulzMK{PoaQ6xChEZ}o_#}X-o}<&0)$1#3we?+QeLt;aVCjeA)hn!}UaKt< zat1fHEx13y-rXNMvpUUmCVzocPmN~-Y4(YJvQ#db)4|%B!rBsgAe+*yor~}FrNH08 z3V!97S}D7d$zbSD{$z;@IYMxM6aHdypIuS*pr_U6;#Y!_?0i|&yU*@16l z*dcMqDQgfNBf}?quiu4e>H)yTVfsp#f+Du0@=Kc41QockXkCkvu>FBd6Q+@FL!(Yx z2`YuX#eMEiLEDhp+9uFqME_E^faV&~9qjBHJkIp~%$x^bN=N)K@kvSVEMdDuzA0sn z88CBG?`RX1@#hQNd`o^V{37)!w|nA)QfiYBE^m=yQKv-fQF+UCMcuEe1d4BH7$?>b zJl-r9@0^Ie=)guO1vOd=i$_4sz>y3x^R7n4ED!5oXL3@5**h(xr%Hv)_gILarO46q+MaDOF%ChaymKoI6JU5Pg;7#2n9-18|S1;AK+ zgsn6;k6-%!QD>D?cFy}8F;r@z8H9xN1jsOBw2vQONVqBVEbkiNUqgw~*!^##ht>w0 zUOykwH=$LwX2j&nLy=@{hr)2O&-wm-NyjW7n~Zs9UlH;P7iP3 zI}S(r0YFVYacnKH(+{*)Tbw)@;6>%=&Th=+Z6NHo_tR|JCI8TJiXv2N7ei7M^Q+RM z?9o`meH$5Yi;@9XaNR#jIK^&{N|DYNNbtdb)XW1Lv2k{E>;?F`#Pq|&_;gm~&~Zc9 
zf+6ZE%{x4|{YdtE?a^gKyzr}dA>OxQv+pq|@IXL%WS0CiX!V zm$fCePA%lU{%pTKD7|5NJHeXg=I0jL@$tOF@K*MI$)f?om)D63K*M|r`gb9edD1~Y zc|w7N)Y%do7=0{RC|AziW7#am$)9jciRJ?IWl9PE{G3U+$%FcyKs_0Cgq`=K3@ttV z9g;M!3z~f_?P%y3-ph%vBMeS@p7P&Ea8M@97+%XEj*(1E6vHj==d zjsoviB>j^$_^OI_DEPvFkVo(BGRo%cJeD){6Uckei=~1}>sp299|IRjhXe)%?uP0I zF5+>?0#Ye}T^Y$u_rc4=lPcq4K^D(TZG-w30-YiEM=dcK+4#o*>lJ8&JLi+3UcpZk z!^?95S^C0ja^jwP`|{<+3cBVog$(mRdQmadS+Vh~z zS@|P}=|z3P6uS+&@QsMp0no9Od&27O&14zHXGAOEy zh~OKpymK5C%;LLb467@KgIiVwYbYd6wFxI{0-~MOGfTq$nBTB!{SrWmL9Hs}C&l&l#m?s*{tA?BHS4mVKHAVMqm63H<|c5n0~k)-kbg zXidai&9ZUy0~WFYYKT;oe~rytRk?)r8bptITsWj(@HLI;@=v5|XUnSls7$uaxFRL+ zRVMGuL3w}NbV1`^=Pw*0?>bm8+xfeY(1PikW*PB>>Tq(FR`91N0c2&>lL2sZo5=VD zQY{>7dh_TX98L2)n{2OV=T10~*YzX27i2Q7W86M4$?gZIXZaBq#sA*{PH8){|GUi;oM>e?ua7eF4WFuFYZSG| zze?srg|5Ti8Og{O zeFxuw9!U+zhyk?@w zjsA6(oKD=Ka;A>Ca)oPORxK+kxH#O@zhC!!XS4@=swnuMk>t+JmLmFiE^1aX3f<)D@`%K0FGK^gg1a1j>zi z2KhV>sjU7AX3F$SEqrXSC}fRx64GDoc%!u2Yag68Lw@w9v;xOONf@o)Lc|Uh3<21ctTYu-mFZuHk*+R{GjXHIGq3p)tFtQp%TYqD=j1&y)>@zxoxUJ!G@ zgI0XKmP6MNzw>nRxK$-Gbzs}dyfFzt>#5;f6oR27ql!%+{tr+(`(>%51|k`ML} zY4eE)Lxq|JMas(;JibNQds1bUB&r}ydMQXBY4x(^&fY_&LlQC)3hylc$~8&~|06-D z#T+%66rYbHX%^KuqJED_wuGB+=h`nWA!>1n0)3wZrBG3%`b^Ozv6__dNa@%V14|!D zQ?o$z5u0^8`giv%qE!BzZ!3j;BlDlJDk)h@9{nSQeEk!z9RGW) z${RSF3phEM*ce*>Xdp}585vj$|40=&S{S-GTiE?Op*vY&Lvr9}BO$XWy80IF+6@%n z5*2ueT_g@ofP#u5pxb7n*fv^Xtt7&?SRc{*2Ka-*!BuOpf}neHGCiHy$@Ka1^Dint z;DkmIL$-e)rj4o2WQV%Gy;Xg(_Bh#qeOsTM2f@KEe~4kJ8kNLQ+;(!j^bgJMcNhvklP5Z6I+9Fq@c&D~8Fb-4rmDT!MB5QC{Dsb;BharP*O;SF4& zc$wj-7Oep7#$WZN!1nznc@Vb<_Dn%ga-O#J(l=OGB`dy=Sy&$(5-n3zzu%d7E#^8`T@}V+5B;PP8J14#4cCPw-SQTdGa2gWL0*zKM z#DfSXs_iWOMt)0*+Y>Lkd=LlyoHjublNLefhKBv@JoC>P7N1_#> zv=mLWe96%EY;!ZGSQDbZWb#;tzqAGgx~uk+-$+2_8U`!ypbwXl z^2E-FkM1?lY@yt8=J3%QK+xaZ6ok=-y%=KXCD^0r!5vUneW>95PzCkOPO*t}p$;-> ze5j-BLT_;)cZQzR2CEsm@rU7GZfFtdp*a|g4wDr%8?2QkIGasRfDWT-Dvy*U{?IHT z*}wGnzdlSptl#ZF^sf)KT|BJs&kLG91^A6ls{CzFprZ6-Y!V0Xysh%9p%iMd7HLsS zN+^Un$tDV)T@i!v?3o0Fsx2qI(AX_$dDkBzQ@fRM%n 
zRXk6hb9Py#JXUs+7)w@eo;g%QQ95Yq!K_d=z{0dGS+pToEI6=Bo8+{k$7&Z zo4>PH(`ce8E-Ps&uv`NQ;U$%t;w~|@E3WVOCi~R4oj5wP?%<*1C%}Jq%a^q~T7u>K zML5AKfQDv6>PuT`{SrKHRAF+^&edg6+5R_#H?Lz3iGoWo#PCEd0DS;)2U({{X#zU^ zw_xv{4x7|t!S)>44J;KfA|DC?;uQ($l+5Vp7oeqf7{GBF9356nx|&B~gs+@N^gSdd zvb*>&W)|u#F{Z_b`f#GVtQ`pYv3#||N{xj1NgB<#=Odt6{eB%#9RLt5v zIi|0u70`#ai}9fJjKv7dE!9ZrOIX!3{$z_K5FBd-Kp-&e4(J$LD-)NMTp^_pB`RT; zftVVlK2g@+1Ahv2$D){@Y#cL#dUj9*&%#6 zd2m9{1NYp>)6=oAvqdCn5#cx{AJ%S8skUgMglu2*IAtd+z1>B&`MuEAS(D(<6X#Lj z?f4CFx$)M&$=7*>9v1ER4b6!SIz-m0e{o0BfkySREchp?WdVPpQCh!q$t>?rL!&Jg zd#heM;&~A}VEm8Dvy&P|J*eAV&w!&Nx6HFV&B8jJFVTmgLaswn!cx$&%JbTsloz!3 zMEz1d`k==`Ueub_JAy_&`!ogbwx27^ZXgFNAbx=g_I~5nO^r)}&myw~+yY*cJl4$I znNJ32M&K=0(2Dj_>@39`3=FX!v3nZHno_@q^!y}%(yw0PqOo=);6Y@&ylVe>nMOZ~ zd>j#QQSBn3oaWd;qy$&5(5H$Ayi)0haAYO6TH>FR?rhqHmNOO+(})NB zLI@B@v0)eq!ug`>G<@htRlp3n!EpU|n+G+AvXFrWSUsLMBfL*ZB`CRsIVHNTR&b?K zxBgsN0BjfB>UVcJ|x%=-zb%OV7lmZc& zxiupadZVF7)6QuhoY;;FK2b*qL0J-Rn-8!X4ZY$-ZSUXV5DFd7`T41c(#lAeLMoeT z4%g655v@7AqT!i@)Edt5JMbN(=Q-6{=L4iG8RA%}w;&pKmtWvI4?G9pVRp|RTw`g0 zD5c12B&A2&P6Ng~8WM2eIW=wxd?r7A*N+&!Be7PX3s|7~z=APxm=A?5 zt>xB4WG|*Td@VX{Rs)PV0|yK`oI3^xn(4c_j&vgxk_Y3o(-`_5o`V zRTghg6%l@(qodXN;dB#+OKJEEvhfcnc#BeO2|E(5df-!fKDZ!%9!^BJ_4)9P+9Dq5 zK1=(v?KmIp34r?z{NEWnLB3Px{XYwy-akun4F7xTRr2^zeYW{gcK9)>aJDdU5;w5@ zak=<+-PLH-|04pelTb%ULpuuuJC7DgyT@D|p{!V!0v3KpDnRjANN12q6SUR3mb9<- z>2r~IApQGhstZ!3*?5V z8#)hJ0TdZg0M-BK#nGFP>$i=qk82DO z7h;Ft!D5E15OgW)&%lej*?^1~2=*Z5$2VX>V{x8SC+{i10BbtUk9@I#Vi&hX)q

Q!LwySI{Bnv%Sm)yh{^sSVJ8&h_D-BJ_YZe5eCaAWU9b$O2c z$T|{vWVRtOL!xC0DTc(Qbe`ItNtt5hr<)VijD0{U;T#bUEp381_y`%ZIav?kuYG{iyYdEBPW=*xNSc;Rlt6~F4M`5G+VtOjc z*0qGzCb@gME5udTjJA-9O<&TWd~}ysBd(eVT1-H82-doyH9RST)|+Pb{o*;$j9Tjs zhU!IlsPsj8=(x3bAKJTopW3^6AKROHR^7wZ185wJGVhA~hEc|LP;k7NEz-@4p5o}F z`AD6naG3(n=NF9HTH81=F+Q|JOz$7wm9I<+#BSmB@o_cLt2GkW9|?7mM;r!JZp89l zbo!Hp8=n!XH1{GwaDU+k)pGp`C|cXkCU5%vcH)+v@0eK>%7gWxmuMu9YLlChA|_D@ zi#5zovN_!a-0?~pUV-Rj*1P)KwdU-LguR>YM&*Nen+ln8Q$?WFCJg%DY%K}2!!1FE zDv-A%Cbwo^p(lzac&_TZ-l#9kq`mhLcY3h9ZTUVCM(Ad&=EriQY5{jJv<5K&g|*Lk zgV%ILnf1%8V2B0E&;Sp4sYbYOvvMebLwYwzkRQ#F8GpTQq#uv=J`uaSJ34OWITeSGo6+-8Xw znCk*n{kdDEi)Hi&u^)~cs@iyCkFWB2SWZU|Uc%^43ZIZQ-vWNExCCtDWjqHs;;tWf$v{}0{p0Rvxkq``)*>+Akq%|Na zA`@~-Vfe|+(AIlqru+7Ceh4nsVmO9p9jc8}HX^W&ViBDXT+uXbT#R#idPn&L>+#b6 zflC-4C5-X;kUnR~L>PSLh*gvL68}RBsu#2l`s_9KjUWRhiqF`j)`y`2`YU(>3bdBj z?>iyjEhe-~$^I5!nn%B6Wh+I`FvLNvauve~eX<+Ipl&04 zT}};W&1a3%W?dJ2=N#0t?e+aK+%t}5q%jSLvp3jZ%?&F}nOOWr>+{GFIa%wO_2`et z=JzoRR~}iKuuR+azPI8;Gf9)z3kyA4EIOSl!sRR$DlW}0>&?GbgPojmjmnln;cTqCt=ADbE zZ8GAnoM+S1(5$i8^O4t`ue;vO4i}z0wz-QEIVe5_u03;}-!G1NyY8;h^}y;tzY}i5 zqQr#Ur3Fy8sSa$Q0ys+f`!`+>9WbvU_I`Sj;$4{S>O3?#inLHCrtLy~!s#WXV=oVP zeE93*Nc`PBi4q@%Ao$x4lw9vLHM!6mn3-b_cebF|n-2vt-zYVF_&sDE--J-P;2WHo z+@n2areE0o$LjvjlV2X7ZU@j+`{*8zq`JR3gKF#EW|#+{nMyo-a>nFFTg&vhyT=b} zDa8+v0(Dgx0yRL@ZXOYIlVSZ0|MFizy0VPW8;AfA5|pe!#j zX}Py^8fl5SyS4g1WSKKtnyP+_PoOwMMwu`(i@Z)diJp~U54*-miOchy7Z35eL>^M z4p<-aIxH4VUZgS783@H%M7P9hX>t{|RU7$n4T(brCG#h9e9p! 
z+o`i;EGGq3&pF;~5V~eBD}lC)>if$w%Vf}AFxGqO88|ApfHf&Bvu+xdG)@vuF}Yvk z)o;~k-%+0K0g+L`Wala!$=ZV|z$e%>f0%XoLib%)!R^RoS+{!#X?h-6uu zF&&KxORdZU&EwQFITIRLo(7TA3W}y6X{?Y%y2j0It!ekU#<)$qghZtpcS>L3uh`Uj z7GY;6f$9qKynP#oS3$$a{p^{D+0oJQ71`1?OAn_m8)UGZmj3l*ZI)`V-a>MKGGFG< z&^jg#Ok%(hhm>hSrZ5;Qga4u(?^i>GiW_j9%_7M>j(^|Om$#{k+^*ULnEgzW_1gCICtAD^WpC`A z{9&DXkG#01Xo)U$OC(L5Y$DQ|Q4C6CjUKk1UkPj$nXH##J{c8e#K|&{mA*;b$r0E4 zUNo0jthwA(c&N1l=PEe8Rw_8cEl|-eya9z&H3#n`B$t#+aJ03RFMzrV@gowbe8v(c zIFM60^0&lCFO10NU4w@|61xiZ4CVXeaKjd;d?sv52XM*lS8XiVjgWpRB;&U_C0g+`6B5V&w|O6B*_q zsATxL!M}+$He)1eOWECce#eS@2n^xhlB4<_Nn?yCVEQWDs(r`|@2GqLe<#(|&P0U? z$7V5IgpWf09uIf_RazRwC?qEqRaHyL?iiS05UiGesJy%^>-C{{ypTBI&B0-iUYhk> zIk<5xpsuV@g|z(AZD+C-;A!fTG=df1=<%nxy(a(IS+U{ME4ZbDEBtcD_3V=icT6*_ z)>|J?>&6%nvHhZERBtjK+s4xnut*@>GAmA5m*OTp$!^CHTr}vM4n(X1Q*;{e-Rd2BCF-u@1ZGm z!S8hJ6L=Gl4T_SDa7Xx|-{4mxveJg=ctf`BJ*fy!yF6Dz&?w(Q_6B}WQVtNI!BVBC zKfX<>7vd6C96}XAQmF-Jd?1Q4eTfRB3q7hCh0f!(JkdWT5<{iAE#dKy*Jxq&3a1@~ z8C||Dn2mFNyrUV|<-)C^_y7@8c2Fz+2jrae9deBDu;U}tJ{^xAdxCD248(k;dCJ%o z`y3sADe>U%suxwwv~8A1+R$VB=Q?%U?4joI$um;aH+eCrBqpn- z%79D_7rb;R-;-9RTrwi9dPlg8&@tfWhhZ(Vx&1PQ+6(huX`;M9x~LrW~~#3{j0Bh2kDU$}@!fFQej4VGkJv?M4rU^x!RU zEwhu$!CA_iDjFjrJa`aocySDX16?~;+wgav;}Zut6Mg%C4>}8FL?8)Kgwc(Qlj{@#2Pt0?G`$h7P#M+qoXtlV@d}%c&OzO+QYKK`kyXaK{U(O^2DyIXCZlNQjt0^8~8JzNGrIxhj}}M z&~QZlbx%t;MJ(Vux;2tgNKGlAqphLq%pd}JG9uoVHUo?|hN{pLQ6Em%r*+7t^<);X zm~6=qChlNAVXNN*Sow->*4;}T;l;D1I-5T{Bif@4_}=>l`tK;qqDdt5zvisCKhMAH z#r}`)7VW?LZqfdmXQ%zo5bJ00{Xb9^YKrk0Nf|oIW*K@(=`o2Vndz}ZDyk{!u}PVx zzd--+_WC*U{~DH3{?GI64IB+@On&@9X>EUAo&L+G{L^dozaI4C3G#2wr~hseW@K&g zKWs{uHu-9Je!3;4pE>eBltKUXb^*hG8I&413)$J&{D4N%7PcloU6bn%jPxJyQL?g* z9g+YFFEDiE`8rW^laCNzQmi7CTnPfwyg3VDHRAl>h=In6jeaVOP@!-CP60j3+#vpL zEYmh_oP0{-gTe7Or`L6x)6w?77QVi~jD8lWN@3RHcm80iV%M1A!+Y6iHM)05iC64tb$X2lV_%Txk@0l^hZqi^%Z?#- zE;LE0uFx)R08_S-#(wC=dS&}vj6P4>5ZWjhthP=*Hht&TdLtKDR;rXEX4*z0h74FA zMCINqrh3Vq;s%3MC1YL`{WjIAPkVL#3rj^9Pj9Ss7>7duy!9H0vYF%>1jh)EPqvlr6h%R%CxDsk| 
z!BACz7E%j?bm=pH6Eaw{+suniuY7C9Ut~1cWfOX9KW9=H><&kQlinPV3h9R>3nJvK z4L9(DRM=x;R&d#a@oFY7mB|m8h4692U5eYfcw|QKwqRsshN(q^v$4$)HgPpAJDJ`I zkqjq(8Cd!K!+wCd=d@w%~e$=gdUgD&wj$LQ1r>-E=O@c ze+Z$x{>6(JA-fNVr)X;*)40Eym1TtUZI1Pwwx1hUi+G1Jlk~vCYeXMNYtr)1?qwyg zsX_e*$h?380O00ou?0R@7-Fc59o$UvyVs4cUbujHUA>sH!}L54>`e` zHUx#Q+Hn&Og#YVOuo*niy*GU3rH;%f``nk#NN5-xrZ34NeH$l`4@t);4(+0|Z#I>Y z)~Kzs#exIAaf--65L0UHT_SvV8O2WYeD>Mq^Y6L!Xu8%vnpofG@w!}R7M28?i1*T&zp3X4^OMCY6(Dg<-! zXmcGQrRgHXGYre7GfTJ)rhl|rs%abKT_Nt24_Q``XH{88NVPW+`x4ZdrMuO0iZ0g` z%p}y};~T5gbb9SeL8BSc`SO#ixC$@QhXxZ=B}L`tP}&k?1oSPS=4%{UOHe0<_XWln zwbl5cn(j-qK`)vGHY5B5C|QZd5)W7c@{bNVXqJ!!n$^ufc?N9C-BF2QK1(kv++h!>$QbAjq)_b$$PcJdV+F7hz0Hu@ zqj+}m0qn{t^tD3DfBb~0B36|Q`bs*xs|$i^G4uNUEBl4g;op-;Wl~iThgga?+dL7s zUP(8lMO?g{GcYpDS{NM!UA8Hco?#}eNEioRBHy4`mq!Pd-9@-97|k$hpEX>xoX+dY zDr$wfm^P&}Wu{!%?)U_(%Mn79$(ywvu*kJ9r4u|MyYLI_67U7%6Gd_vb##Nerf@>& z8W11z$$~xEZt$dPG}+*IZky+os5Ju2eRi;1=rUEeIn>t-AzC_IGM-IXWK3^6QNU+2pe=MBn4I*R@A%-iLDCOHTE-O^wo$sL_h{dcPl=^muAQb`_BRm};=cy{qSkui;`WSsj9%c^+bIDQ z0`_?KX0<-=o!t{u(Ln)v>%VGL z0pC=GB7*AQ?N7N{ut*a%MH-tdtNmNC+Yf$|KS)BW(gQJ*z$d{+{j?(e&hgTy^2|AR9vx1Xre2fagGv0YXWqtNkg*v%40v?BJBt|f9wX5 z{QTlCM}b-0{mV?IG>TW_BdviUKhtosrBqdfq&Frdz>cF~yK{P@(w{Vr7z2qKFwLhc zQuogKO@~YwyS9%+d-zD7mJG~@?EFJLSn!a&mhE5$_4xBl&6QHMzL?CdzEnC~C3$X@ zvY!{_GR06ep5;<#cKCSJ%srxX=+pn?ywDwtJ2{TV;0DKBO2t++B(tIO4)Wh`rD13P z4fE$#%zkd=UzOB74gi=-*CuID&Z3zI^-`4U^S?dHxK8fP*;fE|a(KYMgMUo`THIS1f!*6dOI2 zFjC3O=-AL`6=9pp;`CYPTdVX z8(*?V&%QoipuH0>WKlL8A*zTKckD!paN@~hh zmXzm~qZhMGVdQGd=AG8&20HW0RGV8X{$9LldFZYm zE?}`Q3i?xJRz43S?VFMmqRyvWaS#(~Lempg9nTM$EFDP(Gzx#$r)W&lpFKqcAoJh-AxEw$-bjW>`_+gEi z2w`99#UbFZGiQjS8kj~@PGqpsPX`T{YOj`CaEqTFag;$jY z8_{Wzz>HXx&G*Dx<5skhpETxIdhKH?DtY@b9l8$l?UkM#J-Snmts7bd7xayKTFJ(u zyAT&@6cAYcs{PBfpqZa%sxhJ5nSZBPji?Zlf&}#L?t)vC4X5VLp%~fz2Sx<*oN<7` z?ge=k<=X7r<~F7Tvp9#HB{!mA!QWBOf%EiSJ6KIF8QZNjg&x~-%e*tflL(ji_S^sO ztmib1rp09uon}RcsFi#k)oLs@$?vs(i>5k3YN%$T(5Or(TZ5JW9mA6mIMD08=749$ 
z!d+l*iu{Il7^Yu}H;lgw=En1sJpCKPSqTCHy4(f&NPelr31^*l%KHq^QE>z>Ks_bH zjbD?({~8Din7IvZeJ>8Ey=e;I?thpzD=zE5UHeO|neioJwG;IyLk?xOz(yO&0DTU~ z^#)xcs|s>Flgmp;SmYJ4g(|HMu3v7#;c*Aa8iF#UZo7CvDq4>8#qLJ|YdZ!AsH%^_7N1IQjCro

K7UpUK$>l@ zw`1S}(D?mUXu_C{wupRS-jiX~w=Uqqhf|Vb3Cm9L=T+w91Cu^ z*&Ty%sN?x*h~mJc4g~k{xD4ZmF%FXZNC;oVDwLZ_WvrnzY|{v8hc1nmx4^}Z;yriXsAf+Lp+OFLbR!&Ox?xABwl zu8w&|5pCxmu#$?Cv2_-Vghl2LZ6m7}VLEfR5o2Ou$x02uA-%QB2$c(c1rH3R9hesc zfpn#oqpbKuVsdfV#cv@5pV4^f_!WS+F>SV6N0JQ9E!T90EX((_{bSSFv9ld%I0&}9 zH&Jd4MEX1e0iqDtq~h?DBrxQX1iI0lIs<|kB$Yrh&cpeK0-^K%=FBsCBT46@h#yi!AyDq1V(#V}^;{{V*@T4WJ&U-NTq43w=|K>z8%pr_nC>%C(Wa_l78Ufib$r8Od)IIN=u>417 z`Hl{9A$mI5A(;+-Q&$F&h-@;NR>Z<2U;Y21>>Z;s@0V@SbkMQQj%_;~+qTuQ?c|AV zcWm3XZQHhP&R%QWarS%mJ!9R^&!_)*s(v+VR@I#QrAT}`17Y+l<`b-nvmDNW`De%y zrwTZ9EJrj1AFA>B`1jYDow}~*dfPs}IZMO3=a{Fy#IOILc8F0;JS4x(k-NSpbN@qM z`@aE_e}5{!$v3+qVs7u?sOV(y@1Os*Fgu`fCW9=G@F_#VQ%xf$hj0~wnnP0$hFI+@ zkQj~v#V>xn)u??YutKsX>pxKCl^p!C-o?+9;!Nug^ z{rP!|+KsP5%uF;ZCa5F;O^9TGac=M|=V z_H(PfkV1rz4jl?gJ(ArXMyWT4y(86d3`$iI4^l9`vLdZkzpznSd5Ikfrs8qcSy&>z zTIZgWZGXw0n9ibQxYWE@gI0(3#KA-dAdPcsL_|hg2@~C!VZDM}5;v_Nykfq!*@*Zf zE_wVgx82GMDryKO{U{D>vSzSc%B~|cjDQrt5BN=Ugpsf8H8f1lR4SGo#hCuXPL;QQ z#~b?C4MoepT3X`qdW2dNn& zo8)K}%Lpu>0tQei+{>*VGErz|qjbK#9 zvtd8rcHplw%YyQCKR{kyo6fgg!)6tHUYT(L>B7er5)41iG`j$qe*kSh$fY!PehLcD zWeKZHn<492B34*JUQh=CY1R~jT9Jt=k=jCU2=SL&&y5QI2uAG2?L8qd2U(^AW#{(x zThSy=C#>k+QMo^7caQcpU?Qn}j-`s?1vXuzG#j8(A+RUAY})F@=r&F(8nI&HspAy4 z4>(M>hI9c7?DCW8rw6|23?qQMSq?*Vx?v30U%luBo)B-k2mkL)Ljk5xUha3pK>EEj z@(;tH|M@xkuN?gsz;*bygizwYR!6=(Xgcg^>WlGtRYCozY<rFX2E>kaZo)O<^J7a`MX8Pf`gBd4vrtD|qKn&B)C&wp0O-x*@-|m*0egT=-t@%dD zgP2D+#WPptnc;_ugD6%zN}Z+X4=c61XNLb7L1gWd8;NHrBXwJ7s0ce#lWnnFUMTR& z1_R9Fin4!d17d4jpKcfh?MKRxxQk$@)*hradH2$3)nyXep5Z;B z?yX+-Bd=TqO2!11?MDtG0n(*T^!CIiF@ZQymqq1wPM_X$Iu9-P=^}v7npvvPBu!d$ z7K?@CsA8H38+zjA@{;{kG)#AHME>Ix<711_iQ@WWMObXyVO)a&^qE1GqpP47Q|_AG zP`(AD&r!V^MXQ^e+*n5~Lp9!B+#y3#f8J^5!iC@3Y@P`;FoUH{G*pj*q7MVV)29+j z>BC`a|1@U_v%%o9VH_HsSnM`jZ-&CDvbiqDg)tQEnV>b%Ptm)T|1?TrpIl)Y$LnG_ zzKi5j2Fx^K^PG1=*?GhK;$(UCF-tM~^=Z*+Wp{FSuy7iHt9#4n(sUuHK??@v+6*|10Csdnyg9hAsC5_OrSL;jVkLlf zHXIPukLqbhs~-*oa^gqgvtpgTk_7GypwH><53riYYL*M=Q@F-yEPLqQ&1Sc 
zZB%w}T~RO|#jFjMWcKMZccxm-SL)s_ig?OC?y_~gLFj{n8D$J_Kw%{r0oB8?@dWzn zB528d-wUBQzrrSSLq?fR!K%59Zv9J4yCQhhDGwhptpA5O5U?Hjqt>8nOD zi{)0CI|&Gu%zunGI*XFZh(ix)q${jT8wnnzbBMPYVJc4HX*9d^mz|21$=R$J$(y7V zo0dxdbX3N#=F$zjstTf*t8vL)2*{XH!+<2IJ1VVFa67|{?LP&P41h$2i2;?N~RA30LV`BsUcj zfO9#Pg1$t}7zpv#&)8`mis3~o+P(DxOMgz-V*(?wWaxi?R=NhtW}<#^Z?(BhSwyar zG|A#Q7wh4OfK<|DAcl9THc-W4*>J4nTevsD%dkj`U~wSUCh15?_N@uMdF^Kw+{agk zJ`im^wDqj`Ev)W3k3stasP`88-M0ZBs7;B6{-tSm3>I@_e-QfT?7|n0D~0RRqDb^G zyHb=is;IwuQ&ITzL4KsP@Z`b$d%B0Wuhioo1CWttW8yhsER1ZUZzA{F*K=wmi-sb#Ju+j z-l@In^IKnb{bQG}Ps>+Vu_W#grNKNGto+yjA)?>0?~X`4I3T@5G1)RqGUZuP^NJCq&^HykuYtMDD8qq+l8RcZNJsvN(10{ zQ1$XcGt}QH-U^WU!-wRR1d--{B$%vY{JLWIV%P4-KQuxxDeJaF#{eu&&r!3Qu{w}0f--8^H|KwE>)ORrcR+2Qf zb})DRcH>k0zWK8@{RX}NYvTF;E~phK{+F;MkIP$)T$93Ba2R2TvKc>`D??#mv9wg$ zd~|-`Qx5LwwsZ2hb*Rt4S9dsF%Cny5<1fscy~)d;0m2r$f=83<->c~!GNyb!U)PA; zq^!`@@)UaG)Ew(9V?5ZBq#c%dCWZrplmuM`o~TyHjAIMh0*#1{B>K4po-dx$Tk-Cq z=WZDkP5x2W&Os`N8KiYHRH#UY*n|nvd(U>yO=MFI-2BEp?x@=N<~CbLJBf6P)}vLS?xJXYJ2^<3KJUdrwKnJnTp{ zjIi|R=L7rn9b*D#Xxr4*R<3T5AuOS+#U8hNlfo&^9JO{VbH!v9^JbK=TCGR-5EWR@ zN8T-_I|&@A}(hKeL4_*eb!1G8p~&_Im8|wc>Cdir+gg90n1dw?QaXcx6Op_W1r=axRw>4;rM*UOpT#Eb9xU1IiWo@h?|5uP zka>-XW0Ikp@dIe;MN8B01a7+5V@h3WN{J=HJ*pe0uwQ3S&MyWFni47X32Q7SyCTNQ z+sR!_9IZa5!>f&V$`q!%H8ci!a|RMx5}5MA_kr+bhtQy{-^)(hCVa@I!^TV4RBi zAFa!Nsi3y37I5EK;0cqu|9MRj<^r&h1lF}u0KpKQD^5Y+LvFEwM zLU@@v4_Na#Axy6tn3P%sD^5P#<7F;sd$f4a7LBMk zGU^RZHBcxSA%kCx*eH&wgA?Qwazm8>9SCSz_!;MqY-QX<1@p$*T8lc?@`ikEqJ>#w zcG``^CoFMAhdEXT9qt47g0IZkaU)4R7wkGs^Ax}usqJ5HfDYAV$!=6?>J6+Ha1I<5 z|6=9soU4>E))tW$<#>F ziZ$6>KJf0bPfbx_)7-}tMINlc=}|H+$uX)mhC6-Hz+XZxsKd^b?RFB6et}O#+>Wmw9Ec9) z{q}XFWp{3@qmyK*Jvzpyqv57LIR;hPXKsrh{G?&dRjF%Zt5&m20Ll?OyfUYC3WRn{cgQ?^V~UAv+5 z&_m#&nIwffgX1*Z2#5^Kl4DbE#NrD&Hi4|7SPqZ}(>_+JMz=s|k77aEL}<=0Zfb)a z%F(*L3zCA<=xO)2U3B|pcTqDbBoFp>QyAEU(jMu8(jLA61-H!ucI804+B!$E^cQQa z)_ERrW3g!B9iLb3nn3dlkvD7KsY?sRvls3QC0qPi>o<)GHx%4Xb$5a3GBTJ(k@`e@ z$RUa^%S15^1oLEmA=sayrP5;9qtf!Z1*?e$ORVPsXpL{jL<6E)0sj&swP3}NPmR%FM?O>SQgN5XfHE< 
zo(4#Cv11(%Nnw_{_Ro}r6=gKd{k?NebJ~<~Kv0r(r0qe4n3LFx$5%x(BKvrz$m?LG zjLIc;hbj0FMdb9aH9Lpsof#yG$(0sG2%RL;d(n>;#jb!R_+dad+K;Ccw!|RY?uS(a zj~?=&M!4C(5LnlH6k%aYvz@7?xRa^2gml%vn&eKl$R_lJ+e|xsNfXzr#xuh(>`}9g zLHSyiFwK^-p!;p$yt7$F|3*IfO3Mlu9e>Dpx8O`37?fA`cj`C0B-m9uRhJjs^mRp# zWB;Aj6|G^1V6`jg7#7V9UFvnB4((nIwG?k%c7h`?0tS8J3Bn0t#pb#SA}N-|45$-j z$R>%7cc2ebAClXc(&0UtHX<>pd)akR3Kx_cK+n<}FhzmTx!8e9^u2e4%x{>T6pQ`6 zO182bh$-W5A3^wos0SV_TgPmF4WUP-+D25KjbC{y_6W_9I2_vNKwU(^qSdn&>^=*t z&uvp*@c8#2*paD!ZMCi3;K{Na;I4Q35zw$YrW5U@Kk~)&rw;G?d7Q&c9|x<Hg|CNMsxovmfth*|E*GHezPTWa^Hd^F4!B3sF;)? z(NaPyAhocu1jUe(!5Cy|dh|W2=!@fNmuNOzxi^tE_jAtzNJ0JR-avc_H|ve#KO}#S z#a(8secu|^Tx553d4r@3#6^MHbH)vmiBpn0X^29xEv!Vuh1n(Sr5I0V&`jA2;WS|Y zbf0e}X|)wA-Pf5gBZ>r4YX3Mav1kKY(ulAJ0Q*jB)YhviHK)w!TJsi3^dMa$L@^{` z_De`fF4;M87vM3Ph9SzCoCi$#Fsd38u!^0#*sPful^p5oI(xGU?yeYjn;Hq1!wzFk zG&2w}W3`AX4bxoVm03y>ts{KaDf!}b&7$(P4KAMP=vK5?1In^-YYNtx1f#}+2QK@h zeSeAI@E6Z8a?)>sZ`fbq9_snl6LCu6g>o)rO;ijp3|$vig+4t} zylEo7$SEW<_U+qgVcaVhk+4k+C9THI5V10qV*dOV6pPtAI$)QN{!JRBKh-D zk2^{j@bZ}yqW?<#VVuI_27*cI-V~sJiqQv&m07+10XF+#ZnIJdr8t`9s_EE;T2V;B z4UnQUH9EdX%zwh-5&wflY#ve!IWt0UE-My3?L#^Bh%kcgP1q{&26eXLn zTkjJ*w+(|_>Pq0v8{%nX$QZbf)tbJaLY$03;MO=Ic-uqYUmUCuXD>J>o6BCRF=xa% z3R4SK9#t1!K4I_d>tZgE>&+kZ?Q}1qo4&h%U$GfY058s%*=!kac{0Z+4Hwm!)pFLR zJ+5*OpgWUrm0FPI2ib4NPJ+Sk07j(`diti^i#kh&f}i>P4~|d?RFb#!JN)~D@)beox}bw?4VCf^y*`2{4`-@%SFTry2h z>9VBc9#JxEs1+0i2^LR@B1J`B9Ac=#FW=(?2;5;#U$0E0UNag_!jY$&2diQk_n)bT zl5Me_SUvqUjwCqmVcyb`igygB_4YUB*m$h5oeKv3uIF0sk}~es!{D>4r%PC*F~FN3owq5e0|YeUTSG#Vq%&Gk7uwW z0lDo#_wvflqHeRm*}l?}o;EILszBt|EW*zNPmq#?4A+&i0xx^?9obLyY4xx=Y9&^G;xYXYPxG)DOpPg!i_Ccl#3L}6xAAZzNhPK1XaC_~ z!A|mlo?Be*8Nn=a+FhgpOj@G7yYs(Qk(8&|h@_>w8Y^r&5nCqe0V60rRz?b5%J;GYeBqSAjo|K692GxD4` zRZyM2FdI+-jK2}WAZTZ()w_)V{n5tEb@>+JYluDozCb$fA4H)$bzg(Ux{*hXurjO^ zwAxc+UXu=&JV*E59}h3kzQPG4M)X8E*}#_&}w*KEgtX)cU{vm9b$atHa;s>| z+L6&cn8xUL*OSjx4YGjf6{Eq+Q3{!ZyhrL&^6Vz@jGbI%cAM9GkmFlamTbcQGvOlL zmJ?(FI)c86=JEs|*;?h~o)88>12nXlpMR4@yh%qdwFNpct;vMlc=;{FSo*apJ;p}! 
zAX~t;3tb~VuP|ZW;z$=IHf->F@Ml)&-&Bnb{iQyE#;GZ@C$PzEf6~q}4D>9jic@mTO5x76ulDz@+XAcm35!VSu zT*Gs>;f0b2TNpjU_BjHZ&S6Sqk6V1370+!eppV2H+FY!q*n=GHQ!9Rn6MjY!Jc77A zG7Y!lFp8?TIHN!LXO?gCnsYM-gQxsm=Ek**VmZu7vnuufD7K~GIxfxbsQ@qv2T zPa`tvHB$fFCyZl>3oYg?_wW)C>^_iDOc^B7klnTOoytQH18WkOk)L2BSD0r%xgRSW zQS9elF^?O=_@|58zKLK;(f77l-Zzu}4{fXed2saq!5k#UZAoDBqYQS{sn@j@Vtp|$ zG%gnZ$U|9@u#w1@11Sjl8ze^Co=)7yS(}=;68a3~g;NDe_X^}yJj;~s8xq9ahQ5_r zxAlTMnep*)w1e(TG%tWsjo3RR;yVGPEO4V{Zp?=a_0R#=V^ioQu4YL=BO4r0$$XTX zZfnw#_$V}sDAIDrezGQ+h?q24St0QNug_?{s-pI(^jg`#JRxM1YBV;a@@JQvH8*>> zIJvku74E0NlXkYe_624>znU0J@L<-c=G#F3k4A_)*;ky!C(^uZfj%WB3-*{*B$?9+ zDm$WFp=0(xnt6`vDQV3Jl5f&R(Mp};;q8d3I%Kn>Kx=^;uSVCw0L=gw53%Bp==8Sw zxtx=cs!^-_+i{2OK`Q;913+AXc_&Z5$@z3<)So0CU3;JAv=H?@Zpi~riQ{z-zLtVL z!oF<}@IgJp)Iyz1zVJ42!SPHSkjYNS4%ulVVIXdRuiZ@5Mx8LJS}J#qD^Zi_xQ@>DKDr-_e#>5h3dtje*NcwH_h;i{Sx7}dkdpuW z(yUCjckQsagv*QGMSi9u1`Z|V^}Wjf7B@q%j2DQXyd0nOyqg%m{CK_lAoKlJ7#8M} z%IvR?Vh$6aDWK2W!=i?*<77q&B8O&3?zP(Cs@kapc)&p7En?J;t-TX9abGT#H?TW? ztO5(lPKRuC7fs}zwcUKbRh=7E8wzTsa#Z{a`WR}?UZ%!HohN}d&xJ=JQhpO1PI#>X zHkb>pW04pU%Bj_mf~U}1F1=wxdBZu1790>3Dm44bQ#F=T4V3&HlOLsGH)+AK$cHk6 zia$=$kog?)07HCL*PI6}DRhpM^*%I*kHM<#1Se+AQ!!xyhcy6j7`iDX7Z-2i73_n# zas*?7LkxS-XSqv;YBa zW_n*32D(HTYQ0$feV_Fru1ZxW0g&iwqixPX3=9t4o)o|kOo79V$?$uh?#8Q8e>4e)V6;_(x&ViUVxma+i25qea;d-oK7ouuDsB^ab{ zu1qjQ%`n56VtxBE#0qAzb7lph`Eb-}TYpXB!H-}3Ykqyp`otprp7{VEuW*^IR2n$Fb99*nAtqT&oOFIf z@w*6>YvOGw@Ja?Pp1=whZqydzx@9X4n^2!n83C5{C?G@|E?&$?p*g68)kNvUTJ)I6 z1Q|(#UuP6pj78GUxq11m-GSszc+)X{C2eo-?8ud9sB=3(D47v?`JAa{V(IF zPZQ_0AY*9M97>Jf<o%#O_%Wq}8>YM=q0|tGY+hlXcpE=Z4Od z`NT7Hu2hnvRoqOw@g1f=bv`+nba{GwA$Ak0INlqI1k<9!x_!sL()h?hEWoWrdU3w` zZ%%)VR+Bc@_v!C#koM1p-3v_^L6)_Ktj4HE>aUh%2XZE@JFMOn)J~c`_7VWNb9c-N z2b|SZMR4Z@E7j&q&9(6H3yjEu6HV7{2!1t0lgizD;mZ9$r(r7W5G$ky@w(T_dFnOD z*p#+z$@pKE+>o@%eT(2-p_C}wbQ5s(%Sn_{$HDN@MB+Ev?t@3dPy`%TZ!z}AThZSu zN<1i$siJhXFdjV zP*y|V<`V8t=h#XTRUR~5`c`Z9^-`*BZf?WAehGdg)E2Je)hqFa!k{V(u+(hTf^Yq& zoruUh2(^3pe)2{bvt4&4Y9CY3js)PUHtd4rVG57}uFJL)D(JfSIo^{P=7liFXG 
zq5yqgof0V8paQcP!gy+;^pp-DA5pj=gbMN0eW=-eY+N8~y+G>t+x}oa!5r>tW$xhI zPQSv=pi;~653Gvf6~*JcQ%t1xOrH2l3Zy@8AoJ+wz@daW@m7?%LXkr!bw9GY@ns3e zSfuWF_gkWnesv?s3I`@}NgE2xwgs&rj?kH-FEy82=O8`+szN ziHch`vvS`zNfap14!&#i9H@wF7}yIPm=UB%(o(}F{wsZ(wA0nJ2aD^@B41>>o-_U6 zUqD~vdo48S8~FTb^+%#zcbQiiYoDKYcj&$#^;Smmb+Ljp(L=1Kt_J!;0s%1|JK}Wi z;={~oL!foo5n8=}rs6MmUW~R&;SIJO3TL4Ky?kh+b2rT9B1Jl4>#Uh-Bec z`Hsp<==#UEW6pGPhNk8H!!DUQR~#F9jEMI6T*OWfN^Ze&X(4nV$wa8QUJ>oTkruH# zm~O<`J7Wxseo@FqaZMl#Y(mrFW9AHM9Kb|XBMqaZ2a)DvJgYipkDD_VUF_PKd~dT7 z#02}bBfPn9a!X!O#83=lbJSK#E}K&yx-HI#T6ua)6o0{|={*HFusCkHzs|Fn&|C3H zBck1cmfcWVUN&i>X$YU^Sn6k2H;r3zuXbJFz)r5~3$d$tUj(l1?o={MM){kjgqXRO zc5R*#{;V7AQh|G|)jLM@wGAK&rm2~@{Pewv#06pHbKn#wL0P6F1!^qw9g&cW3Z=9} zj)POhOlwsh@eF=>z?#sIs*C-Nl(yU!#DaiaxhEs#iJqQ8w%(?+6lU02MYSeDkr!B- zPjMv+on6OLXgGnAtl(ao>|X2Y8*Hb}GRW5}-IzXnoo-d0!m4Vy$GS!XOLy>3_+UGs z2D|YcQx@M#M|}TDOetGi{9lGo9m-=0-^+nKE^*?$^uHkxZh}I{#UTQd;X!L+W@jm( zDg@N4+lUqI92o_rNk{3P>1gxAL=&O;x)ZT=q1mk0kLlE$WeWuY_$0`0jY-Kkt zP*|m3AF}Ubd=`<>(Xg0har*_@x2YH}bn0Wk*OZz3*e5;Zc;2uBdnl8?&XjupbkOeNZsNh6pvsq_ydmJI+*z**{I{0K)-;p1~k8cpJXL$^t!-`E}=*4G^-E8>H!LjTPxSx zcF+cS`ommfKMhNSbas^@YbTpH1*RFrBuATUR zt{oFWSk^$xU&kbFQ;MCX22RAN5F6eq9UfR$ut`Jw--p2YX)A*J69m^!oYfj2y7NYcH6&r+0~_sH^c^nzeN1AU4Ga7=FlR{S|Mm~MpzY0$Z+p2W(a={b-pR9EO1Rs zB%KY|@wLcAA@)KXi!d2_BxrkhDn`DT1=Dec}V!okd{$+wK z4E{n8R*xKyci1(CnNdhf$Dp2(Jpof0-0%-38X=Dd9PQgT+w%Lshx9+loPS~MOm%ZT zt%2B2iL_KU_ita%N>xjB!#71_3=3c}o zgeW~^U_ZTJQ2!PqXulQd=3b=XOQhwATK$y(9$#1jOQ4}4?~l#&nek)H(04f(Sr=s| zWv7Lu1=%WGk4FSw^;;!8&YPM)pQDCY9DhU`hMty1@sq1=Tj7bFsOOBZOFlpR`W>-J$-(kezWJj;`?x-v>ev{*8V z8p|KXJPV$HyQr1A(9LVrM47u-XpcrIyO`yWvx1pVYc&?154aneRpLqgx)EMvRaa#|9?Wwqs2+W8n5~79G z(}iCiLk;?enn}ew`HzhG+tu+Ru@T+K5juvZN)wY;x6HjvqD!&!)$$;1VAh~7fg0K| zEha#aN=Yv|3^~YFH}cc38ovVb%L|g@9W6fo(JtT6$fa?zf@Ct88e}m?i)b*Jgc{fl zExfdvw-BYDmH6>(4QMt#p0;FUIQqkhD}aH?a7)_%JtA~soqj{ppP_82yi9kaxuK>~ ze_)Zt>1?q=ZH*kF{1iq9sr*tVuy=u>Zev}!gEZx@O6-fjyu9X00gpIl-fS_pzjpqJ z1yqBmf9NF!jaF<+YxgH6oXBdK)sH(>VZ)1siyA$P<#KDt;8NT*l_0{xit~5j1P)FN 
zI8hhYKhQ)i z37^aP13B~u65?sg+_@2Kr^iWHN=U;EDSZ@2W2!5ALhGNWXnFBY%7W?1 z=HI9JzQ-pLKZDYTv<0-lt|6c-RwhxZ)mU2Os{bsX_i^@*fKUj8*aDO5pks=qn3Dv6 zwggpKLuyRCTVPwmw1r}B#AS}?X7b837UlXwp~E2|PJw2SGVueL7){Y&z!jL!XN=0i zU^Eig`S2`{+gU$68aRdWx?BZ{sU_f=8sn~>s~M?GU~`fH5kCc; z8ICp+INM3(3{#k32RZdv6b9MQYdZXNuk7ed8;G?S2nT+NZBG=Tar^KFl2SvhW$bGW#kdWL-I)s_IqVnCDDM9fm8g;P;8 z7t4yZn3^*NQfx7SwmkzP$=fwdC}bafQSEF@pd&P8@H#`swGy_rz;Z?Ty5mkS%>m#% zp_!m9e<()sfKiY(nF<1zBz&&`ZlJf6QLvLhl`_``%RW&{+O>Xhp;lwSsyRqGf=RWd zpftiR`={2(siiPAS|p}@q=NhVc0ELprt%=fMXO3B)4ryC2LT(o=sLM7hJC!}T1@)E zA3^J$3&1*M6Xq>03FX`R&w*NkrZE?FwU+Muut;>qNhj@bX17ZJxnOlPSZ=Zeiz~T_ zOu#yc3t6ONHB;?|r4w+pI)~KGN;HOGC)txxiUN8#mexj+W(cz%9a4sx|IRG=}ia zuEBuba3AHsV2feqw-3MvuL`I+2|`Ud4~7ZkN=JZ;L20|Oxna5vx1qbIh#k2O4$RQF zo`tL()zxaqibg^GbB+BS5#U{@K;WWQj~GcB1zb}zJkPwH|5hZ9iH2308!>_;%msji zJHSL~s)YHBR=Koa1mLEOHos*`gp=s8KA-C zu0aE+W!#iJ*0xqKm3A`fUGy#O+X+5W36myS>Uh2!R*s$aCU^`K&KKLCCDkejX2p=5 z%o7-fl03x`gaSNyr?3_JLv?2RLS3F*8ub>Jd@^Cc17)v8vYEK4aqo?OS@W9mt%ITJ z9=S2%R8M){CugT@k~~0x`}Vl!svYqX=E)c_oU6o}#Hb^%G1l3BudxA{F*tbjG;W_>=xV73pKY53v%>I)@D36I_@&p$h|Aw zonQS`07z_F#@T-%@-Tb|)7;;anoD_WH>9ewFy(ZcEOM$#Y)8>qi7rCnsH9GO-_7zF zu*C87{Df1P4TEOsnzZ@H%&lvV(3V@;Q!%+OYRp`g05PjY^gL$^$-t0Y>H*CDDs?FZly*oZ&dxvsxaUWF!{em4{A>n@vpXg$dwvt@_rgmHF z-MER`ABa8R-t_H*kv>}CzOpz;!>p^^9ztHMsHL|SRnS<-y5Z*r(_}c4=fXF`l^-i}>e7v!qs_jv zqvWhX^F=2sDNWA9c@P0?lUlr6ecrTKM%pNQ^?*Lq?p-0~?_j50xV%^(+H>sMul#Tw zeciF*1=?a7cI(}352%>LO96pD+?9!fNyl^9v3^v&Y4L)mNGK0FN43&Xf8jUlxW1Bw zyiu2;qW-aGNhs=zbuoxnxiwZ3{PFZM#Kw)9H@(hgX23h(`Wm~m4&TvoZoYp{plb^> z_#?vXcxd>r7K+1HKJvhed>gtK`TAbJUazUWQY6T~t2af%#<+Veyr%7-#*A#@&*;@g58{i|E%6yC_InGXCOd{L0;$)z#?n7M`re zh!kO{6=>7I?*}czyF7_frt#)s1CFJ_XE&VrDA?Dp3XbvF{qsEJgb&OLSNz_5g?HpK z9)8rsr4JN!Af3G9!#Qn(6zaUDqLN(g2g8*M)Djap?WMK9NKlkC)E2|-g|#-rp%!Gz zAHd%`iq|81efi93m3yTBw3g0j#;Yb2X{mhRAI?&KDmbGqou(2xiRNb^sV}%%Wu0?< z?($L>(#BO*)^)rSgyNRni$i`R4v;GhlCZ8$@e^ROX(p=2_v6Y!%^As zu022)fHdv_-~Yu_H6WVPLpHQx!W%^6j)cBhS`O3QBW#x(eX54d&I22op(N59b*&$v 
zFiSRY6rOc^(dgSV1>a7-5C;(5S5MvKcM2Jm-LD9TGqDpP097%52V+0>Xqq!! zq4e3vj53SE6i8J`XcQB|MZPP8j;PAOnpGnllH6#Ku~vS42xP*Nz@~y%db7Xi8s09P z1)e%8ys6&M8D=Dt6&t`iKG_4X=!kgRQoh%Z`dc&mlOUqXk-k`jKv9@(a^2-Upw>?< zt5*^DV~6Zedbec4NVl($2T{&b)zA@b#dUyd>`2JC0=xa_fIm8{5um zr-!ApXZhC8@=vC2WyxO|!@0Km)h8ep*`^he92$@YwP>VcdoS5OC^s38e#7RPsg4j+ zbVGG}WRSET&ZfrcR(x~k8n1rTP%CnfUNKUonD$P?FtNFF#cn!wEIab-;jU=B1dHK@ z(;(yAQJ`O$sMn>h;pf^8{JISW%d+@v6@CnXh9n5TXGC}?FI9i-D0OMaIg&mAg=0Kn zNJ7oz5*ReJukD55fUsMuaP+H4tDN&V9zfqF@ zr=#ecUk9wu{0;!+gl;3Bw=Vn^)z$ahVhhw)io!na&9}LmWurLb0zubxK=UEnU*{5P z+SP}&*(iBKSO4{alBHaY^)5Q=mZ+2OwIooJ7*Q5XJ+2|q`9#f?6myq!&oz?klihLq z4C)$XP!BNS0G_Z1&TM>?Jk{S~{F3n83ioli=IO6f%wkvCl(RFFw~j0tb{GvXTx>*sB0McY0s&SNvj4+^h`9nJ_wM>F!Uc>X}9PifQekn0sKI2SAJP!a4h z5cyGTuCj3ZBM^&{dRelIlT^9zcfaAuL5Y~bl!ppSf`wZbK$z#6U~rdclk``e+!qhe z6Qspo*%<)eu6?C;Bp<^VuW6JI|Ncvyn+LlSl;Mp22Bl7ARQ0Xc24%29(ZrdsIPw&-=yHQ7_Vle|5h>AST0 zUGX2Zk34vp?U~IHT|;$U86T+UUHl_NE4m|}>E~6q``7hccCaT^#y+?wD##Q%HwPd8 zV3x4L4|qqu`B$4(LXqDJngNy-{&@aFBvVsywt@X^}iH7P%>bR?ciC$I^U-4Foa`YKI^qDyGK7k%E%c_P=yzAi`YnxGA%DeNd++j3*h^ z=rn>oBd0|~lZ<6YvmkKY*ZJlJ;Im0tqgWu&E92eqt;+NYdxx`eS(4Hw_Jb5|yVvBg z*tbdY^!AN;luEyN4VRhS@-_DC{({ziH{&Z}iGElSV~qvT>L-8G%+yEL zX#MFOhj{InyKG=mvW-<1B@c-}x$vA(nU?>S>0*eN#!SLzQ)Ex7fvQ)S4D<8|I#N$3 zT5Ei`Z?cxBODHX8(Xp73v`IsAYC@9b;t}z0wxVuQSY1J^GRwDPN@qbM-ZF48T$GZ< z8WU+;Pqo?{ghI-KZ-i*ydXu`Ep0Xw^McH_KE9J0S7G;x8Fe`DVG?j3Pv=0YzJ}yZR z%2=oqHiUjvuk0~Ca>Kol4CFi0_xQT~;_F?=u+!kIDl-9g`#ZNZ9HCy17Ga1v^Jv9# z{T4Kb1-AzUxq*MutfOWWZgD*HnFfyYg0&e9f(5tZ>krPF6{VikNeHoc{linPPt#Si z&*g>(c54V8rT_AX!J&bNm-!umPvOR}vDai#`CX___J#=zeB*{4<&2WpaDncZsOkp* zsg<%@@rbrMkR_ux9?LsQxzoBa1s%$BBn6vk#{&&zUwcfzeCBJUwFYSF$08qDsB;gWQN*g!p8pxjofWbqNSZOEKOaTx@+* zwdt5*Q47@EOZ~EZL9s?1o?A%9TJT=Ob_13yyugvPg*e&ZU(r6^k4=2+D-@n=Hv5vu zSXG|hM(>h9^zn=eQ=$6`JO&70&2|%V5Lsx>)(%#;pcOfu>*nk_3HB_BNaH$`jM<^S zcSftDU1?nL;jy)+sfonQN}(}gUW?d_ikr*3=^{G)=tjBtEPe>TO|0ddVB zTklrSHiW+!#26frPXQQ(YN8DG$PZo?(po(QUCCf_OJC`pw*uey00%gmH!`WJkrKXj2!#6?`T25mTu9OJp2L8z3! 
z=arrL$ZqxuE{%yV)14Kd>k}j7pxZ6#$Dz8$@WV5p8kTqN<-7W)Q7Gt2{KoOPK_tZ| zf2WG~O5@{qPI+W<4f_;reuFVdO^5`ADC1!JQE|N`s3cq@(0WB!n0uh@*c{=LAd;~} zyGK@hbF-Oo+!nN)@i*O(`@FA#u?o=~e{`4O#5}z&=UkU*50fOrzi11D^&FOqe>wii z?*k+2|EcUs;Gx{!@KBT~>PAwLrIDT7Th=Utu?~?np@t^gFs?zgX=D${RwOY^WGh-+ z+#4$066ISh8eYW#FXWp~S`<*%O^ZuItL1Tyqt8#tZ zY120E;^VG`!lZn&3sPd$RkdHpU#|w+bYV)pJC|SH9g%|5IkxVTQcBA4CL0}$&}ef@ zW^Vtj%M;;_1xxP9x#ex17&4N*{ksO*_4O}xYu(p*JkL#yr}@7b)t5X?%CY<+s5_MJ zuiqt+N_;A(_)%lumoyRFixWa-M7qK_9s6<1X?JDa9fP!+_6u~~M$5L=ipB=7(j#f< zZ34J%=bs549%~_mA(|={uZNs_0?o7;-LBP(ZRnkd{-^|2|=4vUTmtByHL8 zEph`(LSEzQj68a+`d$V<45J7cyv^#|^|%fD#si1Nx!4NW*`l*{->HEWNh6-|g>-=r zXmQ|-i}Ku$ndUeHQ^&ieT!Lf}vf6GaqW9$DJ2NWrqwPY%%4nip$@vK$nRp*_C-v<| zuKz~ZyN&<%!NS26&x?jhy+@awJipMQ-8(X4#Ae5??U<1QMt1l9R=w9fAnEF}NYu$2 z>6}Vkc zIb*A?G*z8^IvibmBKn_u^5&T_1oey0gZS2~obf(#xk=erZGTEdQnt3DMGM+0oPwss zj5zXD;(oWhB_T@~Ig#9@v)AKtXu3>Inmgf@A|-lD-1U>cNyl3h?ADD9)GG4}zUGPk zZzaXe!~Kf?<~@$G?Uql3t8jy9{2!doq4=J}j9ktTxss{p6!9UdjyDERlA*xZ!=Q)KDs5O)phz>Vq3BNGoM(H|=1*Q4$^2fTZw z(%nq1P|5Rt81}SYJpEEzMPl5VJsV5&4e)ZWKDyoZ>1EwpkHx-AQVQc8%JMz;{H~p{=FXV>jIxvm4X*qv52e?Y-f%DJ zxEA165GikEASQ^fH6K#d!Tpu2HP{sFs%E=e$gYd$aj$+xue6N+Wc(rAz~wUsk2`(b z8Kvmyz%bKQxpP}~baG-rwYcYCvkHOi zlkR<=>ZBTU*8RF_d#Bl@zZsRIhx<%~Z@Z=ik z>adw3!DK(8R|q$vy{FTxw%#xliD~6qXmY^7_9kthVPTF~Xy1CfBqbU~?1QmxmU=+k z(ggxvEuA;0e&+ci-zQR{-f7aO{O(Pz_OsEjLh_K>MbvoZ4nxtk5u{g@nPv)cgW_R} z9}EA4K4@z0?7ue}Z(o~R(X&FjejUI2g~08PH1E4w>9o{)S(?1>Z0XMvTb|;&EuyOE zGvWNpYX)Nv<8|a^;1>bh#&znEcl-r!T#pn= z4$?Yudha6F%4b>*8@=BdtXXY4N+`U4Dmx$}>HeVJk-QdTG@t!tVT#0(LeV0gvqyyw z2sEp^9eY0N`u10Tm4n8No&A=)IeEC|gnmEXoNSzu!1<4R<%-9kY_8~5Ej?zRegMn78wuMs#;i&eUA0Zk_RXQ3b&TT} z;SCI=7-FUB@*&;8|n>(_g^HGf3@QODE3LpmX~ELnymQm{Sx9xrKS zK29p~?v@R$0=v6Dr5aW>-!{+h@?Q58|Kz8{{W`%J+lDAdb&M5VHrX_mDY;1-JLnf)ezmPau$)1;=`-FU=-r-83tX=C`S#}GZufju zQ>sXNT0Ny=k@nc%cFnvA_i4SC)?_ORXHq8B4D%el1uPX`c~uG#S1M7C+*MMqLw78E zhY2dI8@+N^qrMI1+;TUda(vGqGSRyU{Fnm`aqrr7bz42c5xsOO-~oZpkzorD1g}Y<6rk&3>PsSGy}W?MtqFky@A(X# 
zIuNZK0cK?^=;PUAu>j0#HtjbHCV*6?jzA&OoE$*Jlga*}LF`SF?WLhv1O|zqC<>*> zYB;#lsYKx0&kH@BFpW8n*yDcc6?;_zaJs<-jPSkCsSX-!aV=P5kUgF@Nu<{a%#K*F z134Q{9|YX7X(v$62_cY3^G%t~rD>Q0z@)1|zs)vjJ6Jq9;7#Ki`w+eS**En?7;n&7 zu==V3T&eFboN3ZiMx3D8qYc;VjFUk_H-WWCau(VFXSQf~viH0L$gwD$UfFHqNcgN`x}M+YQ6RnN<+@t>JUp#)9YOkqst-Ga?{FsDpEeX0(5v{0J~SEbWiL zXC2}M4?UH@u&|;%0y`eb33ldo4~z-x8zY!oVmV=c+f$m?RfDC35mdQ2E>Pze7KWP- z>!Bh<&57I+O_^s}9Tg^k)h7{xx@0a0IA~GAOt2yy!X%Q$1rt~LbTB6@Du!_0%HV>N zlf)QI1&gvERKwso23mJ!Ou6ZS#zCS5W`gxE5T>C#E|{i<1D35C222I33?Njaz`On7 zi<+VWFP6D{e-{yiN#M|Jgk<44u1TiMI78S5W`Sdb5f+{zu34s{CfWN7a3Cf^@L%!& zN$?|!!9j2c)j$~+R6n#891w-z8(!oBpL2K=+%a$r2|~8-(vQj5_XT`<0Ksf;oP+tz z9CObS!0m)Tgg`K#xBM8B(|Z)Wb&DYL{WTYv`;A=q6~Nnx2+!lTIXtj8J7dZE!P_{z z#f8w6F}^!?^KE#+ZDv+xd5O&3EmomZzsv?>E-~ygGum45fk!SBN&|eo1rKw^?aZJ4 E2O(~oYXATM diff --git a/examples/vision/ocr/PP-OCRv3/android/gradle/wrapper/gradle-wrapper.properties b/examples/vision/ocr/PP-OCRv3/android/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index 7855fafe49..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,6 +0,0 @@ -#Sat Oct 08 17:24:34 CST 2022 -distributionBase=GRADLE_USER_HOME -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-bin.zip -distributionPath=wrapper/dists -zipStorePath=wrapper/dists -zipStoreBase=GRADLE_USER_HOME diff --git a/examples/vision/ocr/PP-OCRv3/android/gradlew b/examples/vision/ocr/PP-OCRv3/android/gradlew deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/examples/vision/ocr/PP-OCRv3/android/gradlew.bat b/examples/vision/ocr/PP-OCRv3/android/gradlew.bat deleted file mode 100644 index 107acd32c4..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/gradlew.bat +++ /dev/null @@ -1,89 +0,0 @@ -@rem -@rem Copyright 2015 the original author or authors. -@rem -@rem Licensed under the Apache License, Version 2.0 (the "License"); -@rem you may not use this file except in compliance with the License. 
-@rem You may obtain a copy of the License at -@rem -@rem https://www.apache.org/licenses/LICENSE-2.0 -@rem -@rem Unless required by applicable law or agreed to in writing, software -@rem distributed under the License is distributed on an "AS IS" BASIS, -@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -@rem See the License for the specific language governing permissions and -@rem limitations under the License. -@rem - -@if "%DEBUG%" == "" @echo off -@rem ########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Resolve any "." and ".." in APP_HOME to make it shorter. -for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto execute - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto execute - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. 
- -goto fail - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* - -:end -@rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! -if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega diff --git a/examples/vision/ocr/PP-OCRv3/android/settings.gradle b/examples/vision/ocr/PP-OCRv3/android/settings.gradle deleted file mode 100644 index e7b4def49c..0000000000 --- a/examples/vision/ocr/PP-OCRv3/android/settings.gradle +++ /dev/null @@ -1 +0,0 @@ -include ':app' diff --git a/examples/vision/ocr/PP-OCRv3/c/README.md b/examples/vision/ocr/PP-OCRv3/c/README.md deleted file mode 100755 index c50e1ad3de..0000000000 --- a/examples/vision/ocr/PP-OCRv3/c/README.md +++ /dev/null @@ -1,251 +0,0 @@ -English | [简体中文](README_CN.md) -# PPOCRv3 C Deployment Example - -This directory provides `infer.c` to finish the deployment of PPOCRv3 on CPU/GPU. - -Before deployment, two steps require confirmation - -- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) -- 2. Download the precompiled deployment library and samples code according to your development environment. Refer to [FastDeploy Precompiled Library](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) - -Taking inference on Linux as an example, the compilation test can be completed by executing the following command in this directory. FastDeploy version 1.0.4 or above (x.x.x>=1.0.4) is required to support this model. 
- - -```bash -mkdir build -cd build -# Download the FastDeploy precompiled library. Users can choose your appropriate version in the `FastDeploy Precompiled Library` mentioned above -wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz -tar xvf fastdeploy-linux-x64-x.x.x.tgz -cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x -make -j - - -# Download model, image, and dictionary files -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar -tar -xvf ch_PP-OCRv3_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar -tar -xvf ch_PP-OCRv3_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# CPU inference -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 -# GPU inference -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -``` - -The above command works for Linux or MacOS. For SDK in Windows, refer to: -- [How to use FastDeploy C++ SDK in Windows](../../../../../docs/en/faq/use_sdk_on_windows.md) - -The visualized result after running is as follows - - - - - -## PPOCRv3 C Interface - -### RuntimeOption - -```c -FD_C_RuntimeOptionWrapper* FD_C_CreateRuntimeOptionWrapper() -``` - -> Create a RuntimeOption object, and return a pointer to manipulate it. -> -> **Return** -> -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Pointer to manipulate RuntimeOption object. - - -```c -void FD_C_RuntimeOptionWrapperUseCpu( - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper) -``` - -> Enable Cpu inference. 
-> -> **Params** -> -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Pointer to manipulate RuntimeOption object. - -```c -void FD_C_RuntimeOptionWrapperUseGpu( - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - int gpu_id) -``` -> Enable Gpu inference. -> -> **Params** -> -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Pointer to manipulate RuntimeOption object. - -> * **gpu_id**(int): gpu id - - -### Model - -```c - -FD_C_DBDetectorWrapper* FD_C_CreateDBDetectorWrapper( - const char* model_file, const char* params_file, - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - const FD_C_ModelFormat model_format -) - -``` - -> Create a DBDetector model object, and return a pointer to manipulate it. -> -> **Params** -> -> * **model_file**(const char*): Model file path -> * **params_file**(const char*): Parameter file path -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Backend inference configuration. None by default, which is the default configuration -> * **model_format**(FD_C_ModelFormat): Model format. -> -> **Return** -> * **fd_c_dbdetector_wrapper**(FD_C_DBDetectorWrapper*): Pointer to manipulate DBDetector object. - -```c -FD_C_ClassifierWrapper* FD_C_CreateClassifierWrapper( - const char* model_file, const char* params_file, - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - const FD_C_ModelFormat model_format -) -``` -> Create a Classifier model object, and return a pointer to manipulate it. -> -> **Params** -> -> * **model_file**(const char*): Model file path -> * **params_file**(const char*): Parameter file path -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Backend inference configuration. None by default, which is the default configuration -> * **model_format**(FD_C_ModelFormat): Model format. -> -> **Return** -> -> * **fd_c_classifier_wrapper**(FD_C_ClassifierWrapper*): Pointer to manipulate Classifier object. 
- -```c -FD_C_RecognizerWrapper* FD_C_CreateRecognizerWrapper( - const char* model_file, const char* params_file, const char* label_path, - FD_C_RuntimeOptionWrapper* fd_c_runtime_option_wrapper, - const FD_C_ModelFormat model_format -) -``` -> Create a Recognizer model object, and return a pointer to manipulate it. -> -> **Params** -> -> * **model_file**(const char*): Model file path -> * **params_file**(const char*): Parameter file path -> * **label_path**(const char*): Label file path -> * **fd_c_runtime_option_wrapper**(FD_C_RuntimeOptionWrapper*): Backend inference configuration. None by default, which is the default configuration -> * **model_format**(FD_C_ModelFormat): Model format. -> -> **Return** -> * **fd_c_recognizer_wrapper**(FD_C_RecognizerWrapper*): Pointer to manipulate Recognizer object. - -```c -FD_C_PPOCRv3Wrapper* FD_C_CreatePPOCRv3Wrapper( - FD_C_DBDetectorWrapper* det_model, - FD_C_ClassifierWrapper* cls_model, - FD_C_RecognizerWrapper* rec_model -) -``` -> Create a PPOCRv3 model object, and return a pointer to manipulate it. -> -> **Params** -> -> * **det_model**(FD_C_DBDetectorWrapper*): DBDetector model -> * **cls_model**(FD_C_ClassifierWrapper*): Classifier model -> * **rec_model**(FD_C_RecognizerWrapper*): Recognizer model -> -> **Return** -> -> * **fd_c_ppocrv3_wrapper**(FD_C_PPOCRv3Wrapper*): Pointer to manipulate PPOCRv3 object. - - - -#### Read and write image - -```c -FD_C_Mat FD_C_Imread(const char* imgpath) -``` - -> Read an image, and return a pointer to cv::Mat. -> -> **Params** -> -> * **imgpath**(const char*): image path -> -> **Return** -> -> * **imgmat**(FD_C_Mat): pointer to cv::Mat object which holds the image. - - -```c -FD_C_Bool FD_C_Imwrite(const char* savepath, FD_C_Mat img); -``` - -> Write image to a file. 
-> -> **Params** -> -> * **savepath**(const char*): save path -> * **img**(FD_C_Mat): pointer to cv::Mat object -> -> **Return** -> -> * **result**(FD_C_Bool): bool to indicate success or failure - - - -#### Prediction - -```c -FD_C_Bool FD_C_PPOCRv3WrapperPredict( - FD_C_PPOCRv3Wrapper* fd_c_ppocrv3_wrapper, - FD_C_Mat img, - FD_C_OCRResult* result) -``` -> -> Predict an image, and generate result. -> -> **Params** -> * **fd_c_ppocrv3_wrapper**(FD_C_PPOCRv3Wrapper*): Pointer to manipulate PPOCRv3 object. -> * **img**(FD_C_Mat): pointer to cv::Mat object, which can be obained by FD_C_Imread interface -> * **result**(FD_C_OCRResult*): OCR prediction results, including the position of the detection box from the detection model, the classification of the direction from the classification model, and the recognition result from the recognition model. Refer to [Vision Model Prediction Results](../../../../../docs/api/vision_results/) for OCRResult - -#### Result - -```c -FD_C_Mat FD_C_VisOcr(FD_C_Mat im, FD_C_OCRResult* ocr_result) -``` -> -> Visualize OCR results and return visualization image. -> -> **Params** -> * **im**(FD_C_Mat): pointer to input image -> * **ocr_result**(FD_C_OCRResult*): pointer to C FD_C_OCRResult structure -> -> **Return** -> * **vis_im**(FD_C_Mat): pointer to visualization image. 
- - - - -## Other Documents - -- [PPOCR Model Description](../../) -- [PPOCRv3 Python Deployment](../python) -- [Model Prediction Results](../../../../../docs/api/vision_results/) -- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv3/cpp/README.md b/examples/vision/ocr/PP-OCRv3/cpp/README.md deleted file mode 100755 index 923bda5130..0000000000 --- a/examples/vision/ocr/PP-OCRv3/cpp/README.md +++ /dev/null @@ -1,64 +0,0 @@ -English | [简体中文](README_CN.md) -# PPOCRv3 C++ Deployment Example - -This directory provides examples that `infer.cc` fast finishes the deployment of PPOCRv3 on CPU/GPU and GPU accelerated by TensorRT. - -Two steps before deployment - -- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) -- 2. Download the precompiled deployment library and samples code according to your development environment. Refer to [FastDeploy Precompiled Library](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) - -Taking the CPU inference on Linux as an example, the compilation test can be completed by executing the following command in this directory. FastDeploy version 0.7.0 or above (x.x.x>=0.7.0) is required to support this model. - -``` -mkdir build -cd build -# Download the FastDeploy precompiled library. Users can choose your appropriate version in the `FastDeploy Precompiled Library` mentioned above -wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz -tar xvf fastdeploy-linux-x64-x.x.x.tgz -cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x -make -j - - -# Download model, image, and dictionary files -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar -tar -xvf ch_PP-OCRv3_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar -tar -xvf ch_PP-OCRv3_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# CPU inference -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 -# GPU inference -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -# TensorRT inference on GPU -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 2 -# Paddle-TRT inference on GPU -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 3 -# KunlunXin XPU inference -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 4 -# Huawei Ascend inference, need to use the infer_static_shape_demo, if the user needs to predict the image continuously, the input image size needs to be prepared as a uniform size. -./infer_static_shape_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -``` - -The above command works for Linux or MacOS. 
For SDK in Windows, refer to: -- [How to use FastDeploy C++ SDK in Windows](../../../../../docs/cn/faq/use_sdk_on_windows.md) - -The visualized result after running is as follows - - - -## Other Documents - -- [C++ API Reference](https://baidu-paddle.github.io/fastdeploy-api/cpp/html/) -- [PPOCR Model Description](../../) -- [PPOCRv3 Python Deployment](../python) -- [Model Prediction Results](../../../../../docs/en/faq/how_to_change_backend.md) -- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv3/cpp/README_CN.md b/examples/vision/ocr/PP-OCRv3/cpp/README_CN.md deleted file mode 100644 index 167d2d9524..0000000000 --- a/examples/vision/ocr/PP-OCRv3/cpp/README_CN.md +++ /dev/null @@ -1,67 +0,0 @@ -[English](README.md) | 简体中文 -# PPOCRv3 C++部署示例 - -本目录下提供`infer.cc`快速完成PPOCRv3在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 - -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) -- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) - -以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试,支持此模型需保证FastDeploy版本0.7.0以上(x.x.x>=0.7.0) - -``` -mkdir build -cd build -# 下载FastDeploy预编译库,用户可在上文提到的`FastDeploy预编译库`中自行选择合适的版本使用 -wget https://bj.bcebos.com/fastdeploy/release/cpp/fastdeploy-linux-x64-x.x.x.tgz -tar xvf fastdeploy-linux-x64-x.x.x.tgz -cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x -make -j - - -# 下载模型,图片和字典文件 -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar -tar -xvf ch_PP-OCRv3_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar -tar -xvf ch_PP-OCRv3_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# CPU推理 -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 -# GPU推理 -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -# GPU上TensorRT推理 -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 2 -# GPU上Paddle-TRT推理 -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 3 -# 昆仑芯XPU推理 -./infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 4 -# 华为昇腾推理,需要使用静态shape的demo, 若用户需要连续地预测图片, 输入图片尺寸需要准备为统一尺寸 -./infer_static_shape_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -``` - -以上命令只适用于Linux或MacOS, Windows下SDK的使用方式请参考: -- [如何在Windows中使用FastDeploy C++ SDK](../../../../../docs/cn/faq/use_sdk_on_windows.md) - -如果用户使用华为昇腾NPU部署, 请参考以下方式在部署前初始化部署环境: -- [如何使用华为昇腾NPU部署](../../../../../docs/cn/faq/use_sdk_on_ascend.md) - -运行完成可视化结果如下图所示 - - - -## 其它文档 - -- [C++ API查阅](https://baidu-paddle.github.io/fastdeploy-api/cpp/html/) -- [PPOCR 系列模型介绍](../../) -- [PPOCRv3 Python部署](../python) -- 
[模型预测结果说明](../../../../../docs/cn/faq/how_to_change_backend.md) -- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv3/csharp/CMakeLists.txt b/examples/vision/ocr/PP-OCRv3/csharp/CMakeLists.txt deleted file mode 100644 index 7ae8e2aba3..0000000000 --- a/examples/vision/ocr/PP-OCRv3/csharp/CMakeLists.txt +++ /dev/null @@ -1,22 +0,0 @@ -PROJECT(infer_demo CSharp) -CMAKE_MINIMUM_REQUIRED (VERSION 3.10) - -# Set the C# language version (defaults to 3.0 if not set). -set(CMAKE_CSharp_FLAGS "/langversion:10") -set(CMAKE_DOTNET_TARGET_FRAMEWORK "net6.0") -set(CMAKE_DOTNET_SDK "Microsoft.NET.Sdk") - -# 指定下载解压后的fastdeploy库路径 -option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") - -include(${FASTDEPLOY_INSTALL_DIR}/FastDeployCSharp.cmake) - - -add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cs) - -set_property(TARGET infer_demo PROPERTY VS_DOTNET_REFERENCES - ${FASTDEPLOY_DOTNET_REFERENCES} -) - -set_property(TARGET infer_demo - PROPERTY VS_PACKAGE_REFERENCES ${FASTDEPLOY_PACKAGE_REFERENCES}) diff --git a/examples/vision/ocr/PP-OCRv3/csharp/README.md b/examples/vision/ocr/PP-OCRv3/csharp/README.md deleted file mode 100755 index f0385b2dc9..0000000000 --- a/examples/vision/ocr/PP-OCRv3/csharp/README.md +++ /dev/null @@ -1,153 +0,0 @@ -English | [简体中文](README_CN.md) -# PPOCRv3 C# Deployment Example - -This directory provides `infer.cs` to finish the deployment of PPOCRv3 on CPU/GPU. - -Before deployment, two steps require confirmation - -- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) -- 2. Download the precompiled deployment library and samples code according to your development environment. 
Refer to [FastDeploy Precompiled Library](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) - -Please follow below instructions to compile and test in Windows. FastDeploy version 1.0.4 or above (x.x.x>=1.0.4) is required to support this model. - -## 1. Download C# package management tool nuget client -> https://dist.nuget.org/win-x86-commandline/v6.4.0/nuget.exe - -Add nuget program into system variable **PATH** - -## 2. Download model and image for test -> https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar # (Decompress it) -> https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -> https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar -> https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg -> https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -## 3. Compile example code - -Open `x64 Native Tools Command Prompt for VS 2019` command tool on Windows, cd to the demo path of ppyoloe and execute commands - -```shell -cd D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\ocr\PP-OCRv3\csharp - -mkdir build && cd build -cmake .. -G "Visual Studio 16 2019" -A x64 -DFASTDEPLOY_INSTALL_DIR=D:\Download\fastdeploy-win-x64-gpu-x.x.x -DCUDA_DIRECTORY="C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.2" - -nuget restore -msbuild infer_demo.sln /m:4 /p:Configuration=Release /p:Platform=x64 -``` - -For more information about how to use FastDeploy SDK to compile a project with Visual Studio 2019. Please refer to -- [Using the FastDeploy C++ SDK on Windows Platform](../../../../../docs/en/faq/use_sdk_on_windows.md) - -## 4. Execute compiled program - -fastdeploy.dll and related dynamic libraries are required by the program. FastDeploy provide a script to copy all required dll to your program path. 
- -```shell -cd D:\Download\fastdeploy-win-x64-gpu-x.x.x - -fastdeploy_init.bat install %cd% D:\Download\fastdeploy-win-x64-gpu-x.x.x\examples\vision\ocr\PP-OCRv3\csharp\build\Release -``` - -Then you can run your program and test the model with image -```shell -cd Release -# CPU inference -infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 0 -# GPU inference -infer_demo ./ch_PP-OCRv3_det_infer ./ch_ppocr_mobile_v2.0_cls_infer ./ch_PP-OCRv3_rec_infer ./ppocr_keys_v1.txt ./12.jpg 1 -``` - -## PPOCRv3 C# Interface - -### Model Class - -```c# -fastdeploy.vision.ocr.DBDetector( - string model_file, - string params_file, - fastdeploy.RuntimeOption runtime_option = null, - fastdeploy.ModelFormat model_format = ModelFormat.PADDLE) -``` - -> DBDetector initialization - -> **Params** - ->> * **model_file**(str): Model file path ->> * **params_file**(str): Parameter file path ->> * **runtime_option**(RuntimeOption): Backend inference configuration. None by default, which is the default configuration ->> * **model_format**(ModelFormat): Model format. - -```c# -fastdeploy.vision.ocr.Classifier( - string model_file, - string params_file, - fastdeploy.RuntimeOption runtime_option = null, - fastdeploy.ModelFormat model_format = ModelFormat.PADDLE) -``` - -> Classifier initialization - -> **Params** - ->> * **model_file**(str): Model file path ->> * **params_file**(str): Parameter file path ->> * **runtime_option**(RuntimeOption): Backend inference configuration. None by default, which is the default configuration ->> * **model_format**(ModelFormat): Model format. 
- -```c# -fastdeploy.vision.ocr.Recognizer( - string model_file, - string params_file, - string label_path, - fastdeploy.RuntimeOption runtime_option = null, - fastdeploy.ModelFormat model_format = ModelFormat.PADDLE) -``` - -> Recognizer initialization - -> **Params** - ->> * **model_file**(str): Model file path ->> * **params_file**(str): Parameter file path ->> * **runtime_option**(RuntimeOption): Backend inference configuration. None by default, which is the default configuration ->> * **model_format**(ModelFormat): Model format. - -```c# -fastdeploy.pipeline.PPOCRv3Model( - DBDetector dbdetector, - Classifier classifier, - Recognizer recognizer) -``` - -> PPOCRv3Model initialization - -> **Params** - ->> * **det_model**(FD_C_DBDetectorWrapper*): DBDetector model ->> * **cls_model**(FD_C_ClassifierWrapper*): Classifier model ->> * **rec_model**(FD_C_RecognizerWrapper*): Recognizer model - -#### Predict Function - -```c# -fastdeploy.OCRResult Predict(OpenCvSharp.Mat im) -``` - -> Model prediction interface. Input images and output results directly. -> -> **Params** -> ->> * **im**(Mat): Input images in HWC or BGR format ->> -> **Return** -> ->> * **result**: OCR prediction results, including the position of the detection box from the detection model, the classification of the direction from the classification model, and the recognition result from the recognition model. 
Refer to [Vision Model Prediction Results](../../../../../docs/api/vision_results/) for OCRResult - -## Other Documents - -- [PPOCR Model Description](../../) -- [PPOCRv3 Python Deployment](../python) -- [Model Prediction Results](../../../../../docs/api/vision_results/) -- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv3/mini_program/README.md b/examples/vision/ocr/PP-OCRv3/mini_program/README.md deleted file mode 100644 index 996331aeac..0000000000 --- a/examples/vision/ocr/PP-OCRv3/mini_program/README.md +++ /dev/null @@ -1,40 +0,0 @@ -English | [简体中文](README_CN.md) -# PP-OCRv3 Wechat Mini-program Deployment Example - -This document introduces the deployment of PP-OCRv3 model from PaddleOCR in Wechat mini-program, and the js interface in the @paddle-js-models/ocr npm package. - - -## Deploy PP-OCRv3 models in Wechat Mini-program - -For the deployment of PP-OCRv3 models in Wechat mini-program, refer to [**reference document**](../../../../application/js/mini_program) - - -## PP-OCRv3 js interface - -``` -import * as ocr from "@paddle-js-models/ocr"; -await ocr.init(detConfig, recConfig); -const res = await ocr.recognize(img, option, postConfig); -``` -ocr model loading and initialization, where the model is in Paddle.js model format. For the conversion of js models, refer to [document](../../../../application/js/web_demo/README.md) - -**init function parameter** - -> * **detConfig**(dict): The configuration parameter for text detection model. 
Default {modelPath: 'https://js-models.bj.bcebos.com/PaddleOCR/PP-OCRv3/ch_PP-OCRv3_det_infer_js_960/model.json', fill: '#fff', mean: [0.485, 0.456, 0.406],std: [0.229, 0.224, 0.225]}; Among them, modelPath is the path of the text detection model; fill is the padding value in the image pre-processing; mean and std are the mean and standard deviation in the pre-processing -> * **recConfig**(dict)): The configuration parameter for text recognition model. Default {modelPath: 'https://js-models.bj.bcebos.com/PaddleOCR/PP-OCRv3/ch_PP-OCRv3_rec_infer_js/model.json', fill: '#000', mean: [0.5, 0.5, 0.5], std: [0.5, 0.5, 0.5]}; Among them, modelPath is the path of the text detection model, fill is the padding value in the image pre-processing, and mean/std are the mean and standard deviation in the pre-processing - - -**recognize function parameter** - -> * **img**(HTMLImageElement): Enter an image parameter in HTMLImageElement. -> * **option**(dict): The canvas parameter of the visual text detection box. No need to set. -> * **postConfig**(dict): Text detection post-processing parameter. Default: {shape: 960, thresh: 0.3, box_thresh: 0.6, unclip_ratio:1.5}; thresh is the binarization threshold of the output prediction image; box_thresh is the threshold of the output box, below which the prediction box will be discarded; unclip_ratio is the expansion ratio of the output box. 
- - -## Other Documents - -- [PP-OCR Model Description](../../) -- [PP-OCRv3 C++ Deployment](../cpp) -- [Model Prediction Results](../../../../../docs/api/vision_results/) -- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md) -- [Web demo document of PP-OCRv3 models](../../../../application/js/web_demo/README.md) diff --git a/examples/vision/ocr/PP-OCRv3/mini_program/README_CN.md b/examples/vision/ocr/PP-OCRv3/mini_program/README_CN.md deleted file mode 100644 index e3a9691008..0000000000 --- a/examples/vision/ocr/PP-OCRv3/mini_program/README_CN.md +++ /dev/null @@ -1,40 +0,0 @@ -[English](README.md) | 简体中文 -# PP-OCRv3 微信小程序部署示例 - -本节介绍部署PaddleOCR的PP-OCRv3模型在微信小程序中运行,以及@paddle-js-models/ocr npm包中的js接口。 - - -## 微信小程序部署PP-OCRv3模型 - -PP-OCRv3模型部署到微信小程序[**参考文档**](../../../../application/js/mini_program) - - -## PP-OCRv3 js接口 - -``` -import * as ocr from "@paddle-js-models/ocr"; -await ocr.init(detConfig, recConfig); -const res = await ocr.recognize(img, option, postConfig); -``` -ocr模型加载和初始化,其中模型为Paddle.js模型格式,js模型转换方式参考[文档](../../../../application/js/web_demo/README.md) - -**init函数参数** - -> * **detConfig**(dict): 文本检测模型配置参数,默认值为 {modelPath: 'https://js-models.bj.bcebos.com/PaddleOCR/PP-OCRv3/ch_PP-OCRv3_det_infer_js_960/model.json', fill: '#fff', mean: [0.485, 0.456, 0.406],std: [0.229, 0.224, 0.225]}; 其中,modelPath为文本检测模型路径,fill 为图像预处理padding的值,mean和std分别为预处理的均值和标准差 -> * **recConfig**(dict)): 文本识别模型配置参数,默认值为 {modelPath: 'https://js-models.bj.bcebos.com/PaddleOCR/PP-OCRv3/ch_PP-OCRv3_rec_infer_js/model.json', fill: '#000', mean: [0.5, 0.5, 0.5], std: [0.5, 0.5, 0.5]}; 其中,modelPath为文本检测模型路径,fill 为图像预处理padding的值,mean和std分别为预处理的均值和标准差 - - -**recognize函数参数** - -> * **img**(HTMLImageElement): 输入图像参数,类型为HTMLImageElement。 -> * **option**(dict): 可视化文本检测框的canvas参数,可不用设置。 -> * **postConfig**(dict): 文本检测后处理参数,默认值为:{shape: 960, thresh: 0.3, box_thresh: 0.6, unclip_ratio:1.5}; 
thresh是输出预测图的二值化阈值;box_thresh是输出框的阈值,低于此值的预测框会被丢弃,unclip_ratio是输出框扩大的比例。 - - -## 其它文档 - -- [PP-OCR 系列模型介绍](../../) -- [PP-OCRv3 C++部署](../cpp) -- [模型预测结果说明](../../../../../docs/api/vision_results/) -- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md) -- [PP-OCRv3模型web demo文档](../../../../application/js/web_demo/README.md) diff --git a/examples/vision/ocr/PP-OCRv3/python/README.md b/examples/vision/ocr/PP-OCRv3/python/README.md deleted file mode 100755 index 977c99b9f7..0000000000 --- a/examples/vision/ocr/PP-OCRv3/python/README.md +++ /dev/null @@ -1,55 +0,0 @@ -English | [简体中文](README_CN.md) -# PPOCRv3 Python Deployment Example - -Two steps before deployment - -- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) -- 2. Install FastDeploy Python whl package. Refer to [FastDeploy Python Installation](../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) - -This directory provides examples that `infer.py` fast finishes the deployment of PPOCRv3 on CPU/GPU and GPU accelerated by TensorRT. 
The script is as follows - -``` - -# Download model, image, and dictionary files -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar -tar xvf ch_PP-OCRv3_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar -tar xvf ch_PP-OCRv3_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# Download the example code for deployment -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vision/ocr/PP-OCRv3/python/ - -# CPU inference -python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device cpu -# GPU inference -python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device gpu -# TensorRT inference on GPU -python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device gpu --backend trt -# KunlunXin XPU inference -python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device kunlunxin -# HUAWEI Ascend inference requires static shape script. The size of input images should be consistent if you want to continuously predict images. 
-python infer_static_shape.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device ascend -``` - -The visualized result after running is as follows - - - - - -## Other Documents - -- [Python API reference](https://baidu-paddle.github.io/fastdeploy-api/python/html/) -- [PPOCR Model Description](../../) -- [PPOCRv3 C++ Deployment](../cpp) -- [Model Prediction Results](../../../../../docs/api/vision_results/) -- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv3/python/README_CN.md b/examples/vision/ocr/PP-OCRv3/python/README_CN.md deleted file mode 100644 index ae1f59954f..0000000000 --- a/examples/vision/ocr/PP-OCRv3/python/README_CN.md +++ /dev/null @@ -1,55 +0,0 @@ -[English](README.md) | 简体中文 -# PPOCRv3 Python部署示例 - -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) -- 2. 
FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) - -本目录下提供`infer.py`快速完成PPOCRv3在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 - -``` - -# 下载模型,图片和字典文件 -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar -tar xvf ch_PP-OCRv3_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar -tar xvf ch_PP-OCRv3_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -#下载部署示例代码 -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vision/ocr/PP-OCRv3/python/ - -# CPU推理 -python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device cpu -# GPU推理 -python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device gpu -# GPU上使用TensorRT推理 -python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device gpu --backend trt -# 昆仑芯XPU推理 -python infer.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device kunlunxin -# 华为昇腾推理,需要使用静态shape脚本, 若用户需要连续地预测图片, 输入图片尺寸需要准备为统一尺寸 -python infer_static_shape.py --det_model ch_PP-OCRv3_det_infer --cls_model ch_ppocr_mobile_v2.0_cls_infer --rec_model ch_PP-OCRv3_rec_infer --rec_label_file ppocr_keys_v1.txt --image 12.jpg --device ascend -``` - -运行完成可视化结果如下图所示 
- - - - - -## 其它文档 - -- [Python API文档查阅](https://baidu-paddle.github.io/fastdeploy-api/python/html/) -- [PPOCR 系列模型介绍](../../) -- [PPOCRv3 C++部署](../cpp) -- [模型预测结果说明](../../../../../docs/api/vision_results/) -- [如何切换模型推理后端引擎](../../../../../docs/cn/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv3/python/serving/README.md b/examples/vision/ocr/PP-OCRv3/python/serving/README.md deleted file mode 100644 index 09d8dfcf05..0000000000 --- a/examples/vision/ocr/PP-OCRv3/python/serving/README.md +++ /dev/null @@ -1,44 +0,0 @@ -English | [简体中文](README_CN.md) - -# PP-OCRv3 Python Simple Serving Demo - -## Environment - -- 1. Prepare environment and install FastDeploy Python whl, refer to [download_prebuilt_libraries](../../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) -- 2. For installing the FastDeploy Python whl package, please refer to [FastDeploy Python Installation](../../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) - -Server: -```bash -# Download demo code -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd FastDeploy/examples/vision/ocr/PP-OCRv3/python/serving - -# Download models and labels -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar -tar xvf ch_PP-OCRv3_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar -tar xvf ch_PP-OCRv3_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# Launch server, change the configurations in server.py to select hardware, backend, etc. 
-# and use --host, --port to specify IP and port -fastdeploy simple_serving --app server:app -``` - -Client: -```bash -# Download demo code -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd FastDeploy/examples/vision/ocr/PP-OCRv3/python/serving - -# Download test image -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -# Send request and get inference result (Please adapt the IP and port if necessary) -python client.py -``` diff --git a/examples/vision/ocr/PP-OCRv3/python/serving/README_CN.md b/examples/vision/ocr/PP-OCRv3/python/serving/README_CN.md deleted file mode 100644 index f026aab080..0000000000 --- a/examples/vision/ocr/PP-OCRv3/python/serving/README_CN.md +++ /dev/null @@ -1,44 +0,0 @@ -简体中文 | [English](README.md) - -# PP-OCRv3 Python轻量服务化部署示例 - -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) -- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) - -服务端: -```bash -# 下载部署示例代码 -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd FastDeploy/examples/vision/ocr/PP-OCRv3/python/serving - -# 下载模型和字典文件 -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar -tar xvf ch_PP-OCRv3_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar -tar xvf ch_PP-OCRv3_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# 启动服务,可修改server.py中的配置项来指定硬件、后端等 -# 可通过--host、--port指定IP和端口号 -fastdeploy simple_serving --app server:app -``` - -客户端: -```bash -# 下载部署示例代码 -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd FastDeploy/examples/vision/ocr/PP-OCRv3/python/serving - -# 下载测试图片 -wget 
https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -# 请求服务,获取推理结果(如有必要,请修改脚本中的IP和端口号) -python client.py -``` diff --git a/examples/vision/ocr/PP-OCRv3/rknpu2/README.md b/examples/vision/ocr/PP-OCRv3/rknpu2/README.md deleted file mode 100644 index 06ba9fc6eb..0000000000 --- a/examples/vision/ocr/PP-OCRv3/rknpu2/README.md +++ /dev/null @@ -1,77 +0,0 @@ -# PaddleOCR 模型部署 - -## PaddleOCR为多个模型组合串联任务,包含如下几个模型构成 - -* 文本检测 `DBDetector` -* [可选]方向分类 `Classifer` 用于调整进入文字识别前的图像方向 -* 文字识别 `Recognizer` 用于从图像中识别出文字 - -根据不同场景, FastDeploy汇总提供如下OCR任务部署, 用户需同时下载3个模型与字典文件(或2个,分类器可选), 完成OCR整个预测流程 - -## PP-OCR 中英文系列模型 - -下表中的模型下载链接由PaddleOCR模型库提供, 详见[PP-OCR系列模型列表](https://github.com/PaddlePaddle/PaddleOCR/blob/release/2.6/doc/doc_ch/models_list.md) - -| OCR版本 | 文本框检测 | 方向分类模型 | 文字识别 | 字典文件 | 说明 | -|:-------------------|:---------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:--------------------------------------------------------| -| ch_PP-OCRv3[推荐] | [ch_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv3系列原始超轻量模型,支持中英文、多语种文本检测 | -| en_PP-OCRv3[推荐] | [en_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | 
[en_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_infer.tar) | [en_dict.txt](https://bj.bcebos.com/paddlehub/fastdeploy/en_dict.txt) | OCRv3系列原始超轻量模型,支持英文与数字识别,除检测模型和识别模型的训练数据与中文模型不同以外,无其他区别 | -| ch_PP-OCRv2 | [ch_PP-OCRv2_det](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv2_rec](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测 | -| ch_PP-OCRv2_mobile | [ch_ppocr_mobile_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_mobile_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2系列原始超轻量模型,支持中英文、多语种文本检测,比PPOCRv2更加轻量 | -| ch_PP-OCRv2_server | [ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2服务器系列模型, 支持中英文、多语种文本检测,比超轻量模型更大,但效果更好 | - -## 模型转换 - -在RKNPU2上使用PPOCR时,我们需要把Paddle静态图模型转为RKNN模型。 - -### 静态图模型转RKNN格式模型 - -rknn_toolkit2工具暂不支持直接从Paddle静态图模型直接转换为RKNN模型,因此我们需要先将Paddle静态图模型转为RKNN模型。 - -```bash -# 下载模型和字典文件 -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar -tar -xvf ch_PP-OCRv3_det_infer.tar - -wget 
https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar -tar -xvf ch_PP-OCRv3_rec_infer.tar - -# 转换模型到ONNX格式的模型 -paddle2onnx --model_dir ch_PP-OCRv3_det_infer \ - --model_filename inference.pdmodel \ - --params_filename inference.pdiparams \ - --save_file ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ - --enable_dev_version True -paddle2onnx --model_dir ch_ppocr_mobile_v2.0_cls_infer \ - --model_filename inference.pdmodel \ - --params_filename inference.pdiparams \ - --save_file ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ - --enable_dev_version True -paddle2onnx --model_dir ch_PP-OCRv3_rec_infer \ - --model_filename inference.pdmodel \ - --params_filename inference.pdiparams \ - --save_file ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ - --enable_dev_version True - -# 固定模型的输入shape -python -m paddle2onnx.optimize --input_model ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ - --output_model ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ - --input_shape_dict "{'x':[1,3,960,960]}" -python -m paddle2onnx.optimize --input_model ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ - --output_model ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ - --input_shape_dict "{'x':[1,3,48,192]}" -python -m paddle2onnx.optimize --input_model ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ - --output_model ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ - --input_shape_dict "{'x':[1,3,48,320]}" - -# 转换ONNX模型到RKNN模型 -python tools/rknpu2/export.py --config_path tools/rknpu2/config/ppocrv3_det.yaml \ - --target_platform rk3588 -python tools/rknpu2/export.py --config_path tools/rknpu2/config/ppocrv3_rec.yaml \ - --target_platform rk3588 -python tools/rknpu2/export.py --config_path tools/rknpu2/config/ppocrv3_cls.yaml \ - --target_platform rk3588 -``` 
diff --git a/examples/vision/ocr/PP-OCRv3/rknpu2/cpp/CMakeLists.txt b/examples/vision/ocr/PP-OCRv3/rknpu2/cpp/CMakeLists.txt deleted file mode 100644 index 9538fea6be..0000000000 --- a/examples/vision/ocr/PP-OCRv3/rknpu2/cpp/CMakeLists.txt +++ /dev/null @@ -1,14 +0,0 @@ -PROJECT(infer_demo C CXX) -CMAKE_MINIMUM_REQUIRED (VERSION 3.10) - -# 指定下载解压后的fastdeploy库路径 -option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") - -include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) - -# 添加FastDeploy依赖头文件 -include_directories(${FASTDEPLOY_INCS}) - -add_executable(infer_static_shape_demo ${PROJECT_SOURCE_DIR}/infer_static_shape.cc) -# 添加FastDeploy库依赖 -target_link_libraries(infer_static_shape_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/ocr/PP-OCRv3/rknpu2/cpp/README.md b/examples/vision/ocr/PP-OCRv3/rknpu2/cpp/README.md deleted file mode 100755 index 93f9b8a9cd..0000000000 --- a/examples/vision/ocr/PP-OCRv3/rknpu2/cpp/README.md +++ /dev/null @@ -1,55 +0,0 @@ -English | [简体中文](README_CN.md) -# PPOCRv3 C++ Deployment Example - -This directory provides examples that `infer.cc` fast finishes the deployment of PPOCRv3 on CPU/GPU and GPU accelerated by TensorRT. - -Two steps before deployment - -- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) -- 2. Download the precompiled deployment library and samples code according to your development environment. Refer to [FastDeploy Precompiled Library](../../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) - -Taking the CPU inference on Linux as an example, the compilation test can be completed by executing the following command in this directory. FastDeploy version 0.7.0 or above (x.x.x>=0.7.0) is required to support this model. - -``` -mkdir build -cd build -# Download the FastDeploy precompiled library. 
Users can choose your appropriate version in the `FastDeploy Precompiled Library` mentioned above -cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x -make -j - - -# Download model, image, and dictionary files -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# CPU推理 -./infer_static_shape_demo ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ - ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ - ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ - ./ppocr_keys_v1.txt \ - ./12.jpg \ - 0 -# RKNPU推理 -./infer_static_shape_demo ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer_rk3588_unquantized.rknn \ - ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v20_cls_infer_rk3588_unquantized.rknn \ - ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer_rk3588_unquantized.rknn \ - ./ppocr_keys_v1.txt \ - ./12.jpg \ - 1 -``` - -The above command works for Linux or MacOS. 
For SDK in Windows, refer to: -- [How to use FastDeploy C++ SDK in Windows](../../../../../../docs/cn/faq/use_sdk_on_windows.md) - -The visualized result after running is as follows - - - -## Other Documents - -- [C++ API Reference](https://baidu-paddle.github.io/fastdeploy-api/cpp/html/) -- [PPOCR Model Description](../README.md) -- [PPOCRv3 Python Deployment](../python) -- [Model Prediction Results](../../../../../../docs/en/faq/how_to_change_backend.md) -- [How to switch the model inference backend engine](../../../../../../docs/en/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv3/rknpu2/cpp/README_CN.md b/examples/vision/ocr/PP-OCRv3/rknpu2/cpp/README_CN.md deleted file mode 100644 index 82860ddc53..0000000000 --- a/examples/vision/ocr/PP-OCRv3/rknpu2/cpp/README_CN.md +++ /dev/null @@ -1,63 +0,0 @@ -[English](README_CN.md) | 简体中文 -# PPOCRv3 C++部署示例 - -本目录下提供`infer.cc`快速完成PPOCRv3在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 - -在部署前,需确认你已经成功完成以下两个操作: - -* [正确编译FastDeploy SDK](../../../../../../docs/cn/faq/rknpu2/build.md). -* [成功转换模型](../README.md). - -在本目录执行如下命令即可完成编译测试,支持此模型需保证FastDeploy版本1.0.3以上(x.x.x>1.0.3), RKNN版本在1.4.1b22以上。 - -``` -mkdir build -cd build -cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-x.x.x -make -j - -# 下载图片和字典文件 -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - - -# 拷贝RKNN模型到build目录 - -# CPU推理 -./infer_static_shape_demo ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ - ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ - ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ - ./ppocr_keys_v1.txt \ - ./12.jpg \ - 0 -# RKNPU推理 -./infer_static_shape_demo ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer_rk3588_unquantized.rknn \ - ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v20_cls_infer_rk3588_unquantized.rknn \ - ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer_rk3588_unquantized.rknn \ - ./ppocr_keys_v1.txt \ - ./12.jpg \ - 1 -``` - -运行完成可视化结果如下图所示: - - - -结果输出如下: - -```text -det boxes: [[276,174],[285,173],[285,178],[276,179]]rec text: rec score:0.000000 cls label: 1 cls score: 0.766602 -det boxes: [[43,408],[483,390],[483,431],[44,449]]rec text: 上海斯格威铂尔曼大酒店 rec score:0.888450 cls label: 0 cls score: 1.000000 -det boxes: [[186,456],[399,448],[399,480],[186,488]]rec text: 打浦路15号 rec score:0.988769 cls label: 0 cls score: 1.000000 -det boxes: [[18,501],[513,485],[514,537],[18,554]]rec text: 绿洲仕格维花园公寓 rec score:0.992730 cls label: 0 cls score: 1.000000 -det boxes: [[78,553],[404,541],[404,573],[78,585]]rec text: 打浦路252935号 rec score:0.983545 cls label: 0 cls score: 1.000000 -Visualized result saved in ./vis_result.jpg -``` - - -## 其它文档 - -- [C++ API查阅](https://baidu-paddle.github.io/fastdeploy-api/cpp/html/) -- [PPOCR 系列模型介绍](../../../README_CN.md) -- [PPOCRv3 Python部署](../python) -- [模型预测结果说明](../../../../../../docs/cn/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv3/rknpu2/python/README.md b/examples/vision/ocr/PP-OCRv3/rknpu2/python/README.md deleted file mode 100755 index b12cef6610..0000000000 --- 
a/examples/vision/ocr/PP-OCRv3/rknpu2/python/README.md +++ /dev/null @@ -1,49 +0,0 @@ -English | [简体中文](README_CN.md) -# PPOCRv3 Python Deployment Example - -Two steps before deployment - -- 1. Software and hardware should meet the requirements. Please refer to [FastDeploy Environment Requirements](../../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) -- 2. Install FastDeploy Python whl package. Refer to [FastDeploy Python Installation](../../../../../../docs/en/build_and_install/download_prebuilt_libraries.md) - -This directory provides examples that `infer.py` fast finishes the deployment of PPOCRv3 on CPU/GPU and GPU accelerated by TensorRT. The script is as follows - -``` -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -# Download the example code for deployment -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vision/ocr/PP-OCRv3/python/ - -python3 infer_static_shape.py \ - --det_model ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ - --cls_model ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ - --rec_model ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ - --rec_label_file ./ppocr_keys_v1.txt \ - --image 12.jpg \ - --device cpu - -# NPU推理 -python3 infer_static_shape.py \ - --det_model ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer_rk3588_unquantized.rknn \ - --cls_model ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v20_cls_infer_rk3588_unquantized.rknn \ - --rec_model ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer_rk3588_unquantized.rknn \ - --rec_label_file ppocr_keys_v1.txt \ - --image 12.jpg \ - --device npu -``` - -The visualized result after running is as follows - - - - - -## Other Documents - -- [Python API reference](https://baidu-paddle.github.io/fastdeploy-api/python/html/) -- [PPOCR Model Description](../README.md) -- [PPOCRv3 C++ Deployment](../cpp) 
-- [Model Prediction Results](../../../../../../docs/api/vision_results/README.md) -- [How to switch the model inference backend engine](../../../../../../docs/en/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv3/rknpu2/python/README_CN.md b/examples/vision/ocr/PP-OCRv3/rknpu2/python/README_CN.md deleted file mode 100644 index e2f6e3d430..0000000000 --- a/examples/vision/ocr/PP-OCRv3/rknpu2/python/README_CN.md +++ /dev/null @@ -1,62 +0,0 @@ -[English](README.md) | 简体中文 -# PPOCRv3 Python部署示例 - -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) -- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../../docs/cn/build_and_install/download_prebuilt_libraries.md) - -本目录下提供`infer.py`快速完成PPOCRv3在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 - -``` - -# 下载模型,图片和字典文件 -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar -tar xvf ch_PP-OCRv3_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar -xvf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar -tar xvf ch_PP-OCRv3_rec_infer.tar - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt - -#下载部署示例代码 -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vision/ocr/PP-OCRv3/python/ - -# CPU推理 -python3 infer_static_shape.py \ - --det_model ./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer.onnx \ - --cls_model ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v2.0_cls_infer.onnx \ - --rec_model ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer.onnx \ - --rec_label_file ./ppocr_keys_v1.txt \ - --image 12.jpg \ - --device cpu - -# NPU推理 -python3 infer_static_shape.py \ - --det_model 
./ch_PP-OCRv3_det_infer/ch_PP-OCRv3_det_infer_rk3588_unquantized.rknn \ - --cls_model ./ch_ppocr_mobile_v2.0_cls_infer/ch_ppocr_mobile_v20_cls_infer_rk3588_unquantized.rknn \ - --rec_model ./ch_PP-OCRv3_rec_infer/ch_PP-OCRv3_rec_infer_rk3588_unquantized.rknn \ - --rec_label_file ppocr_keys_v1.txt \ - --image 12.jpg \ - --device npu -``` - -运行完成可视化结果如下图所示 - - - - - -## 其它文档 - -- [Python API文档查阅](https://baidu-paddle.github.io/fastdeploy-api/python/html/) -- [PPOCR 系列模型介绍](../README.md) -- [PPOCRv3 C++部署](../cpp) -- [模型预测结果说明](../../../../../../docs/api/vision_results/README_CN.md) -- [如何切换模型推理后端引擎](../../../../../../docs/cn/faq/how_to_change_backend.md) diff --git a/examples/vision/ocr/PP-OCRv3/serving/README.md b/examples/vision/ocr/PP-OCRv3/serving/README.md deleted file mode 100755 index d7ccee2cd0..0000000000 --- a/examples/vision/ocr/PP-OCRv3/serving/README.md +++ /dev/null @@ -1,107 +0,0 @@ -English | [简体中文](README_CN.md) -# PP-OCR Serving Deployment Example - -Before the serving deployment, please confirm - -- 1. Refer to [FastDeploy Serving Deployment](../../../../../serving/README.md) for software and hardware environment requirements and image pull commands - -## Introduction -This document describes how to build an OCR text recognition service with FastDeploy. - -The server must be started in docker, while the client does not need to be in a docker container. - -**The models in the path ($PWD) contain the model configuration and code (the server will load the models and code to start the service), which needs to be mapped to docker.** - -OCR consists of det (detection), cls (classification) and rec (recognition) models. - -The diagram of the serving deployment is shown below, where `pp_ocr` connects to `det_preprocess`、`det_runtime` and `det_postprocess`,`cls_pp` connects to `cls_runtime` and `cls_postprocess`,`rec_pp` connects to `rec_runtime` and `rec_postprocess`. 
- -In particular, `cls_pp` and `rec_pp` services are called multiple times in `det_postprocess` to realize the classification and identification of the detection results (multiple boxes), and finally return the identification results to users. -

-
- -
-

- -## Usage -### 1. Server -#### 1.1 Docker -```bash -# Download the repository code -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd FastDeploy/examples/vision/ocr/PP-OCRv3/serving/ - -# Dpwnload model, image, and dictionary files -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar -tar xvf ch_PP-OCRv3_det_infer.tar && mv ch_PP-OCRv3_det_infer 1 -mv 1/inference.pdiparams 1/model.pdiparams && mv 1/inference.pdmodel 1/model.pdmodel -mv 1 models/det_runtime/ && rm -rf ch_PP-OCRv3_det_infer.tar - -wget https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar -tar xvf ch_ppocr_mobile_v2.0_cls_infer.tar && mv ch_ppocr_mobile_v2.0_cls_infer 1 -mv 1/inference.pdiparams 1/model.pdiparams && mv 1/inference.pdmodel 1/model.pdmodel -mv 1 models/cls_runtime/ && rm -rf ch_ppocr_mobile_v2.0_cls_infer.tar - -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar -tar xvf ch_PP-OCRv3_rec_infer.tar && mv ch_PP-OCRv3_rec_infer 1 -mv 1/inference.pdiparams 1/model.pdiparams && mv 1/inference.pdmodel 1/model.pdmodel -mv 1 models/rec_runtime/ && rm -rf ch_PP-OCRv3_rec_infer.tar - -mkdir models/pp_ocr/1 && mkdir models/rec_pp/1 && mkdir models/cls_pp/1 - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt -mv ppocr_keys_v1.txt models/rec_postprocess/1/ - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg - -# x.y.z represent the image version. 
Refer to serving document to replace them with numbers -docker pull registry.baidubce.com/paddlepaddle/fastdeploy:x.y.z-gpu-cuda11.4-trt8.4-21.10 -docker run -dit --net=host --name fastdeploy --shm-size="1g" -v $PWD:/ocr_serving registry.baidubce.com/paddlepaddle/fastdeploy:x.y.z-gpu-cuda11.4-trt8.4-21.10 bash -docker exec -it -u root fastdeploy bash -``` - -#### 1.2 Installation (in docker) -```bash -ldconfig -apt-get install libgl1 -``` - -#### 1.3 Start the server (in docker) -```bash -fastdeployserver --model-repository=/ocr_serving/models -``` - -Parameter: - - `model-repository`(required): The storage path of the entire model streaming_pp_tts. - - `http-port`(optional): Port number for the HTTP service. Default: `8000`. This port is not used in this example. - - `grpc-port`(optional): Port number for the GRPC service. Default: `8001`. - - `metrics-port`(optional): Port number for the serer metric. Default: `8002`. This port is not used in this example. - - -### 2. Client -#### 2.1 Installation -```bash -pip3 install tritonclient[all] -``` - -#### 2.2 Send Requests -```bash -python3 client.py -``` - -## Configuration Change - -The current default configuration runs on GPU. If you want to run it on CPU or other inference engines, please modify the configuration in `models/runtime/config.pbtxt`. Refer to [Configuration Document](../../../../../serving/docs/EN/model_configuration-en.md) for more information. - -## Use VisualDL for serving deployment visualization -You can use VisualDL for [serving deployment visualization](../../../../../serving/docs/EN/vdl_management-en.md) , the above model preparation, deployment, configuration modification and client request operations can all be performed based on VisualDL. - -The serving deployment of PP-OCR by VisualDL only needs the following three steps: -```text -1. Load the model repository: ./vision/ocr/PP-OCRv3/serving -2. 
Download the model resource file: click the det_runtime model, click the version number 1 to add the pre-training model, and select the text recognition model ch_PP-OCRv3_det to download. click the cls_runtime model, click the version number 1 to add the pre-training model, and select the text recognition model ch_ppocr_mobile_v2.0_cls to download. click the rec_runtime model, click the version number 1 to add the pre-training model, and select the text recognition model ch_PP-OCRv3_rec to download. click the rec_postprocess model, click the version number 1 to add the pre-training model, and select the text recognition model ch_PP-OCRv3_rec to download. -3. Start the service: Click the "launch server" button and input the launch parameters. -``` -

- -

diff --git a/examples/vision/ocr/PP-OCRv3/sophgo/README.md b/examples/vision/ocr/PP-OCRv3/sophgo/README.md deleted file mode 100644 index ee3cfc9a93..0000000000 --- a/examples/vision/ocr/PP-OCRv3/sophgo/README.md +++ /dev/null @@ -1,88 +0,0 @@ -# PPOCRv3 SOPHGO C++部署示例 - -## 支持模型列表 - -- PP-OCRv3部署模型实现来自[PP-OCR系列模型列表](https://github.com/PaddlePaddle/PaddleOCR/blob/release/2.6/doc/doc_ch/models_list.md) - -## 准备PPOCRv3部署模型以及转换模型 - -PPOCRv3包括文本框检测模型(ch_PP-OCRv3_det)、方向分类模型(ch_ppocr_mobile_v2.0_cls)、文字识别模型(ch_PP-OCRv3_rec) -SOPHGO-TPU部署模型前需要将以上Paddle模型转换成bmodel模型,我们以ch_PP-OCRv3_det模型为例,具体步骤如下: -- 下载Paddle模型[ch_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar) -- Pddle模型转换为ONNX模型,请参考[Paddle2ONNX](https://github.com/PaddlePaddle/Paddle2ONNX) -- ONNX模型转换bmodel模型的过程,请参考[TPU-MLIR](https://github.com/sophgo/tpu-mlir) - -## 模型转换example - -### 下载ch_PP-OCRv3_det模型,并转换为ONNX模型 -```shell -wget https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar -tar xvf ch_PP-OCRv3_det_infer.tar - -# 修改ch_PP-OCRv3_det模型的输入shape,由动态输入变成固定输入 -python paddle_infer_shape.py --model_dir ch_PP-OCRv3_det_infer \ - --model_filename inference.pdmodel \ - --params_filename inference.pdiparams \ - --save_dir ch_PP-OCRv3_det_infer_fix \ - --input_shape_dict="{'x':[1,3,960,608]}" - -#将固定输入的Paddle模型转换成ONNX模型 -paddle2onnx --model_dir ch_PP-OCRv3_det_infer_fix \ - --model_filename inference.pdmodel \ - --params_filename inference.pdiparams \ - --save_file ch_PP-OCRv3_det_infer_fix.onnx \ - --enable_dev_version True -``` - -### 导出bmodel模型 - -以转换BM1684x的bmodel模型为例子,我们需要下载[TPU-MLIR](https://github.com/sophgo/tpu-mlir)工程,安装过程具体参见[TPU-MLIR文档](https://github.com/sophgo/tpu-mlir/blob/master/README.md)。 -### 1. 安装 -``` shell -docker pull sophgo/tpuc_dev:latest - -# myname1234是一个示例,也可以设置其他名字 -docker run --privileged --name myname1234 -v $PWD:/workspace -it sophgo/tpuc_dev:latest - -source ./envsetup.sh -./build.sh -``` - -### 2. 
ONNX模型转换为bmodel模型 -``` shell -mkdir ch_PP-OCRv3_det && cd ch_PP-OCRv3_det - -#在该文件中放入测试图片,同时将上一步转换的ch_PP-OCRv3_det_infer_fix.onnx放入该文件夹中 -cp -rf ${REGRESSION_PATH}/dataset/COCO2017 . -cp -rf ${REGRESSION_PATH}/image . -#放入onnx模型文件ch_PP-OCRv3_det_infer_fix.onnx - -mkdir workspace && cd workspace - -#将ONNX模型转换为mlir模型,其中参数--output_names可以通过NETRON查看 -model_transform.py \ - --model_name ch_PP-OCRv3_det \ - --model_def ../ch_PP-OCRv3_det_infer_fix.onnx \ - --input_shapes [[1,3,960,608]] \ - --mean 0.0,0.0,0.0 \ - --scale 0.0039216,0.0039216,0.0039216 \ - --keep_aspect_ratio \ - --pixel_format rgb \ - --output_names sigmoid_0.tmp_0 \ - --test_input ../image/dog.jpg \ - --test_result ch_PP-OCRv3_det_top_outputs.npz \ - --mlir ch_PP-OCRv3_det.mlir - -#将mlir模型转换为BM1684x的F32 bmodel模型 -model_deploy.py \ - --mlir ch_PP-OCRv3_det.mlir \ - --quantize F32 \ - --chip bm1684x \ - --test_input ch_PP-OCRv3_det_in_f32.npz \ - --test_reference ch_PP-OCRv3_det_top_outputs.npz \ - --model ch_PP-OCRv3_det_1684x_f32.bmodel -``` -最终获得可以在BM1684x上能够运行的bmodel模型ch_PP-OCRv3_det_1684x_f32.bmodel。按照上面同样的方法,可以将ch_ppocr_mobile_v2.0_cls,ch_PP-OCRv3_rec转换为bmodel的格式。如果需要进一步对模型进行加速,可以将ONNX模型转换为INT8 bmodel,具体步骤参见[TPU-MLIR文档](https://github.com/sophgo/tpu-mlir/blob/master/README.md)。 - -## 其他链接 -- [Cpp部署](./cpp) diff --git a/examples/vision/ocr/PP-OCRv3/sophgo/cpp/README.md b/examples/vision/ocr/PP-OCRv3/sophgo/cpp/README.md deleted file mode 100644 index 22898e60a1..0000000000 --- a/examples/vision/ocr/PP-OCRv3/sophgo/cpp/README.md +++ /dev/null @@ -1,58 +0,0 @@ -# PPOCRv3 C++部署示例 - -本目录下提供`infer.cc`快速完成PPOCRv3模型在SOPHGO BM1684x板子上加速部署的示例。 - -在部署前,需确认以下两个步骤: - -1. 软硬件环境满足要求 -2. 根据开发环境,从头编译FastDeploy仓库 - -以上步骤请参考[SOPHGO部署库编译](../../../../../../docs/cn/build_and_install/sophgo.md)实现 - -## 生成基本目录文件 - -该例程由以下几个部分组成 -```text -. 
-├── CMakeLists.txt -├── build # 编译文件夹 -├── image # 存放图片的文件夹 -├── infer.cc -└── model # 存放模型文件的文件夹 -``` - -## 编译 - -### 编译并拷贝SDK到thirdpartys文件夹 - -请参考[SOPHGO部署库编译](../../../../../../docs/cn/build_and_install/sophgo.md)仓库编译SDK,编译完成后,将在build目录下生成fastdeploy-0.0.3目录. - -### 拷贝bmodel模型文至model文件夹 -将Paddle模型转换为SOPHGO bmodel模型,转换步骤参考[文档](../README.md) -将转换后的SOPHGO bmodel模型文件拷贝至model中 - -### 准备测试图片至image文件夹,以及字典文件 -```bash -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/doc/imgs/12.jpg -cp 12.jpg image/ - -wget https://gitee.com/paddlepaddle/PaddleOCR/raw/release/2.6/ppocr/utils/ppocr_keys_v1.txt -``` - -### 编译example - -```bash -cd build -cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-0.0.3 -make -``` - -## 运行例程 - -```bash -./infer_demo model ./ppocr_keys_v1.txt image/12.jpeg -``` - - -- [模型介绍](../../../) -- [模型转换](../) diff --git a/examples/vision/ocr/PP-OCRv3/web/README.md b/examples/vision/ocr/PP-OCRv3/web/README.md deleted file mode 100644 index 3e497d37d0..0000000000 --- a/examples/vision/ocr/PP-OCRv3/web/README.md +++ /dev/null @@ -1,39 +0,0 @@ -English | [简体中文](README_CN.md) -# PP-OCRv3 Frontend Deployment Example - -This document introduces the deployment of PaddleOCR's PP-OCRv3 models to run in the browser, and the js interface in the @paddle-js-models/ocr npm package. - - -## Frontend Deployment PP-OCRv3 Model - -For PP-OCRv3 model web demo, refer to [**reference document**](../../../../application/js/web_demo/) - - -## PP-OCRv3 js Interface - -``` -import * as ocr from "@paddle-js-models/ocr"; -await ocr.init(detConfig, recConfig); -const res = await ocr.recognize(img, option, postConfig); -``` -ocr model loading and initialization, where the model is in Paddle.js model format. For the conversion of js models, refer to [the document](../../../../application/js/web_demo/README.md) - -**init function parameter** - -> * **detConfig**(dict): The configuration parameter for the text detection model. 
Default {modelPath: 'https://js-models.bj.bcebos.com/PaddleOCR/PP-OCRv3/ch_PP-OCRv3_det_infer_js_960/model.json', fill: '#fff', mean: [0.485, 0.456, 0.406],std: [0.229, 0.224, 0.225]}; Among them, modelPath is the path of the text detection model, fill is the padding value in the image pre-processing, and mean/std are the mean and standard deviation in the pre-processing. -> * **recConfig**(dict)): The configuration parameter for the text recognition model. Default {modelPath: 'https://js-models.bj.bcebos.com/PaddleOCR/PP-OCRv3/ch_PP-OCRv3_rec_infer_js/model.json', fill: '#000', mean: [0.5, 0.5, 0.5], std: [0.5, 0.5, 0.5]}; Among them, modelPath is the path of the text detection model, fill is the padding value in the image pre-processing, and mean/std are the mean and standard deviation in the pre-processing. - - -**recognize function parameter** - -> * **img**(HTMLImageElement): Enter an image parameter in HTMLImageElement. -> * **option**(dict): The canvas parameter of the visual text detection box. No need to set. -> * **postConfig**(dict): Text detection post-processing parameter. Default: {shape: 960, thresh: 0.3, box_thresh: 0.6, unclip_ratio:1.5}; thresh is the binarization threshold of the output prediction image. box_thresh is the threshold of the output box, below which the prediction box will be discarded. unclip_ratio is the expansion ratio of the output box. 
- -## Other Documents - -- [PP-OCR Model Description](../../) -- [PP-OCRv3 C++ Deployment](../cpp) -- [Model Prediction Results](../../../../../docs/api/vision_results/) -- [How to switch the model inference backend engine](../../../../../docs/en/faq/how_to_change_backend.md) -- [PP-OCRv3 Wechat mini-program deployment document](../mini_program/) diff --git a/examples/vision/ocr/README.md b/examples/vision/ocr/README.md deleted file mode 100644 index 97f0d61460..0000000000 --- a/examples/vision/ocr/README.md +++ /dev/null @@ -1,20 +0,0 @@ -English | [简体中文](README_CN.md) -# PaddleOCR Model Deployment - -## PaddleOCR contains a series of tasks with multiple models, including -- Text detection `DBDetector` -- [Optional] Direction classification `Classifer` is used to adjust the direction of images before text recognition -- Character recognition `Recognizer` is used to recognize characters from images - -According to different scenarios, FastDeploy provides the following OCR task deployment. Users need to download three models and dictionary files (or two, optional classifier) simultaneously to complete the entire OCR prediction process - -### PP-OCR Model in English and Chinese Scenarios -The model download links in the following table are provided by PaddleOCR model library. 
Refer to [PP-OCR Model List](https://github.com/PaddlePaddle/PaddleOCR/blob/release/2.6/doc/doc_ch/models_list.md) for details - -| OCR version | Text box detection | Direction classification model | Character recognition | Dictionary file | Note | -|:----|:----|:----|:----|:----|:--------| -| ch_PP-OCRv3[Recommended] |[ch_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv3 Original Ultra-Lightweight Model supports text detection in Chinese, English and multiple languages | -| en_PP-OCRv3[Recommended] |[en_PP-OCRv3_det](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [en_PP-OCRv3_rec](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_infer.tar) | [en_dict.txt](https://bj.bcebos.com/paddlehub/fastdeploy/en_dict.txt) | OCRv3 Original Ultra-Lightweight Model supports English and digital recognition. 
Its training data of detection model and recognition model is different from that of Chinese model, and no other differences can be detected | -| ch_PP-OCRv2 |[ch_PP-OCRv2_det](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_PP-OCRv2_rec](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2 Original Ultra-Lightweight Model supports text detection in Chinese, English and multiple languages | -| ch_PP-OCRv2_mobile |[ch_ppocr_mobile_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_mobile_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar) | [ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2 Original Ultra-Lightweight Model Supports text detection in Chinese, English and multiple languages with lighter weight than PPOCRv2 | -| ch_PP-OCRv2_server |[ch_ppocr_server_v2.0_det](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) | [ch_ppocr_mobile_v2.0_cls](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) | [ch_ppocr_server_v2.0_rec](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) |[ppocr_keys_v1.txt](https://bj.bcebos.com/paddlehub/fastdeploy/ppocr_keys_v1.txt) | OCRv2 Server Model supports text detection in Chinese, English and multiple languages. It has better effects though being larger than the ultra-lightweight model |