1. Source code

```cpp
#include <opencv2/opencv.hpp>
#include <opencv2/core/utils/logger.hpp>
#include <iostream>
#include <sstream>
#include <iomanip>
#include <cstring>

using namespace std;

extern "C" {

int inference_cur_v6_cap(const char* video_path, int flag_acc,
                         char* result_buffer, int buffer_size,
                         char* img_path_buffer) {
    // -------- 1. Load the model --------

    // -------- 2. Open the video stream --------
    cv::VideoCapture cap(video_path);
    if (!cap.isOpened()) {
        std::cerr << "Error: Could not open video file.\n";
        return -1;
    }

    // Query the video's width, height and frame rate
    int vid_width  = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_WIDTH));
    int vid_height = static_cast<int>(cap.get(cv::CAP_PROP_FRAME_HEIGHT));
    double fps = cap.get(cv::CAP_PROP_FPS);
    printf("load video = %s , %d x %d @%.2f\n", video_path, vid_width, vid_height, fps);

    // Write the video info into img_path_buffer
    std::ostringstream oss;
    oss << "OpenCV Version: " << cv::getVersionString()
        << "\n\t load video = " << video_path << " , "
        << vid_width << " x " << vid_height << " @"
        << std::fixed << std::setprecision(2) << fps << "\n";
    string vinfo = oss.str();
    strncpy(img_path_buffer, vinfo.c_str(), buffer_size - 1);
    //strncpy_s(img_path_buffer, buffer_size, vinfo.c_str(), _TRUNCATE);
    img_path_buffer[buffer_size - 1] = '\0';  // ensure null termination

    // Result string: the Video I/O backend actually in use (OpenCV 4.5.5 compatible)
    std::string vid_results = "Current backend: " + cap.getBackendName();
    //vid_results = "Current backend: ";

    // Copy the result back to the caller
    strncpy(result_buffer, vid_results.c_str(), buffer_size - 1);
    //strncpy_s(result_buffer, buffer_size, vid_results.c_str(), _TRUNCATE);
    result_buffer[buffer_size - 1] = '\0';  // ensure null termination

    return 0;
}

}  // extern "C"
```
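For reference, here is a minimal sketch of how a caller might load the resulting .so and invoke the exported function; the library file name and buffer sizes are assumptions for illustration only, not part of the original project.

```cpp
// Hypothetical caller for the exported C function above.
// "libinference.so" and the buffer sizes are illustrative assumptions.
#include <dlfcn.h>
#include <cstdio>

typedef int (*inference_fn)(const char*, int, char*, int, char*);

int main() {
    void* handle = dlopen("./libinference.so", RTLD_NOW);  // assumed library name
    if (!handle) { fprintf(stderr, "dlopen failed: %s\n", dlerror()); return 1; }

    auto fn = reinterpret_cast<inference_fn>(dlsym(handle, "inference_cur_v6_cap"));
    if (!fn) { fprintf(stderr, "dlsym failed: %s\n", dlerror()); dlclose(handle); return 1; }

    char result[512] = {0};
    char info[512]   = {0};
    int rc = fn("assets/5.mp4", 0, result, sizeof(result), info);
    printf("rc=%d\n%s\n%s\n", rc, info, result);

    dlclose(handle);
    return 0;
}
```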
2. The problem
I build this shared library on Ubuntu 18.04 on an Intel(R) Xeon(R) CPU E5-2686 v4, and the resulting .so is then called through the interface above. A normal native build (approach 1) and a cross-compiled arm64-v8a build (approach 2) give completely different results.
Approach 1: OpenCV 4.5.5 (the regular official release).
Reading the video gives: load video = assets/5.mp4 , 1920 x 1080 @27.26
Approach 2: OpenCV 4.5.5 (the official opencv-4.5.5-android-sdk.zip), cross-compiled into a .so that is called from an Android app on an RK3576 board.
Reading the same video gives: /data/user/0/com.example.xapp/cache/AI/default5.mp4 , 0 x 0 @0.00 default5.mp4
Additional notes:
1) Although the two paths differ, the two video files are identical, and both play back normally.
2) To understand why the two environments behave differently, I printed vid_results, i.e.
std::string vid_results = "Current backend: " + cap.getBackendName();
Approach 1 prints: Current backend: FFMPEG
Approach 2 prints: Current backend: ANDROID_NATIVE
3) The shared library only wraps part of OpenCV; reading images works fine in both approaches, only reading video fails.
So on Android the video I/O clearly goes through the ANDROID_NATIVE backend, and that is where it fails (see the backend-selection sketch below).
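As a quick check, the backend can also be requested explicitly when the capture is opened. This is a minimal sketch of my own (not part of the library code above), using OpenCV's documented apiPreference argument; the file path is just the one from approach 2. If FFMPEG is not compiled into the library, opening with cv::CAP_FFMPEG will simply fail, which at least confirms the diagnosis.

```cpp
#include <opencv2/opencv.hpp>
#include <iostream>

// Minimal sketch: try to open the same file with an explicit backend preference.
// cv::CAP_FFMPEG and cv::CAP_ANDROID are standard cv::VideoCaptureAPIs values.
int main() {
    const char* path = "/data/user/0/com.example.xapp/cache/AI/default5.mp4";

    cv::VideoCapture cap(path, cv::CAP_FFMPEG);   // request the FFMPEG backend explicitly
    if (!cap.isOpened()) {
        std::cerr << "FFMPEG backend unavailable or failed to open the file\n";
        cap.open(path, cv::CAP_ANDROID);          // fall back to the Android native backend
    }
    if (cap.isOpened()) {
        std::cout << "Opened with backend: " << cap.getBackendName() << "\n";
    }
    return 0;
}
```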
3. Attempted fix: cross-compiling OpenCV 4.5.5 with FFmpeg support for arm64-v8a on Ubuntu 18.04
Commands tried:

```sh
export FFMPEG_ROOT=/home/tr/opencv_cross/ffmpeg
export ANDROID_NDK=~/opencv_cross/android-ndk-r21e
export TOOLCHAIN=$ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64
export PATH=$TOOLCHAIN/bin:$PATH

cmake \
    -DCMAKE_TOOLCHAIN_FILE=$ANDROID_NDK/build/cmake/android.toolchain.cmake \
    -DANDROID_ABI=arm64-v8a \
    -DANDROID_NATIVE_API_LEVEL=24 \
    -DANDROID_NDK=$ANDROID_NDK \
    -DWITH_FFMPEG=ON \
    -DENABLE_CONFIG_VERIFICATION=OFF \
    -DBUILD_TESTS=OFF \
    -DFFMPEG_FOUND=TRUE \
    -DHAVE_FFMPEG=ON \
    -DHAVE_FFMPEG_AVCODEC=ON \
    -DHAVE_FFMPEG_AVFORMAT=ON \
    -DHAVE_FFMPEG_AVUTIL=ON \
    -DHAVE_FFMPEG_SWSCALE=ON \
    -DHAVE_FFMPEG_SWRESAMPLE=ON \
    -DFFMPEG_LIBRARIES="
        $FFMPEG_ROOT/android/aarch64/lib/libavcodec.a;
        $FFMPEG_ROOT/android/aarch64/lib/libavformat.a;
        $FFMPEG_ROOT/android/aarch64/lib/libavutil.a;
        $FFMPEG_ROOT/android/aarch64/lib/libswscale.a;
        $FFMPEG_ROOT/android/aarch64/lib/libswresample.a" \
    -DFFMPEG_INCLUDE_DIRS="$FFMPEG_ROOT/android/aarch64/include" \
    -DBUILD_SHARED_LIBS=OFF \
    -DENABLE_PIC=ON \
    -DCMAKE_SHARED_LINKER_FLAGS="
        -Wl,--whole-archive
        ${FFMPEG_ROOT}/android/aarch64/lib/libavcodec.a
        ${FFMPEG_ROOT}/android/aarch64/lib/libavformat.a
        -Wl,--no-whole-archive
        -landroid -lmediandk -llog -lz" \
    -DZLIB_LIBRARY=$ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/aarch64-linux-android/libz.a \
    -DZLIB_INCLUDE_DIR=$ANDROID_NDK/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/include \
    -DBUILD_opencv_java=OFF \
    -DBUILD_ANDROID_PROJECTS=OFF \
    -DBUILD_ANDROID_EXAMPLES=OFF \
    -DOPENCV_EXTRA_MODULES_PATH=../../opencv_contrib-4.5.5/modules \
    -DCMAKE_BUILD_TYPE=Release \
    -DCMAKE_INSTALL_PREFIX=./install \
    ..

make
```
Notes on the output:
1) Both cmake and make complete successfully.
2) But the cmake summary still reports FFMPEG as NO!!!
```text
--     JPEG 2000:        build (ver 2.4.0)
--     OpenEXR:          build (ver 2.3.0)
--     HDR:              YES
--     SUNRASTER:        YES
--     PXM:              YES
--     PFM:              YES
--
--   Video I/O:
--     FFMPEG:           NO
--       avcodec:        NO
--       avformat:       NO
--       avutil:         NO
--       swscale:        NO
--       avresample:     NO
```
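To double-check what a given libopencv build actually contains, here is a small sketch of my own (standard OpenCV 4.x APIs, nothing specific to this build) that prints the build summary and the registered Video I/O backends at runtime:

```cpp
#include <opencv2/opencv.hpp>
#include <opencv2/videoio/registry.hpp>
#include <iostream>

// Minimal sketch: list the Video I/O backends compiled into this OpenCV build.
// If FFMPEG does not appear here, the library was built without it.
int main() {
    std::cout << cv::getBuildInformation() << "\n";  // full config summary, incl. FFMPEG: YES/NO

    for (const auto& api : cv::videoio_registry::getBackends()) {
        std::cout << "videoio backend: " << cv::videoio_registry::getBackendName(api) << "\n";
    }
    return 0;
}
```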
I have already spent two days on this, but every attempt has failed 😭.
How should OpenCV be built so that the arm64-v8a library includes FFmpeg? Has anyone else run into this problem?