Error: quot;Fatal signal 11 (SIGSEGV), code 1quot; when passing Mat object from java to jni function(错误:“致命信号 11 (SIGSEGV),代码 1将 Mat 对象从 java 传递给 jni 函数时)
问题描述
我正在使用 OpenCV 函数运行摄像机.我将 Mat 对象传递给它工作了一段时间的 jni 函数,它们是错误:
I am running the video camera using the OpenCV function. I pass the Mat object to the jni function it works for awhile, them the error:
10-10 13:03:17.978: A/libc(28693): Fatal signal 11 (SIGSEGV), code 1, fault addr 0x9 in tid 28791 (Thread-5418)
运行相机并调用jni函数的Java代码:
Java code that runs the camera and calls the jni function:
package com.adhamenaya;
import java.util.ArrayList;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
//import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
public class MainActivity extends Activity implements CvCameraViewListener2,
        OnTouchListener {
    private static final String TAG = "OCVSample::Activity";
    private Mat mRgba;
    private Mat mGray;
    private CameraBridgeViewBase mOpenCvCameraView;
    private ArrayList<Mat> mats = new ArrayList<Mat>();
    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
            case LoaderCallbackInterface.SUCCESS: {
                Log.i(TAG, "OpenCV loaded successfully");
                mOpenCvCameraView.enableView();
                mOpenCvCameraView.setOnTouchListener(MainActivity.this);
            }
                break;
            default: {
                super.onManagerConnected(status);
            }
                break;
            }
        }
    };
    public MainActivity() {
        Log.i(TAG, "Instantiated new " + this.getClass());
    }
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        Log.i(TAG, "called onCreate");
        super.onCreate(savedInstanceState);
        Native.loadlibs();
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.cam_view);
        mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
        mOpenCvCameraView.setCvCameraViewListener(this);
        Native.setup(mFaceCascadeFile, mNoseCascadeFile, mLandmarks);
    }
    @Override
    public void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }
    @Override
    public void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this,
                mLoaderCallback);
    }
    public void onDestroy() {
        super.onDestroy();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }
    public void onCameraViewStarted(int width, int height) {
        mGray = new Mat();
        mRgba = new Mat();
    }
    public void onCameraViewStopped() {
    }
    public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
        mRgba = inputFrame.rgba();
        Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_BGRA2GRAY);
        Native.runJni(mFaceCascadeFile, mNoseCascadeFile, mLandmarks,
                mRgba.getNativeObjAddr());
        return mRgba;
    }
    @Override
    public boolean onTouch(View v, MotionEvent event) {
        // TODO Auto-generated method stub
        return false;
    }
}
Jni 函数:
JNIEXPORT jbyteArray JNICALL Java_com_adhamenaya_Native_runJni(JNIEnv * env,
        jobject obj, jstring faceCascadeFile, jstring noseCascadeFile,
        jstring landmarks, jlong frame) {
    cv::Mat& inFrame = *(cv::Mat*) frame;
    if (!gsys.loadFaceCascade(faceCascadeFnameStr)) {
        LOG("Could not load face cascade");
        gsys.loadFaceCascade(faceCascadeFnameStr);
    } else {
        LOG("Face cascade: OK");
    }
    if (!gsys.loadNoseCascade(noseCascadeFnameStr)) {
        LOG("Could not load nose cascade");
        gsys.loadNoseCascade(noseCascadeFnameStr);
    } else {
        LOG("Nose cascade: OK");
    }
    gsys.setFrameRate(30);
    gsys.setProgramState(DETECT);
    clock_t tin, tout = 0;
    cv::flip(inFrame, inFrame, 0);
    cv::transpose(inFrame, inFrame);
    dlib::shape_predictor pose_model;
    dlib::deserialize(landmarksStr) >> pose_model;
     gsys.setCurrentFrame(inFrame);
     tin = clock();
     trigger_hr(gsys, faces, pose_model);
     // Process the frame
     size_t spm;
     float motionStrengthX, motionStrengthY;
     float phiYaw = -0xFFFFFFFF, thetaPitch = -0xFFFFFFFF;
     if (faces.size()) {
     faces[0].getSpm(gsys, spm, motionStrengthX, motionStrengthY);
     faces[0].getFacePose(phiYaw, thetaPitch);
     }
     tout = tout + clock() - tin;
     if ((gsys.getFrameCount() % 30) == 29) {
     double secs_between_frames = (double) (tout) / (CLOCKS_PER_SEC * 30.0f);
     printf("FPS = %2.2f
", 1.0f / secs_between_frames);
     LOG("FPS = %2.2f ", 1.0f / secs_between_frames);
     tout = 0;
     }
     char spmText[100];
     //sprintf(spmText,
     //     "SPM = %zu, P = %2.2f, T = %2.2f, MS-X = %2.2f, MS-Y = %2.2f", spm,
     //     phiYaw, thetaPitch, motionStrengthX, motionStrengthY);
     LOG("SPM = %zu, P = %2.2f, T = %2.2f, MS-X = %2.2f, MS-Y = %2.2f", spm,
     phiYaw, thetaPitch, motionStrengthX, motionStrengthY);
    std::string str;
    str = "SPM=";
    jbyteArray arr = env->NewByteArray(str.length());
    env->SetByteArrayRegion(arr, 0, str.length(), (jbyte*) str.c_str());
    return arr;
}
请帮助我.
推荐答案
经过两天的在线搜索,我发现问题是因为内存泄漏",当我从一个视频并发送给他们 jni 函数,完成工作后不释放帧,所以我总是会把帧留在内存中.
After two days of searching online, I could figure out that the problem is because of the 'Memory leak', and this happens when I am reading the frames from a video and send them the jni function, without releasing frames after finishing working on them, so always I will have the frames in the memory.
我所做的是将C++代码中的Mat对象移到函数作用域之外,使其成为类作用域对象,这样每次函数执行时都不会创建新对象叫.
What I did is to move the Mat object in the C++ code outside the scope of the function and make it a class scope object, so it will not create a new object each time the function is called. 
另外,我打电话给:
inFrame.release();
在完成工作后释放内存.
to free the memory after finishing working on it.
这篇关于错误:“致命信号 11 (SIGSEGV),代码 1"将 Mat 对象从 java 传递给 jni 函数时的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持编程学习网!
本文标题为:错误:“致命信号 11 (SIGSEGV),代码 1"将 Mat 对
				
        
 
            
        - MalformedJsonException:在第1行第1列路径中使用JsonReader.setLenient(True)接受格式错误的JSON 2022-01-01
 - Android viewpager检测滑动超出范围 2022-01-01
 - 如何检查发送到 Android 应用程序的 Firebase 消息的传递状态? 2022-01-01
 - 在测试浓缩咖啡时,Android设备不会在屏幕上启动活动 2022-01-01
 - Android - 我如何找出用户有多少未读电子邮件? 2022-01-01
 - Android - 拆分 Drawable 2022-01-01
 - 想使用ViewPager,无法识别android.support.*? 2022-01-01
 - 用 Swift 实现 UITextFieldDelegate 2022-01-01
 - android 4中的android RadioButton问题 2022-01-01
 - 使用自定义动画时在 iOS9 上忽略 edgesForExtendedLayout 2022-01-01
 
				
				
				
				