
java.lang.ExceptionInInitializerError in Recording

I get this error when I build and run my program. I have tried to solve it but failed. Can someone help me? Thanks.

Process: com.detection, PID: 6403
java.lang.ExceptionInInitializerError
    at java.lang.Class.classForName(Native Method)
    at java.lang.Class.forName(Class.java:324)
    at com.googlecode.javacpp.Loader.load(Loader.java:561)
    at com.googlecode.javacpp.Loader.load(Loader.java:540)
    at com.googlecode.javacv.cpp.opencv_core$CvArr.<clinit>(opencv_core.java:156)
    at com.googlecode.javacv.cpp.opencv_core$IplImage.create(opencv_core.java:421)
    at com.detection.MainActivity.initRecorder(MainActivity.java:188)
    at com.detection.MainActivity.onCreate(MainActivity.java:90)
    at android.app.Activity.performCreate(Activity.java:6904)
    at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1136)
    at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:3267)
    at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3416)
    at android.app.ActivityThread.access$1100(ActivityThread.java:229)
    at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1821)
    at android.os.Handler.dispatchMessage(Handler.java:102)
    at android.os.Looper.loop(Looper.java:148)
    at android.app.ActivityThread.main(ActivityThread.java:7407)
    at java.lang.reflect.Method.invoke(Native Method)
    at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1230)
    at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1120)
 Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'java.lang.Object java.util.HashMap.get(java.lang.Object)' on a null object reference
    at com.googlecode.javacpp.Loader.sizeof(Loader.java:837)
    at com.googlecode.javacv.cpp.opencv_core.<clinit>(opencv_core.java:1259)
    at java.lang.Class.classForName(Native Method)
    at java.lang.Class.forName(Class.java:324)
    at com.googlecode.javacpp.Loader.load(Loader.java:561)
    at com.googlecode.javacpp.Loader.load(Loader.java:540)
    at com.googlecode.javacv.cpp.opencv_core$CvArr.<clinit>(opencv_core.java:156)
    at com.googlecode.javacv.cpp.opencv_core$IplImage.create(opencv_core.java:421)
    at com.detection.MainActivity.initRecorder(MainActivity.java:188)
    at com.detection.MainActivity.onCreate(MainActivity.java:90)
    at android.app.Activity.performCreate(Activity.java:6904)
    at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1136)
    at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:3267)
    at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:3416)
    at android.app.ActivityThread.access$1100(ActivityThread.java:229)
    at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1821)
    at android.os.Handler.dispatchMessage(Handler.java:102)
    at android.os.Looper.loop(Looper.java:148)
    at android.app.ActivityThread.main(ActivityThread.java:7407)
    at java.lang.reflect.Method.invoke(Native Method)
    at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1230)
    at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1120)
08-17 15:52:37.291 6403-6411/com.detection E/System: Uncaught exception thrown by finalizer
08-17 15:52:37.291 6403-6411/com.detection E/System: java.lang.IllegalStateException: Binder has been finalized!
    at android.os.BinderProxy.transactNative(Native Method)
    at android.os.BinderProxy.transact(Binder.java:503)
    at android.os.IPowerManager$Stub$Proxy.releaseWakeLock(IPowerManager.java:686)
    at android.os.PowerManager$WakeLock.finalize(PowerManager.java:1627)
    at java.lang.Daemons$FinalizerDaemon.doFinalize(Daemons.java:217)
    at java.lang.Daemons$FinalizerDaemon.run(Daemons.java:200)
    at java.lang.Thread.run(Thread.java:818)
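As far as I can tell, the crash happens on the first opencv_core.IplImage.create(...) call in initRecorder(): the static initializer of opencv_core fails because Loader.sizeof(...) throws a NullPointerException, which I understand usually means javacpp could not load the matching native library first. Below is a minimal check I sketched (a hypothetical helper, not part of my app) that forces the load up front so the real cause is logged instead of being wrapped in the ExceptionInInitializerError:

// Hypothetical helper, not from the original project: force javacpp to load
// the native libraries behind opencv_core before the first IplImage.create()
// call, so a missing or mismatched .so shows up in logcat as the real cause.
package com.detection;

import android.util.Log;

import com.googlecode.javacpp.Loader;
import com.googlecode.javacv.cpp.opencv_core;

public final class NativeLoaderCheck {

    private NativeLoaderCheck() {
    }

    public static void verify() {
        try {
            // Loader.load() extracts and links the native library that backs opencv_core.
            Loader.load(opencv_core.class);
            Log.i("NativeLoaderCheck", "opencv_core natives loaded");
        } catch (Throwable t) {
            // Typically an UnsatisfiedLinkError when the .so files for the device ABI
            // are not packaged in the APK, or a version mismatch between
            // javacv.jar/javacpp.jar and the presets dependency.
            Log.e("NativeLoaderCheck", "Failed to load opencv_core natives", t);
        }
    }
}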

My build.gradle is:

apply plugin: 'com.android.application'

android {
    compileSdkVersion 25
    buildToolsVersion "26.0.0"
    defaultConfig {
        applicationId "com.detection"
        minSdkVersion 23
        targetSdkVersion 25
        versionCode 1
        versionName "1.0"
        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
        externalNativeBuild {
            cmake {
                cppFlags ""
            }
        }
        ndk {
            abiFilters "armeabi-v7a", "x86", "armeabi", "mips"
        }
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
    externalNativeBuild {
        cmake {
            path "CMakeLists.txt"
        }
    }
    sourceSets {
        main {
            jni.srcDirs = ['src/main/jni', 'src/main/jniLibs/']
        }
    }
}

dependencies {
    compile fileTree(include: ['*.jar'], dir: 'libs')
    androidTestCompile('com.android.support.test.espresso:espresso-core:2.2.2', {
        exclude group: 'com.android.support', module: 'support-annotations'
    })
    compile 'com.android.support:appcompat-v7:25.3.1'
    compile 'com.android.support.constraint:constraint-layout:1.0.2'
    testCompile 'junit:junit:4.12'
    compile project(':openCVLibrary320')
    compile files('libs/javacv.jar')
    compile files('libs/javacpp.jar')
    compile group: 'org.bytedeco.javacpp-presets', name: 'opencv', version: '3.2.0-1.3'
}

Here is my code:

package com.detection;

import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.PowerManager;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;

import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.cpp.opencv_core;

import java.io.IOException;
import java.nio.ShortBuffer;

import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;

public class MainActivity extends Activity implements View.OnClickListener {

    private final static String CLASS_LABEL = "RecordActivity";
    private final static String LOG_TAG = CLASS_LABEL;

    private PowerManager.WakeLock mWakeLock;

    private String ffmpeg_link = "/mnt/sdcard/stream.flv";

    long startTime = 0;
    boolean recording = false;

    private volatile FFmpegFrameRecorder recorder;

    private boolean isPreviewOn = false;

    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 30;

    /* audio data getting thread */
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;
    private Thread audioThread;
    volatile boolean runAudioThread = true;

    /* video data getting thread */
    private Camera cameraDevice;
    private CameraView cameraView;

    private opencv_core.IplImage yuvIplimage = null;

    /* layout setting */
    private final int bg_screen_bx = 232;
    private final int bg_screen_by = 128;
    private final int bg_screen_width = 700;
    private final int bg_screen_height = 500;
    private final int bg_width = 1123;
    private final int bg_height = 715;
    private final int live_width = 640;
    private final int live_height = 480;
    private int screenWidth, screenHeight;
    private Button btnRecorderControl;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        setContentView(R.layout.activity_main);

        PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
        mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
        mWakeLock.acquire();

        initLayout();
        initRecorder();
    }

    @Override
    protected void onResume() {
        super.onResume();
        if (mWakeLock == null) {
            PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
            mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
            mWakeLock.acquire();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (mWakeLock != null) {
            mWakeLock.release();
            mWakeLock = null;
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();

        recording = false;

        if (cameraView != null) {
            cameraView.stopPreview();
        }
        if (cameraDevice != null) {
            cameraDevice.stopPreview();
            cameraDevice.release();
            cameraDevice = null;
        }
        if (mWakeLock != null) {
            mWakeLock.release();
            mWakeLock = null;
        }
    }

    private void initLayout() {
        /* get size of screen */
        Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
        screenWidth = display.getWidth();
        screenHeight = display.getHeight();
        RelativeLayout.LayoutParams layoutParam = null;
        LayoutInflater myInflate = null;
        myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        RelativeLayout topLayout = new RelativeLayout(this);
        setContentView(topLayout);
        LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.activity_main, null);
        layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
        topLayout.addView(preViewLayout, layoutParam);

        /* add control button: start and stop */
        btnRecorderControl = (Button) findViewById(R.id.recorder_control);
        btnRecorderControl.setText("Start");
        btnRecorderControl.setOnClickListener(this);

        /* add camera view */
        int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
        int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
        int prev_rw, prev_rh;
        if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
            prev_rh = display_height_d;
            prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
        } else {
            prev_rw = display_width_d;
            prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
        }
        layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
        layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
        layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);

        cameraDevice = Camera.open();
        Log.i(LOG_TAG, "cameara open");
        cameraView = new CameraView(this, cameraDevice);
        topLayout.addView(cameraView, layoutParam);
        Log.i(LOG_TAG, "cameara preview start: OK");
    }

    //---------------------------------------
    // initialize ffmpeg_recorder
    //---------------------------------------
    private void initRecorder() {
        Log.w(LOG_TAG, "init recorder");

        if (yuvIplimage == null) {
            yuvIplimage = opencv_core.IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
            Log.i(LOG_TAG, "create yuvIplimage");
        }

        Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        recorder.setFormat("flv");
        recorder.setSampleRate(sampleAudioRateInHz);
        // Set in the surface changed method
        recorder.setFrameRate(frameRate);
        Log.i(LOG_TAG, "recorder initialize success");

        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
        runAudioThread = true;
    }

    public void startRecording() {
        try {
            recorder.start();
            startTime = System.currentTimeMillis();
            recording = true;
            audioThread.start();
        } catch (FFmpegFrameRecorder.Exception e) {
            e.printStackTrace();
        }
    }

    public void stopRecording() {
        runAudioThread = false;
        try {
            audioThread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        audioRecordRunnable = null;
        audioThread = null;

        if (recorder != null && recording) {
            recording = false;
            Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
            try {
                recorder.stop();
                recorder.release();
            } catch (FFmpegFrameRecorder.Exception e) {
                e.printStackTrace();
            }
            recorder = null;
        }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            if (recording) {
                stopRecording();
            }
            finish();
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    //---------------------------------------------
    // audio thread, gets and encodes audio data
    //---------------------------------------------
    class AudioRecordRunnable implements Runnable {

        @Override
        public void run() {
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            // Audio
            int bufferSize;
            short[] audioData;
            int bufferReadResult;

            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                    AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
            audioData = new short[bufferSize];

            Log.d(LOG_TAG, "audioRecord.startRecording()");
            audioRecord.startRecording();

            /* ffmpeg_audio encoding loop */
            while (runAudioThread) {
                //Log.v(LOG_TAG,"recording? " + recording);
                bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
                if (bufferReadResult > 0) {
                    Log.v(LOG_TAG, "bufferReadResult: " + bufferReadResult);
                    // If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
                    // Why? Good question...
                    if (recording) {
                        try {
                            recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
                            //Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
                        } catch (FFmpegFrameRecorder.Exception e) {
                            Log.v(LOG_TAG, e.getMessage());
                            e.printStackTrace();
                        }
                    }
                }
            }
            Log.v(LOG_TAG, "AudioThread Finished, release audioRecord");

            /* encoding finish, release recorder */
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
                Log.v(LOG_TAG, "audioRecord released");
            }
        }
    }

    //---------------------------------------------
    // camera thread, gets and encodes video data
    //---------------------------------------------
    class CameraView extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {

        private SurfaceHolder mHolder;
        private Camera mCamera;

        public CameraView(Context context, Camera camera) {
            super(context);
            Log.w("camera", "camera view");
            mCamera = camera;
            mHolder = getHolder();
            mHolder.addCallback(CameraView.this);
            mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
            mCamera.setPreviewCallback(CameraView.this);
        }

        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            try {
                stopPreview();
                mCamera.setPreviewDisplay(holder);
            } catch (IOException exception) {
                mCamera.release();
                mCamera = null;
            }
        }

        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            Log.v(LOG_TAG, "Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
            Camera.Parameters camParams = mCamera.getParameters();
            camParams.setPreviewSize(imageWidth, imageHeight);
            Log.v(LOG_TAG, "Preview Framerate: " + camParams.getPreviewFrameRate());
            camParams.setPreviewFrameRate(frameRate);
            mCamera.setParameters(camParams);
            startPreview();
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            try {
                mHolder.addCallback(null);
                mCamera.setPreviewCallback(null);
            } catch (RuntimeException e) {
                // The camera has probably just been released, ignore.
            }
        }

        public void startPreview() {
            if (!isPreviewOn && mCamera != null) {
                isPreviewOn = true;
                mCamera.startPreview();
            }
        }

        public void stopPreview() {
            if (isPreviewOn && mCamera != null) {
                isPreviewOn = false;
                mCamera.stopPreview();
            }
        }

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            /* get video data */
            if (yuvIplimage != null && recording) {
                yuvIplimage.getByteBuffer().put(data);
                Log.v(LOG_TAG, "Writing Frame");
                try {
                    long t = 1000 * (System.currentTimeMillis() - startTime);
                    if (t > recorder.getTimestamp()) {
                        recorder.setTimestamp(t);
                    }
                    recorder.record(yuvIplimage);
                } catch (FFmpegFrameRecorder.Exception e) {
                    Log.v(LOG_TAG, e.getMessage());
                    e.printStackTrace();
                }
            }
        }
    }

    @Override
    public void onClick(View v) {
        if (!recording) {
            startRecording();
            Log.w(LOG_TAG, "Start Button Pushed");
            btnRecorderControl.setText("Stop");
        } else {
            // This will trigger the audio recording loop to stop and then set isRecorderStart = false;
            stopRecording();
            Log.w(LOG_TAG, "Stop Button Pushed");
            btnRecorderControl.setText("Start");
        }
    }
}

Add a debug block in buildTypes:

buildTypes {
    release {
        minifyEnabled false
        proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
    }
    debug {
        minifyEnabled false
        proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
    }
}
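The debug block simply mirrors the release settings, so debug builds are configured explicitly as well. If the ExceptionInInitializerError persists after a clean rebuild, one option (a hedged sketch, reusing the hypothetical NativeLoaderCheck helper from the question) is to force the native loader before the recorder is initialized, so the underlying cause shows up in logcat:

// Hypothetical placement inside the existing onCreate() from the question.
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
    setContentView(R.layout.activity_main);

    PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
    mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
    mWakeLock.acquire();

    NativeLoaderCheck.verify();   // hypothetical helper sketched in the question
    initLayout();
    initRecorder();
}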
