[英]blob detection in unity3d: NullReferenceException
我是C#的新手。 我正在嘗試在unity3d中使用opencv資產進行斑點檢測。
我收到此錯誤:
NullReferenceException:對象引用未設置為對象的實例
在第89行(我指的是 detector.detect 那一行)之后,Unity 控制台將我指向 FeatureDetect.cs 代碼中的第230行(opencv 庫的一部分)。
很久以來,我一直在努力解決此問題,我們非常感謝您的幫助!
以下是我的代碼:
using UnityEngine;
using System.Collections;
using OpenCVForUnity;
namespace OpenCVForUnitySample
{
/// <summary>
/// WebCamTexture to mat sample.
/// </summary>
/// <summary>
/// Streams a WebCamTexture into OpenCV Mats, runs SimpleBlob feature
/// detection on the grayscale frame, and renders the grayscale image on
/// this GameObject's material every frame.
/// </summary>
public class WebCamTextureToMatSample : MonoBehaviour
{
    WebCamTexture webCamTexture;
    Color32[] colors;
    public bool isFrontFacing = false;
    int width = 640;
    int height = 480;
    public Mat rgbaMat;
    Texture2D texture;
    bool initDone = false;
    public Mat GrayMat;
    public Mat KeypointMat;
    public FeatureDetector detector;
    public MatOfKeyPoint keypoint1;

    // Use this for initialization
    void Start ()
    {
        StartCoroutine (init ());
    }

    /// <summary>
    /// Picks a camera matching <see cref="isFrontFacing"/>, starts it, waits
    /// for real frame dimensions, then allocates the Mats/texture and runs an
    /// initial blob detection pass.
    /// </summary>
    private IEnumerator init ()
    {
        // Re-initialization: stop any previously running camera and release its Mat.
        if (webCamTexture != null) {
            webCamTexture.Stop ();
            initDone = false;
            rgbaMat.Dispose ();
        }

        // Checks how many and which cameras are available on the device.
        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
            if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
                Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
                webCamTexture = new WebCamTexture (WebCamTexture.devices [cameraIndex].name, width, height);
                break;
            }
        }

        // No camera matched the requested facing; fall back to the default device.
        if (webCamTexture == null) {
            webCamTexture = new WebCamTexture (width, height);
        }

        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

        // Starts the camera.
        webCamTexture.Play ();

        while (true) {
            // On iOS, webCamTexture.width/height report 16 until the first real frame
            // arrives (webcamTexture.didUpdateThisFrame), so spin until they are valid.
            // (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
            if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored);

                colors = new Color32[webCamTexture.width * webCamTexture.height];
                rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                GrayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                detector = FeatureDetector.create (FeatureDetector.SIMPLEBLOB);
                Debug.Log ("detector created");

                // BUG FIX: keypoint1 and KeypointMat were declared but never
                // instantiated, so detector.detect() dereferenced null and threw
                // NullReferenceException. OpenCV output containers must be
                // allocated by the caller before being passed in.
                keypoint1 = new MatOfKeyPoint ();
                KeypointMat = new Mat ();

                detector.detect (GrayMat, keypoint1);
                Debug.Log ("keypoints created");

                Features2d.drawKeypoints (GrayMat, keypoint1, KeypointMat);

                texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
#if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
                // Mobile camera frames arrive rotated; compensate on-device only.
                gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
#endif
                gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);
                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
                Camera.main.orthographicSize = webCamTexture.width / 2;

                initDone = true;
                break;
            } else {
                // Frame not ready yet; yield one frame and retry.
                yield return 0;
            }
        }
    }

    /// <summary>
    /// Copies the current camera frame into the Mats and pushes the grayscale
    /// view to the on-screen texture. Runs once per rendered frame.
    /// </summary>
    void Update ()
    {
        if (!initDone)
            return;

        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
            Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);
            // NOTE(review): GrayMat is CV_8UC1 while webCamTextureToMat writes RGBA
            // pixel data — confirm the OpenCVForUnity overload converts channels,
            // otherwise a Imgproc.cvtColor(rgbaMat, GrayMat, COLOR_RGBA2GRAY) is needed.
            Utils.webCamTextureToMat (webCamTexture, GrayMat, colors);
#if UNITY_IPHONE && !UNITY_EDITOR
            // iOS reports mirrored/rotated frames depending on the camera; undo it.
            if (webCamTexture.videoVerticallyMirrored){
                if(isFrontFacing){
                    Core.flip (rgbaMat, rgbaMat, 1);
                }else{
                    Core.flip (rgbaMat, rgbaMat, 0);
                }
            }else{
                if(isFrontFacing){
                    Core.flip (rgbaMat, rgbaMat, -1);
                }
            }
#endif
            Utils.matToTexture2D (GrayMat, texture, colors);
            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
        }
    }

    /// <summary>Stops the camera when this component is disabled.</summary>
    void OnDisable ()
    {
        webCamTexture.Stop ();
    }

    /// <summary>Simple IMGUI overlay: back button and camera toggle.</summary>
    void OnGUI ()
    {
        // Scale the GUI relative to a 240px-wide reference layout.
        float screenScale = Screen.width / 240.0f;
        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
        GUI.matrix = scaledMatrix;

        GUILayout.BeginVertical ();
        if (GUILayout.Button ("back")) {
            Application.LoadLevel ("OpenCVForUnitySample");
        }
        if (GUILayout.Button ("change camera")) {
            isFrontFacing = !isFrontFacing;
            StartCoroutine (init ());
        }
        GUILayout.EndVertical ();
    }
}
}
嘗試這個
detector = new FeatureDetector(); //assuming it has a default constructor.
detector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
Debug.Log ("detector created");
//detector.detect(
detector.detect(GrayMat,keypoint1);
Debug.Log ("keypoints created");
更新
如果您注意我在 detector = new FeatureDetector(); 那一行的註釋,我寫的是「假設它具有默認構造函數」。您現在遇到的新錯誤正說明 FeatureDetector
類沒有默認構造函數。
在C#中,如果您沒有定義任何構造函數,編譯器會自動為您生成一個無參的默認構造函數;但只要您定義了帶參數的構造函數,編譯器就不會再生成默認構造函數。
現在,您必須手動添加它。 只需打開FeatureDetector類並添加此行
public FeatureDetector(){}
這就對了。 嘗試再次運行您的代碼。
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.