
Using OpenCV with Unity on Android

I am having a problem using OpenCV with Unity on Android. I use the OpenCV library as a native C++ plugin (an .so file), and I want to detect a Rubik's cube and the colors of its faces.

So I pass the data from Unity's WebCamTexture to OpenCV. This works fine when the WebCamTexture is small (320x240), but I run into trouble when it is larger (480x640), and when the WebCamTexture resolution is set to full screen (800x1280 in this case) the app crashes.

I tried to debug it. The app stops when I copy the data from OpenCV back to Unity with Marshal.Copy. How can I fix this?

(Screenshot: the resolution is 320x240.)

[Case 1 (resolution 320x240)]

using UnityEngine;
using UnityEngine.UI;

using System.Runtime.InteropServices;
using System;

public class Controller : MonoBehaviour {
    public float RotateSpeed = 0.5f;

    public RawImage InImage;
    public RawImage OutImage;

    WebCamTexture wct;
    Texture2D outTexture;

    void Awake()
    {
#if UNITY_EDITOR
        int width = 1280;
        int height = 720;
#else
        int width = 320;
        int height = 240;
#endif

        NativeLibAdapter.InitCV(width, height);

        outTexture = new Texture2D(width, height, TextureFormat.RGBA32, false);

        wct = new WebCamTexture(width, height);
        wct.Play();
        // Ignore it.
        Debug.LogWarning("Foo Value in C++ is " + NativeLibAdapter.FooTest());
    }

    void Update()
    {

        if (wct.width > 100 && wct.height > 100)
        {
            Color32[] pixels = wct.GetPixels32();
            GCHandle pixelHandle = GCHandle.Alloc(pixels, GCHandleType.Pinned);

            IntPtr results = NativeLibAdapter.SubmitFrame(wct.width, wct.height, pixelHandle.AddrOfPinnedObject());
            int bufferSize = wct.width * wct.height * 4;
            byte[] rawData = new byte[bufferSize];

            if (results != IntPtr.Zero)
            {
                Marshal.Copy(results, rawData, 0, bufferSize);

                outTexture.LoadRawTextureData(rawData);
                outTexture.Apply();
            }

            OutImage.texture = outTexture;

            rawData = null;
            pixelHandle.Free();
        }
    }
}

(Screenshot: the resolution is 480x640. There is a lot of noise.)

[Case 2 (resolution 480x640)]

using UnityEngine;
using UnityEngine.UI;

using System.Runtime.InteropServices;
using System;

public class Controller : MonoBehaviour {
    public float RotateSpeed = 0.5f;

    public RawImage InImage;
    public RawImage OutImage;

    WebCamTexture wct;
    Texture2D outTexture;

    void Awake()
    {
#if UNITY_EDITOR
        int width = 1280;
        int height = 720;
#else
        int width = 480;
        int height = 640;
#endif

        NativeLibAdapter.InitCV(width, height);

        outTexture = new Texture2D(width, height, TextureFormat.RGBA32, false);

        wct = new WebCamTexture(width, height);
        wct.Play();
        // Ignore it.
        Debug.LogWarning("Foo Value in C++ is " + NativeLibAdapter.FooTest());
    }

    void Update()
    {

        if (wct.width > 100 && wct.height > 100)
        {
            Color32[] pixels = wct.GetPixels32();
            GCHandle pixelHandle = GCHandle.Alloc(pixels, GCHandleType.Pinned);

            IntPtr results = NativeLibAdapter.SubmitFrame(wct.width, wct.height, pixelHandle.AddrOfPinnedObject());
            int bufferSize = wct.width * wct.height * 4;
            byte[] rawData = new byte[bufferSize];

            if (results != IntPtr.Zero)
            {
                Marshal.Copy(results, rawData, 0, bufferSize);

                outTexture.LoadRawTextureData(rawData);
                outTexture.Apply();
            }

            OutImage.texture = outTexture;

            rawData = null;
            pixelHandle.Free();
        }
    }
}

Only the width and height variables change.

[Case 3 (full-screen resolution on the device)]

using UnityEngine;
using UnityEngine.UI;

using System.Runtime.InteropServices;
using System;

public class Controller : MonoBehaviour {
    public float RotateSpeed = 0.5f;

    public RawImage InImage;
    public RawImage OutImage;

    WebCamTexture wct;
    Texture2D outTexture;

    void Awake()
    {
#if UNITY_EDITOR
        int width = 1280;
        int height = 720;
#else
        int width = 480;
        int height = 640;
#endif

        NativeLibAdapter.InitCV(Screen.width, Screen.height);

        outTexture = new Texture2D(Screen.width, Screen.height, TextureFormat.RGBA32, false);

        wct = new WebCamTexture(Screen.width, Screen.height);
        wct.Play();

        // Ignore it.
        Debug.LogWarning("Foo Value in C++ is " + NativeLibAdapter.FooTest());
    }

    void Update()
    {

        if (wct.width > 100 && wct.height > 100)
        {
            Color32[] pixels = wct.GetPixels32();
            GCHandle pixelHandle = GCHandle.Alloc(pixels, GCHandleType.Pinned);

            IntPtr results = NativeLibAdapter.SubmitFrame(wct.width, wct.height, pixelHandle.AddrOfPinnedObject());
            int bufferSize = wct.width * wct.height * 4;
            byte[] rawData = new byte[bufferSize];

            if (results != IntPtr.Zero)
            {
                // here! App stops. I don't know why App stops.
                Marshal.Copy(results, rawData, 0, bufferSize);

                outTexture.LoadRawTextureData(rawData);
                outTexture.Apply();
            }

            OutImage.texture = outTexture;

            rawData = null;
            pixelHandle.Free();
        }
    }
}

I am using:

  • IDE
  • Unity 2018.1.19f
  • Android Studio 3.4.2
  • OpenCV 3.4.3

You have the following code to initialize the CV buffer:

#if UNITY_EDITOR
        int width = 1280;
        int height = 720;
#else
        int width = 480;
        int height = 640;
#endif

        NativeLibAdapter.InitCV(Screen.width, Screen.height);

The CV buffer is not initialized large enough to handle 800x1280, so you are probably running out of memory. Expect performance to drop as the resolution grows as well (640x480 carries four times the data of 320x240, so there is more garbage collection and more data to process per frame). You either need to initialize the buffer with a width and height large enough for the maximum resolution before feeding data to CV, or limit the pixels you retrieve from GetPixels().
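
In case it is useful, here is a minimal sketch of that idea: wait until the WebCamTexture reports the resolution the camera actually delivers, then size the native buffer, the output texture and the managed copy buffer from that one value, so Marshal.Copy never reads past what InitCV allocated. It reuses the NativeLibAdapter calls from your code and assumes that InitCV allocates the native result buffer at the given size and that SubmitFrame returns a pointer to a buffer of exactly that size.

using UnityEngine;
using UnityEngine.UI;

using System.Runtime.InteropServices;
using System;

public class Controller : MonoBehaviour {
    public RawImage OutImage;

    WebCamTexture wct;
    Texture2D outTexture;
    byte[] rawData;        // reused every frame instead of re-allocating
    bool cvInitialized;

    void Awake()
    {
        // Request full screen, but treat it only as a hint:
        // the camera may deliver a different resolution.
        wct = new WebCamTexture(Screen.width, Screen.height);
        wct.Play();
    }

    void Update()
    {
        // WebCamTexture reports a tiny placeholder size until the first
        // real frame arrives, so wait before allocating anything.
        if (wct.width <= 100 || wct.height <= 100)
            return;

        if (!cvInitialized)
        {
            // Size everything from the resolution the camera actually gave us,
            // so the native buffer, the output texture and the managed copy
            // buffer all agree.
            NativeLibAdapter.InitCV(wct.width, wct.height);
            outTexture = new Texture2D(wct.width, wct.height, TextureFormat.RGBA32, false);
            rawData = new byte[wct.width * wct.height * 4];
            cvInitialized = true;
        }

        Color32[] pixels = wct.GetPixels32();
        GCHandle pixelHandle = GCHandle.Alloc(pixels, GCHandleType.Pinned);
        try
        {
            IntPtr results = NativeLibAdapter.SubmitFrame(wct.width, wct.height, pixelHandle.AddrOfPinnedObject());
            if (results != IntPtr.Zero)
            {
                // Copy exactly as many bytes as were allocated above.
                Marshal.Copy(results, rawData, 0, rawData.Length);
                outTexture.LoadRawTextureData(rawData);
                outTexture.Apply();
                OutImage.texture = outTexture;
            }
        }
        finally
        {
            pixelHandle.Free();
        }
    }
}

Allocating rawData once and copying into it also avoids creating a multi-megabyte byte[] every frame, which is a large part of the per-frame garbage at 800x1280.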
