Using Live555 to Stream Live Video from an IP camera connected to an H264 encoder

I am using a custom Texas Instruments OMAP-L138 based board that consists essentially of an ARM9-based SoC and a DSP processor; it is connected to a camera lens. What I am trying to do is capture the live video stream, send it to the DSP processor for H.264 encoding, and receive the encoded stream over uPP in packets of 8192 bytes. I want to use the testH264VideoStreamer supplied with Live555 to live-stream the H.264-encoded video over RTSP. The code I have modified is shown below:

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <stdio.h>
#include <unistd.h> // for read()
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>




UsageEnvironment* env;
H264VideoStreamFramer* videoSource;
RTPSink* videoSink;


//-------------------------------------------------------------------------------
/* Open the uPP device and keep the file descriptor for the lifetime of the program */
int stream = open("/dev/upp", O_RDONLY);
/* Static 8192-byte buffer that holds one uPP packet between invocations of play() */
static uint8_t buf[8192];
//------------------------------------------------------------------------------


//------------------------------------------------------------------------------
// Forward declaration of play()
//------------------------------------------------------------------------------
void play(); // forward


//------------------------------------------------------------------------------
// MAIN FUNCTION / ENTRY POINT 
//------------------------------------------------------------------------------
int main(int argc, char** argv) 
{
    // Begin by setting up our live555 usage environment:
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    env = BasicUsageEnvironment::createNew(*scheduler);

    // Create 'groupsocks' for RTP and RTCP:
    struct in_addr destinationAddress;
    destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
    // Note: This is a multicast address.  If you wish instead to stream
    // using unicast, then you should use the "testOnDemandRTSPServer"
    // test program - not this test program - as a model.

    const unsigned short rtpPortNum = 18888;
    const unsigned short rtcpPortNum = rtpPortNum+1;
    const unsigned char ttl = 255;

    const Port rtpPort(rtpPortNum);
    const Port rtcpPort(rtcpPortNum);

    Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
    rtpGroupsock.multicastSendOnly(); // we're a SSM source
    Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
    rtcpGroupsock.multicastSendOnly(); // we're a SSM source

    // Create a 'H264 Video RTP' sink from the RTP 'groupsock':
    OutPacketBuffer::maxSize = 1000000;
    videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

    // Create (and start) a 'RTCP instance' for this RTP sink:
    const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share
    const unsigned maxCNAMElen = 100;
    unsigned char CNAME[maxCNAMElen+1];
    gethostname((char*)CNAME, maxCNAMElen);
    CNAME[maxCNAMElen] = '\0'; // just in case
    RTCPInstance* rtcp
    = RTCPInstance::createNew(*env, &rtcpGroupsock,
                estimatedSessionBandwidth, CNAME,
                videoSink, NULL /* we're a server */,
                True /* we're a SSM source */);
    // Note: This starts RTCP running automatically

    /*Create RTSP SERVER*/
    RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
    if (rtspServer == NULL) 
    {
         *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
         exit(1);
    }
    ServerMediaSession* sms
        = ServerMediaSession::createNew(*env, "IPCAM @ TeReSol","UPP Buffer" ,
           "Session streamed by \"testH264VideoStreamer\"",
                       True /*SSM*/);
    sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
    rtspServer->addServerMediaSession(sms);

    char* url = rtspServer->rtspURL(sms);
    *env << "Play this stream using the URL \"" << url << "\"\n";
    delete[] url;

    // Start the streaming:
    *env << "Beginning streaming...\n";
    play();

    env->taskScheduler().doEventLoop(); // does not return

    return 0; // only to prevent compiler warning
}



//----------------------------------------------------------------------------------
// afterPlaying() -> Defines what to do once a buffer is streamed
//----------------------------------------------------------------------------------
void afterPlaying(void* /*clientData*/) 
{
    *env << "...done reading from upp buffer\n";
    //videoSink->stopPlaying();
    //Medium::close(videoSource);
    // Note that this also closes the input file that this source read from.

    // Start playing once again to get the next stream      
    play();

    /* The device does not need to be closed as long as we keep reading from it. If it ever does, use: close(stream); */

}



//----------------------------------------------------------------------------------------------
// play() Method -> Defines how to read and what to make of the input stream 
//----------------------------------------------------------------------------------------------
void play()
{



    /* Read up to sizeof buf bytes from the uPP file descriptor into buf (the return value should really be checked) */
    read(stream, buf, sizeof buf);
    printf("Reading from UPP into buffer\n");

    /* Wrap the buffer as a 'byte-stream memory buffer source': */
    ByteStreamMemoryBufferSource* buffSource
        = ByteStreamMemoryBufferSource::createNew(*env, buf, sizeof buf, False /* do not delete the buffer on close */);
    /* Passing False here tells the source not to delete the buffer when it is closed, so the same static buffer can be reused on the next call to play() */

    if (buffSource == NULL) 
    {
      *env << "Unable to read from\"" << "Buffer"
           << "\" as a byte-stream source\n";
          exit(1);
    }

    FramedSource* videoES = buffSource;
    // Create a framer for the Video Elementary Stream:
    videoSource = H264VideoStreamFramer::createNew(*env, videoES,False);
    // Finally, start playing:
    *env << "Beginning to read from UPP...\n";
    videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

The problem is that although the code compiles successfully, I am unable to get the desired output: the RTSP stream in VLC is in play mode, but I cannot see any video. I would be grateful for any assistance in this matter. My description may be a little vague, but I am happy to explain further any part that is required.
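A quick sanity check in a setup like this is to confirm that the bytes coming out of /dev/upp really are an Annex-B H.264 elementary stream, i.e. that packets begin with a 0x00 0x00 0x00 0x01 start code and contain SPS/PPS NAL units, since H264VideoStreamFramer expects such a byte stream and VLC cannot render a picture without the parameter sets. The standalone sketch below is one way to probe the first packet, assuming the same /dev/upp device and 8192-byte packets:

#include <cstdio>
#include <cstdint>
#include <fcntl.h>
#include <unistd.h>

int main()
{
    int fd = open("/dev/upp", O_RDONLY);
    if (fd < 0) { perror("open /dev/upp"); return 1; }

    uint8_t probe[8192];
    ssize_t n = read(fd, probe, sizeof probe);
    if (n < 5) { fprintf(stderr, "short read from /dev/upp\n"); close(fd); return 1; }

    // An Annex-B stream starts with the byte sequence 00 00 00 01:
    bool haveStartCode = probe[0] == 0 && probe[1] == 0 && probe[2] == 0 && probe[3] == 1;
    printf("Annex-B start code present: %s\n", haveStartCode ? "yes" : "no");

    if (haveStartCode) {
        unsigned nalType = probe[4] & 0x1F;   // 7 = SPS, 8 = PPS, 5 = IDR slice
        printf("First NAL unit type: %u\n", nalType);
    }

    close(fd);
    return 0;
}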

Okay, so I figured out what needed to be done and am writing it up for the benefit of anyone who might face a similar issue. What I needed to do was modify my testH264VideoStreamer.cpp and DeviceSource.cpp files so that the program reads data directly from the device (in my case the custom AM1808-based board), stores it in a buffer, and streams it. The changes I made were:

testH264VideoStreamer.cpp

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <stdio.h>
#include <unistd.h> // for read()
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>




UsageEnvironment* env;

H264VideoStreamFramer* videoSource;
RTPSink* videoSink;

void play(); // forward
//-------------------------------------------------------------------------
//Entry Point -> Main FUNCTION  
//-------------------------------------------------------------------------

int main(int argc, char** argv) {
  // Begin by setting up our usage environment:
  TaskScheduler* scheduler = BasicTaskScheduler::createNew();
  env = BasicUsageEnvironment::createNew(*scheduler);

  // Create 'groupsocks' for RTP and RTCP:
  struct in_addr destinationAddress;
  destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
  // Note: This is a multicast address.  If you wish instead to stream
  // using unicast, then you should use the "testOnDemandRTSPServer"
  // test program - not this test program - as a model.

  const unsigned short rtpPortNum = 18888;
  const unsigned short rtcpPortNum = rtpPortNum+1;
  const unsigned char ttl = 255;

  const Port rtpPort(rtpPortNum);
  const Port rtcpPort(rtcpPortNum);

  Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
  rtpGroupsock.multicastSendOnly(); // we're a SSM source
  Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
  rtcpGroupsock.multicastSendOnly(); // we're a SSM source

  // Create a 'H264 Video RTP' sink from the RTP 'groupsock':
  OutPacketBuffer::maxSize = 600000;
  videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);

  // Create (and start) a 'RTCP instance' for this RTP sink:
  const unsigned estimatedSessionBandwidth = 1024; // in kbps; for RTCP b/w share
  const unsigned maxCNAMElen = 100;
  unsigned char CNAME[maxCNAMElen+1];
  gethostname((char*)CNAME, maxCNAMElen);
  CNAME[maxCNAMElen] = '\0'; // just in case
  RTCPInstance* rtcp
  = RTCPInstance::createNew(*env, &rtcpGroupsock,
                estimatedSessionBandwidth, CNAME,
                videoSink, NULL /* we're a server */,
                True /* we're a SSM source */);
  // Note: This starts RTCP running automatically

  RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
  if (rtspServer == NULL) {
    *env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
    exit(1);
  }
  ServerMediaSession* sms
    = ServerMediaSession::createNew(*env, "ipcamera","UPP Buffer" ,
           "Session streamed by \"testH264VideoStreamer\"",
                       True /*SSM*/);
  sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
  rtspServer->addServerMediaSession(sms);

  char* url = rtspServer->rtspURL(sms);
  *env << "Play this stream using the URL \"" << url << "\"\n";
  delete[] url;

  // Start the streaming:
  *env << "Beginning streaming...\n";
  play();

  env->taskScheduler().doEventLoop(); // does not return

  return 0; // only to prevent compiler warning
}
//----------------------------------------------------------------------
//AFTER PLAY FUNCTION CALLED HERE
//----------------------------------------------------------------------
void afterPlaying(void* /*clientData*/) 
{

    play();
}
//------------------------------------------------------------------------
//PLAY FUNCTION () 
//------------------------------------------------------------------------
void play()
{


      // Open the device as the frame source:
    DeviceSource* devSource
        = DeviceSource::createNew(*env);
    if (devSource == NULL) 
    {

          *env << "Unable to read from\"" << "Buffer"
           << "\" as a byte-stream source\n";
          exit(1);
    }

    FramedSource* videoES = devSource;

    // Create a framer for the Video Elementary Stream:
    videoSource = H264VideoStreamFramer::createNew(*env, videoES,False);

    // Finally, start playing:
    *env << "Beginning to read from UPP...\n";
    videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}

DeviceSource.cpp

#include "DeviceSource.hh"
#include <GroupsockHelper.hh> // for "gettimeofday()"
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>
#include <string.h>
#include <unistd.h>

//static uint8_t *buf = (uint8_t*)malloc(102400);
static uint8_t buf[8192];
int upp_stream;
//static uint8_t *bufPtr = buf;

DeviceSource*
DeviceSource::createNew(UsageEnvironment& env)
{

  return new DeviceSource(env);
}

EventTriggerId DeviceSource::eventTriggerId = 0;

unsigned DeviceSource::referenceCount = 0;

DeviceSource::DeviceSource(UsageEnvironment& env):FramedSource(env) 
{ 

  if (referenceCount == 0) 
  {

      upp_stream = open("/dev/upp",O_RDWR);

  }
  ++referenceCount;

  if (eventTriggerId == 0) 
  {
    eventTriggerId = envir().taskScheduler().createEventTrigger(deliverFrame0);
  }
}

DeviceSource::~DeviceSource(void) {
  --referenceCount;
  envir().taskScheduler().deleteEventTrigger(eventTriggerId);
  eventTriggerId = 0;

  if (referenceCount == 0) 
  {

  }
}

int loop_count;

void DeviceSource::doGetNextFrame() 
{

    //for (loop_count=0; loop_count < 13; loop_count++)
    //{
        read(upp_stream,buf, 8192);

        //bufPtr+=8192;

    //}
    deliverFrame();

}

void DeviceSource::deliverFrame0(void* clientData) 
{
  ((DeviceSource*)clientData)->deliverFrame();
}

void DeviceSource::deliverFrame() 
{


  if (!isCurrentlyAwaitingData()) return; // we're not ready for the data yet

  u_int8_t* newFrameDataStart = (u_int8_t*) buf;   // data read from /dev/upp in doGetNextFrame()
  unsigned newFrameSize = sizeof(buf);             // always one full 8192-byte packet

  // Deliver the data here:
  if (newFrameSize > fMaxSize) {
    fFrameSize = fMaxSize;
    fNumTruncatedBytes = newFrameSize - fMaxSize;
  } else {
    fFrameSize = newFrameSize;
  }
  gettimeofday(&fPresentationTime, NULL); 
  memmove(fTo, newFrameDataStart, fFrameSize);
  FramedSource::afterGetting(this);
}
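
DeviceSource.cpp needs a matching DeviceSource.hh. A minimal header consistent with the .cpp above (a sketch based on the DeviceSource template shipped with Live555, trimmed to the parameter-less createNew() used here, rather than a file from the original post) would look like this:

#ifndef _DEVICE_SOURCE_HH
#define _DEVICE_SOURCE_HH

#ifndef _FRAMED_SOURCE_HH
#include "FramedSource.hh"
#endif

class DeviceSource: public FramedSource {
public:
  static DeviceSource* createNew(UsageEnvironment& env);
  static EventTriggerId eventTriggerId;

protected:
  DeviceSource(UsageEnvironment& env);
  virtual ~DeviceSource(void);

private:
  // Implements FramedSource: reads one uPP packet and delivers it downstream.
  virtual void doGetNextFrame();
  static void deliverFrame0(void* clientData);
  void deliverFrame();

private:
  // Incremented/decremented by the constructor/destructor so /dev/upp is opened only once:
  static unsigned referenceCount;
};

#endif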

After compiling the code with these modifications, I was able to receive the video stream in VLC.
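
For playback testing, the URL printed by the program (of the form rtsp://<board-ip>:8554/ipcamera, where the host part depends on the board's network address) can be opened in VLC via Media > Open Network Stream, or pulled with Live555's openRTSP command-line client.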
