简体   繁体   中英

How to Open the local .sdp file by live555

I need to print the time (UTC) of every frame by reading each RTP timestamp, but the VLC API does not expose this. I heard that the VLC library uses the live555 library to parse RTSP, and I found that the function afterGettingFrame in testRTSPClient (a demo from the official live555 website) prints the UTC time of every frame.

I tried to use testRTSPClient to open the .sdp file on my local PC, but it does not work. It can only open URLs of the form "rtsp://123.434.12.4/3523swdawd.sdp"; otherwise it fails with "Failed to Get SDP Description: 404 Stream Not Found" (testRTSPClient in LIVE555).
Do I need to install the rtsp server ? because I found out that it needs to send some special commands(SETUP,PLAY,OPTIONS) to the server.

If testRTSPClient can only process URLs of the form rtsp://123.434.12.4/3523swdawd.sdp, how can VLC Media Player play a local .sdp file without setting up an RTSP server?

TIP: This local .sdp file describes my local IP camera. I can play the video from the IP camera with VLC Player, but I want to use testRTSPClient to process the local .sdp file and print the UTC time of each video frame. Can anyone suggest a solution to this problem?

In order to receive an RTP stream described by an SDP file with live555, you need to :

  1. Create a MediaSession from the SDP (this will create the associated MediaSubsession )
  2. Initiate the MediaSubsession in order to open the UDP ports that will receive RTP/RTCP
  3. Create an overloaded MediaSink to receive the RTP frames
  4. Start this sink

A naive implementation inspired from testRTSPClient.cpp could be something like :

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

#include <cstdio>
#include <string>

// Stream a short "medium/codec" tag (e.g. "video/H265") for a subsession
// into the live555 logging environment.
UsageEnvironment& operator<<(UsageEnvironment& env, const MediaSubsession& subsession)
{
    env << subsession.mediumName();
    env << "/";
    env << subsession.codecName();
    return env;
}

// Per-frame receive buffer size, in bytes. Frames larger than this are
// truncated (reported via numTruncatedBytes below).
#define DUMMY_SINK_RECEIVE_BUFFER_SIZE 100000

// DummySink: a minimal MediaSink that discards the media payload but logs,
// for every received frame, its size, its presentation time (with a '!'
// marker while that time is not yet RTCP-synchronized) and its NPT.
class DummySink: public MediaSink 
{
    public:
        static DummySink* createNew(UsageEnvironment& env,
                      MediaSubsession& subsession, // identifies the kind of data that's being received
                      char const* streamId = NULL) // identifies the stream itself (optional)
        {
              return new DummySink(env, subsession, streamId);
        }

    private:
        DummySink(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId)  
            : MediaSink(env), fSubsession(subsession) 
        {
            fStreamId = strDup(streamId);
            fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE];
        }

        virtual ~DummySink()
        {
            delete[] fReceiveBuffer;
            // strDup() allocates with new[], so delete[] is the matching release.
            delete[] fStreamId;
        }

        // Static trampoline handed to getNextFrame(); live555 passes this
        // sink instance back through 'clientData'.
        static void afterGettingFrame(void* clientData, unsigned frameSize,
                    unsigned numTruncatedBytes,
                    struct timeval presentationTime,
                    unsigned durationInMicroseconds)
        {
            DummySink* sink = (DummySink*)clientData;
            sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);        
        }
        // Per-frame handler: print stream id, medium/codec, frame size,
        // presentation time and NPT, then re-arm the sink for the next frame.
        void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
                 struct timeval presentationTime, unsigned durationInMicroseconds)
        {
            if (fStreamId != NULL) envir() << "Stream \"" << fStreamId << "\"; ";
            envir() << fSubsession.mediumName() << "/" << fSubsession.codecName() << ":\tReceived " << frameSize << " bytes";
            if (numTruncatedBytes > 0) envir() << " (with " << numTruncatedBytes << " bytes truncated)";
            char uSecsStr[6+1]; // used to output the 'microseconds' part of the presentation time
            sprintf(uSecsStr, "%06u", (unsigned)presentationTime.tv_usec);
            envir() << ".\tPresentation time: " << (int)presentationTime.tv_sec << "." << uSecsStr;
            if (fSubsession.rtpSource() != NULL && !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) 
            {
                envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized
            }
            envir() << "\tNPT: " << fSubsession.getNormalPlayTime(presentationTime);
            envir() << "\n";

            // Then continue, to request the next frame of data:
            continuePlaying();          
        }

    private:
        // Called by startPlaying() and after each delivered frame: requests
        // the next frame from the source, delivering it into fReceiveBuffer
        // and invoking the static afterGettingFrame() trampoline when done.
        virtual Boolean continuePlaying()
        {
            if (fSource == NULL) return False; // sanity check (should not happen)          
            fSource->getNextFrame(fReceiveBuffer, DUMMY_SINK_RECEIVE_BUFFER_SIZE, afterGettingFrame, this, onSourceClosure, this);
            return True;
        }

    private:
        u_int8_t* fReceiveBuffer; // scratch buffer for the incoming frame payload
        MediaSubsession& fSubsession; // the subsession this sink is consuming
        char* fStreamId; // owned copy of the stream label (may be NULL)
};

// Reads an SDP description from the file named on the command line, builds a
// live555 MediaSession from it (no RTSP server involved), attaches a
// DummySink to each subsession and runs the event loop, logging every
// received frame. Returns 1 on any setup error.
int main(int argc, char** argv) 
{
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

    if (argc < 2) 
    {
        *env << "Usage: " << argv[0] << " file.sdp\n";
        return 1;
    }
    const char* filename = argv[1];

    // Read the whole SDP file into a NUL-terminated buffer.
    // MediaSession::createNew() takes a C string, so the description must end
    // with '\0' -- the original read into a VLA (char sdp[size]) was neither
    // standard C++ nor NUL-terminated, and ignored fread()'s result.
    FILE* file = fopen(filename, "r");
    if (file == NULL)
    {
        *env << "Cannot open SDP file:" << filename << "\n";
        return 1;
    }
    fseek(file, 0, SEEK_END);
    long size = ftell(file);
    fseek(file, 0, SEEK_SET);
    if (size <= 0)
    {
        *env << "Cannot read SDP file:" << filename << "\n";
        fclose(file);
        return 1;
    }
    std::string sdp(static_cast<size_t>(size), '\0');
    size_t bytesRead = fread(&sdp[0], 1, static_cast<size_t>(size), file);
    fclose(file);
    if (bytesRead != static_cast<size_t>(size))
    {
        *env << "Cannot read SDP file:" << filename << "\n";
        return 1;
    }

    // Build the session (and its subsessions) directly from the SDP text.
    MediaSession* session = MediaSession::createNew(*env, sdp.c_str());
    if (session == NULL)
    {
        *env << "Failed to create a MediaSession object from the SDP description: " << env->getResultMsg() << "\n";
        return 1;
    }

    // For each subsession: open the RTP/RTCP UDP ports (initiate), attach a
    // DummySink and start receiving.
    MediaSubsessionIterator iter(*session);
    MediaSubsession* subsession = NULL;
    while ((subsession = iter.next()) != NULL) 
    {
        if (!subsession->initiate(0))
        {
            *env << "Failed to initiate the \"" << *subsession << "\" subsession: " << env->getResultMsg() << "\n";
        }
        else
        {
            subsession->sink = DummySink::createNew(*env, *subsession, filename);
            if (subsession->sink == NULL)
            {
                *env << "Failed to create a data sink for the \"" << *subsession << "\" subsession: " << env->getResultMsg() << "\n";
            }
            else
            {
                subsession->sink->startPlaying(*subsession->rtpSource(), NULL, NULL);
            }
        }
    }

    // Run forever: the watch variable is never set non-zero, so doEventLoop()
    // does not return.
    char eventLoopWatchVariable = 0;
    env->taskScheduler().doEventLoop(&eventLoopWatchVariable);

    return 0;
}

Running the program with the path of the SDP file as its argument will read the RTP streams, printing the frame size and the timestamp of each frame.
Something like :

Stream "ffmpeg.sdp"; video/H265:    Received 5131 bytes.    Presentation time: 1442350569.228804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7917 bytes.    Presentation time: 1442350569.268804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 2383 bytes.    Presentation time: 1442350569.308804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7780 bytes.    Presentation time: 1442350569.348804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 1773 bytes.    Presentation time: 1442350569.388804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 9580 bytes.    Presentation time: 1442350569.428804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7934 bytes.    Presentation time: 1442350569.468804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 2180 bytes.    Presentation time: 1442350569.508804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 10804 bytes.   Presentation time: 1442350569.548804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7801 bytes.    Presentation time: 1442350569.588804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7816 bytes.    Presentation time: 1442350569.628804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 4028 bytes.    Presentation time: 1442350569.668804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7959 bytes.    Presentation time: 1442350569.708804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 8062 bytes.    Presentation time: 1442350569.794000    NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 8014 bytes.    Presentation time: 1442350569.834000    NPT: 0.000000

The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM