How to open a local .sdp file with live555

I need to print the time (UTC) of every frame, derived from each RTP timestamp, but the VLC API does not support this. I had heard that the VLC lib calls the live555 lib to parse RTSP, so I looked into testRTSPClient (the demo from the live555 website), found the afterGettingFrame function, and used it to print the UTC time of every frame.

I simply tried to use testRTSPClient to open a local .sdp file on my PC, but it does not work. It can only open URLs of the form "rtsp://123.434.12.4/3523swdawd.sdp" and so on.
Do I need to install an RTSP server? I ask because I found that testRTSPClient needs to send some special commands (SETUP, PLAY, OPTIONS) to a server.

If testRTSPClient can only handle URLs of the form rtsp://123.434.12.4/3523swdawd.sdp, how does VLC Media Player handle a local .sdp file without setting up an RTSP server?

Note: this local .sdp file is for my local IP camera. I can play the video frames from the IP camera with VLC Player, but I want to process the local .sdp file with testRTSPClient and print the UTC time of the video frames. Can anyone solve this?

In order to receive the RTP stream described by an SDP file with live555 (a sample file is shown after these steps), you need to:

  1. Create a MediaSession from the SDP description (this also creates the associated MediaSubsessions)
  2. Initiate each MediaSubsession in order to open the UDP ports that will receive the RTP/RTCP packets
  3. Create an overloaded MediaSink that receives the RTP frames
  4. Start this sink

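For reference, a minimal SDP file describing a single H.265 video stream might look like the sketch below (the session name, IP address, port, and payload type are illustrative; a real camera or encoder will supply its own values):

v=0
o=- 0 0 IN IP4 127.0.0.1
s=Camera stream
c=IN IP4 127.0.0.1
t=0 0
m=video 5004 RTP/AVP 96
a=rtpmap:96 H265/90000
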
A naive implementation inspired by testRTSPClient.cpp could look like this:

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

UsageEnvironment& operator<<(UsageEnvironment& env, const MediaSubsession& subsession) 
{
    return env << subsession.mediumName() << "/" << subsession.codecName();
}

#define DUMMY_SINK_RECEIVE_BUFFER_SIZE 100000

class DummySink: public MediaSink 
{
    public:
        static DummySink* createNew(UsageEnvironment& env,
                      MediaSubsession& subsession, // identifies the kind of data that's being received
                      char const* streamId = NULL) // identifies the stream itself (optional)
        {
              return new DummySink(env, subsession, streamId);
        }

    private:
        DummySink(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId)  
            : MediaSink(env), fSubsession(subsession) 
        {
            fStreamId = strDup(streamId);
            fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE];
        }

        virtual ~DummySink()
        {
            delete[] fReceiveBuffer;
            delete[] fStreamId;
        }

        static void afterGettingFrame(void* clientData, unsigned frameSize,
                    unsigned numTruncatedBytes,
                    struct timeval presentationTime,
                    unsigned durationInMicroseconds)
        {
            DummySink* sink = (DummySink*)clientData;
            sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);        
        }
        void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
                 struct timeval presentationTime, unsigned durationInMicroseconds)
        {
            if (fStreamId != NULL) envir() << "Stream \"" << fStreamId << "\"; ";
            envir() << fSubsession.mediumName() << "/" << fSubsession.codecName() << ":\tReceived " << frameSize << " bytes";
            if (numTruncatedBytes > 0) envir() << " (with " << numTruncatedBytes << " bytes truncated)";
            char uSecsStr[6+1]; // used to output the 'microseconds' part of the presentation time
            sprintf(uSecsStr, "%06u", (unsigned)presentationTime.tv_usec);
            envir() << ".\tPresentation time: " << (int)presentationTime.tv_sec << "." << uSecsStr;
            if (fSubsession.rtpSource() != NULL && !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) 
            {
                envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized
            }
            envir() << "\tNPT: " << fSubsession.getNormalPlayTime(presentationTime);
            envir() << "\n";

            // Then continue, to request the next frame of data:
            continuePlaying();          
        }

    private:
        virtual Boolean continuePlaying()
        {
            if (fSource == NULL) return False; // sanity check (should not happen)          
            fSource->getNextFrame(fReceiveBuffer, DUMMY_SINK_RECEIVE_BUFFER_SIZE, afterGettingFrame, this, onSourceClosure, this);
            return True;
        }

    private:
        u_int8_t* fReceiveBuffer;
        MediaSubsession& fSubsession;
        char* fStreamId;
};

int main(int argc, char** argv) 
{
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

    if (argc < 2) 
    {
        *env << "Usage: " << argv[0] << " file.sdp\n";
        return 1;
    }
    const char* filename = argv[1];
    FILE* file = fopen(filename,"r");
    if (file == NULL)
    {
        *env << "Cannot open SDP file:" << filename << "\n";        
        return 1;
    }
    fseek(file, 0, SEEK_END);
    long size = ftell(file);
    fseek(file, 0, SEEK_SET);
    char* sdp = new char[size + 1]; // +1 for the NUL terminator that MediaSession::createNew expects
    fread(sdp, 1, size, file);
    sdp[size] = '\0';
    fclose(file);

    MediaSession* session = MediaSession::createNew(*env, sdp);
    delete[] sdp;
    if (session == NULL)
    {
        *env << "Failed to create a MediaSession object from the SDP description: " << env->getResultMsg() << "\n";     
        return 1;
    }

    MediaSubsessionIterator iter(*session);
    MediaSubsession* subsession = NULL;
    while ((subsession = iter.next()) != NULL) 
    {
        if (!subsession->initiate(0))
        {
            *env << "Failed to initiate the \"" << *subsession << "\" subsession: " << env->getResultMsg() << "\n";
        }
        else
        {
            subsession->sink = DummySink::createNew(*env, *subsession, filename);
            if (subsession->sink == NULL)
            {
                *env << "Failed to create a data sink for the \"" << *subsession << "\" subsession: " << env->getResultMsg() << "\n";           
            }
            else
            {
                subsession->sink->startPlaying(*subsession->rtpSource(), NULL, NULL);
            }
        }
    }

    char eventLoopWatchVariable = 0;
    env->taskScheduler().doEventLoop(&eventLoopWatchVariable);

    return 0;
}
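
Assuming live555 was built and installed under /usr/local (adjust the include and library paths to your installation; the source file name sdpclient.cpp is just a placeholder), the program can be compiled and started roughly like this:

g++ -I/usr/local/include/liveMedia -I/usr/local/include/groupsock \
    -I/usr/local/include/BasicUsageEnvironment -I/usr/local/include/UsageEnvironment \
    sdpclient.cpp -o sdpclient \
    -lliveMedia -lgroupsock -lBasicUsageEnvironment -lUsageEnvironment
./sdpclient ffmpeg.sdp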

Run the program with the path of the file containing the SDP description as its argument; it will read the RTP stream and print the frame size and the timestamp of every frame, like:

Stream "ffmpeg.sdp"; video/H265:    Received 5131 bytes.    Presentation time: 1442350569.228804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7917 bytes.    Presentation time: 1442350569.268804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 2383 bytes.    Presentation time: 1442350569.308804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7780 bytes.    Presentation time: 1442350569.348804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 1773 bytes.    Presentation time: 1442350569.388804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 9580 bytes.    Presentation time: 1442350569.428804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7934 bytes.    Presentation time: 1442350569.468804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 2180 bytes.    Presentation time: 1442350569.508804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 10804 bytes.   Presentation time: 1442350569.548804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7801 bytes.    Presentation time: 1442350569.588804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7816 bytes.    Presentation time: 1442350569.628804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 4028 bytes.    Presentation time: 1442350569.668804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7959 bytes.    Presentation time: 1442350569.708804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 8062 bytes.    Presentation time: 1442350569.794000    NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 8014 bytes.    Presentation time: 1442350569.834000    NPT: 0.000000