首页
学习
活动
专区
圈层
工具
发布
社区首页 >问答首页 >如何用.sdp打开本地live555文件

如何用.sdp打开本地live555文件
EN

Stack Overflow用户
提问于 2015-09-09 09:02:45
回答 1查看 2.4K关注 0票数 4

我需要打印每帧时间(UTC)通过获得每个RTP时间戳,但vlc API不支持这个特性。因此,我刚听说VLC库调用live555库来解析RTSP,并在testRTSPClient(官方网站testRTSPClient)中找到函数afterGettingFrame,打印出每个帧的UTC时间。

我只是尝试使用testRTSPClient在本地PC中打开.sdp文件，但不起作用。它只能打开 "rtsp://123.434.12.4/3523swdawd.sdp" 这种形式的URL。

我需要架设一个RTSP服务器吗？因为我发现它需要向服务器发送一些特殊的RTSP命令（SETUP、PLAY、OPTIONS）。

如果testRTSPClient只能处理rtsp://123.434.12.4/3523swdawd.sdp这种形式的url,那么testRTSPClient媒体播放器如何处理本地.sdp文件而不设置RTSP服务器?

提示:这个本地.sdp文件是给我的本地IP相机的。我可以用VLC播放器从IP摄像头播放视频帧,但我只想使用testRTSPClient处理本地.sdp文件并打印视频帧的UTC时间,有谁能解决这个问题呢?

EN

回答 1

Stack Overflow用户

回答已采纳

发布于 2015-09-15 20:52:37

为了接收使用live555的SDP文件描述的RTP流,您需要:

  1. 从SDP创建一个MediaSession (这将创建关联的MediaSubsession )
  2. 启动MediaSubsession以便打开接收RTP/RTCP的UDP端口
  3. 创建一个重载的MediaSink来接收RTP帧
  4. 启动这个Sink（接收器）

受testRTSPClient.cpp启发的一个朴素实现可能如下所示：

代码语言：c++
复制
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

#include <cstdio>
#include <string>

// Print a subsession as "<medium>/<codec>" (e.g. "video/H265") so that
// MediaSubsession objects can be streamed straight into a UsageEnvironment.
UsageEnvironment& operator<<(UsageEnvironment& env, const MediaSubsession& subsession) 
{
    env << subsession.mediumName();
    env << "/";
    env << subsession.codecName();
    return env;
}

#define DUMMY_SINK_RECEIVE_BUFFER_SIZE 100000

// A minimal MediaSink that discards the frame payload but logs, for every
// frame received: the stream id, medium/codec, frame size, any truncation,
// the presentation time (marked with "!" while it is not yet
// RTCP-synchronized) and the normal play time (NPT).
// Modeled on the DummySink from live555's testRTSPClient.cpp example.
class DummySink: public MediaSink 
{
    public:
        static DummySink* createNew(UsageEnvironment& env,
                      MediaSubsession& subsession, // identifies the kind of data that's being received
                      char const* streamId = NULL) // identifies the stream itself (optional)
        {
              return new DummySink(env, subsession, streamId);
        }

    private:
        DummySink(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId)  
            : MediaSink(env), fSubsession(subsession) 
        {
            // strDup allocates with new[]; it is released with delete[] in the destructor.
            fStreamId = strDup(streamId);
            fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE];
        }

        virtual ~DummySink()
        {
            delete[] fReceiveBuffer;
            delete[] fStreamId;
        }

        // Static trampoline required by FramedSource::getNextFrame():
        // recovers the DummySink instance from clientData and forwards
        // to the member-function overload below.
        static void afterGettingFrame(void* clientData, unsigned frameSize,
                    unsigned numTruncatedBytes,
                    struct timeval presentationTime,
                    unsigned durationInMicroseconds)
        {
            DummySink* sink = (DummySink*)clientData;
            sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);        
        }
        // Called once per received frame: logs the frame's metadata, then
        // re-arms continuePlaying() to request the next frame.
        void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
                 struct timeval presentationTime, unsigned durationInMicroseconds)
        {
            if (fStreamId != NULL) envir() << "Stream \"" << fStreamId << "\"; ";
            envir() << fSubsession.mediumName() << "/" << fSubsession.codecName() << ":\tReceived " << frameSize << " bytes";
            if (numTruncatedBytes > 0) envir() << " (with " << numTruncatedBytes << " bytes truncated)";
            char uSecsStr[6+1]; // used to output the 'microseconds' part of the presentation time
            sprintf(uSecsStr, "%06u", (unsigned)presentationTime.tv_usec);
            envir() << ".\tPresentation time: " << (int)presentationTime.tv_sec << "." << uSecsStr;
            if (fSubsession.rtpSource() != NULL && !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) 
            {
                envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized
            }
            envir() << "\tNPT: " << fSubsession.getNormalPlayTime(presentationTime);
            envir() << "\n";

            // Then continue, to request the next frame of data:
            continuePlaying();          
        }

    private:
        // Asks the upstream source for the next frame; afterGettingFrame
        // (the static trampoline above) fires when it arrives.
        virtual Boolean continuePlaying()
        {
            if (fSource == NULL) return False; // sanity check (should not happen)          
            fSource->getNextFrame(fReceiveBuffer, DUMMY_SINK_RECEIVE_BUFFER_SIZE, afterGettingFrame, this, onSourceClosure, this);
            return True;
        }

    private:
        u_int8_t* fReceiveBuffer;    // scratch buffer the frame payload is written into (contents ignored)
        MediaSubsession& fSubsession; // the subsession this sink consumes; used only for logging
        char* fStreamId;              // owned copy of the stream id (may be NULL)
};

// Reads an SDP description from the file given as argv[1], creates a
// live555 MediaSession from it, attaches a DummySink to every subsession
// and runs the event loop forever, logging each received frame.
// Returns 1 on any setup error.
int main(int argc, char** argv) 
{
    TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);

    if (argc < 2) 
    {
        *env << "Usage: " << argv[0] << " file.sdp\n";
        return 1;
    }
    const char* filename = argv[1];
    FILE* file = fopen(filename, "r");
    if (file == NULL)
    {
        *env << "Cannot open SDP file:" << filename << "\n";        
        return 1;
    }
    fseek(file, 0, SEEK_END);
    long size = ftell(file);
    fseek(file, 0, SEEK_SET);
    if (size < 0)
    {
        *env << "Cannot determine size of SDP file:" << filename << "\n";
        fclose(file);
        return 1;
    }
    // Read the whole file into a std::string. This fixes two defects of the
    // original version: `char sdp[size]` was a non-standard VLA, and the
    // buffer was passed to MediaSession::createNew WITHOUT a terminating
    // '\0' even though it expects a NUL-terminated C string (an
    // out-of-bounds read). c_str() below is always NUL-terminated.
    std::string sdp(static_cast<size_t>(size), '\0');
    size_t bytesRead = fread(&sdp[0], 1, static_cast<size_t>(size), file);
    fclose(file);
    sdp.resize(bytesRead); // keep only what was actually read ("r" mode may translate newlines)

    MediaSession* session = MediaSession::createNew(*env, sdp.c_str());
    if (session == NULL)
    {
        *env << "Failed to create a MediaSession object from the SDP description: " << env->getResultMsg() << "\n";     
        return 1;
    }

    // Initiate every subsession (opens the RTP/RTCP UDP ports) and start a
    // DummySink on it; failures are logged but do not abort the others.
    MediaSubsessionIterator iter(*session);
    MediaSubsession* subsession = NULL;
    while ((subsession = iter.next()) != NULL) 
    {
        if (!subsession->initiate (0))
        {
            *env << "Failed to initiate the \"" << *subsession << "\" subsession: " << env->getResultMsg() << "\n";
        }
        else
        {
            subsession->sink = DummySink::createNew(*env, *subsession, filename);
            if (subsession->sink == NULL)
            {
                *env << "Failed to create a data sink for the \"" << *subsession << "\" subsession: " << env->getResultMsg() << "\n";           
            }
            else
            {
                subsession->sink->startPlaying(*subsession->rtpSource(), NULL, NULL);
            }
        }
    }

    // Runs until eventLoopWatchVariable becomes non-zero; nothing in this
    // program ever sets it, so the loop (and the process) runs forever.
    char eventLoopWatchVariable = 0;
    env->taskScheduler().doEventLoop(&eventLoopWatchVariable);

    return 0;
}

运行程序时,给出包含SDP的文件的路径作为参数,将读取RTP流、打印帧大小和每个帧的时间戳。

类似于:

代码语言：文本（程序输出）
复制
Stream "ffmpeg.sdp"; video/H265:    Received 5131 bytes.    Presentation time: 1442350569.228804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7917 bytes.    Presentation time: 1442350569.268804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 2383 bytes.    Presentation time: 1442350569.308804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7780 bytes.    Presentation time: 1442350569.348804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 1773 bytes.    Presentation time: 1442350569.388804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 9580 bytes.    Presentation time: 1442350569.428804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7934 bytes.    Presentation time: 1442350569.468804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 2180 bytes.    Presentation time: 1442350569.508804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 10804 bytes.   Presentation time: 1442350569.548804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7801 bytes.    Presentation time: 1442350569.588804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7816 bytes.    Presentation time: 1442350569.628804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 4028 bytes.    Presentation time: 1442350569.668804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 7959 bytes.    Presentation time: 1442350569.708804!   NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 8062 bytes.    Presentation time: 1442350569.794000    NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265:    Received 8014 bytes.    Presentation time: 1442350569.834000    NPT: 0.000000
票数 2
EN
页面原文内容由Stack Overflow提供。腾讯云小微IT领域专用引擎提供翻译支持
原文链接:

https://stackoverflow.com/questions/32475317

复制
相关文章

相似问题

领券
问题归档专栏文章快讯文章归档关键词归档开发者手册归档开发者手册 Section 归档