In order to receive an RTP stream described by an SDP file with live555, you need to:
- Create a
MediaSession
from the SDP (this will create the associated MediaSubsession
)
- Initiate the
MediaSubsession
in order to open the UDP ports that will receive RTP/RTCP
- Create an overloaded
MediaSink
to receive the RTP frames
- Start this sink
A naive implementation inspired by testRTSPClient.cpp could look like this:
#include <cstdio>
#include <string>

#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"
// Debug-print helper: writes "<medium>/<codec>" (e.g. "video/H265") for a
// subsession to the environment's output.
UsageEnvironment& operator<<(UsageEnvironment& env, const MediaSubsession& subsession)
{
env << subsession.mediumName();
env << "/";
env << subsession.codecName();
return env;
}
#define DUMMY_SINK_RECEIVE_BUFFER_SIZE 100000

// A MediaSink that pulls frames from a subsession's source, prints a one-line
// summary per frame (stream id, medium/codec, size, presentation time, NPT)
// and discards the payload. Modeled on the DummySink of live555's
// testRTSPClient.cpp.
class DummySink: public MediaSink
{
public:
// Factory, following the live555 convention of private constructors.
static DummySink* createNew(UsageEnvironment& env,
MediaSubsession& subsession, // identifies the kind of data that's being received
char const* streamId = NULL) // identifies the stream itself (optional)
{
return new DummySink(env, subsession, streamId);
}
private:
DummySink(UsageEnvironment& env, MediaSubsession& subsession, char const* streamId)
: MediaSink(env), fSubsession(subsession)
{
fStreamId = strDup(streamId); // strDup allocates with new[]; matched by delete[] below
fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE];
}
virtual ~DummySink()
{
delete[] fReceiveBuffer;
delete[] fStreamId;
}
// Static trampoline with the afterGettingFunc signature that
// FramedSource::getNextFrame() expects; forwards to the member overload.
static void afterGettingFrame(void* clientData, unsigned frameSize,
unsigned numTruncatedBytes,
struct timeval presentationTime,
unsigned durationInMicroseconds)
{
DummySink* sink = (DummySink*)clientData;
sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);
}
// Logs one received frame, then re-arms the source for the next frame.
void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
struct timeval presentationTime, unsigned durationInMicroseconds)
{
if (fStreamId != NULL) envir() << "Stream \"" << fStreamId << "\"; ";
envir() << fSubsession.mediumName() << "/" << fSubsession.codecName() << ":\tReceived " << frameSize << " bytes";
if (numTruncatedBytes > 0) envir() << " (with " << numTruncatedBytes << " bytes truncated)";
char uSecsStr[6+1]; // used to output the 'microseconds' part of the presentation time
// snprintf, not sprintf: a tv_usec outside [0, 999999] would print more
// than 6 digits and overflow the 7-byte buffer with sprintf.
snprintf(uSecsStr, sizeof uSecsStr, "%06u", (unsigned)presentationTime.tv_usec);
envir() << ".\tPresentation time: " << (int)presentationTime.tv_sec << "." << uSecsStr;
if (fSubsession.rtpSource() != NULL && !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP())
{
envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized
}
envir() << "\tNPT: " << fSubsession.getNormalPlayTime(presentationTime);
envir() << "\n";
// Then continue, to request the next frame of data:
continuePlaying();
}
private:
// Called by startPlaying() and after each delivered frame: requests the
// next frame from the source into our receive buffer.
virtual Boolean continuePlaying()
{
if (fSource == NULL) return False; // sanity check (should not happen)
fSource->getNextFrame(fReceiveBuffer, DUMMY_SINK_RECEIVE_BUFFER_SIZE, afterGettingFrame, this, onSourceClosure, this);
return True;
}
private:
u_int8_t* fReceiveBuffer;     // scratch buffer; frame contents are discarded
MediaSubsession& fSubsession; // identifies the medium/codec being received
char* fStreamId;              // heap copy of the stream id (may be NULL)
};
// Reads an SDP description from the file named on the command line, builds a
// MediaSession from it, initiates each subsession (opening the RTP/RTCP UDP
// ports) and attaches a DummySink that logs every received frame.
// Returns 1 on any setup error, otherwise runs the event loop forever.
int main(int argc, char** argv)
{
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
UsageEnvironment* env = BasicUsageEnvironment::createNew(*scheduler);
if (argc < 2)
{
*env << "Usage: " << argv[0] << " file.sdp\n";
return 1;
}
const char* filename = argv[1];
FILE* file = fopen(filename,"r");
if (file == NULL)
{
*env << "Cannot open SDP file:" << filename << "\n";
return 1;
}
fseek(file, 0, SEEK_END);
long size = ftell(file);
fseek(file, 0, SEEK_SET);
if (size < 0) // ftell failed; the original code would have created a negative-sized VLA
{
*env << "Cannot get the size of the SDP file:" << filename << "\n";
fclose(file);
return 1;
}
// Read the whole file into a std::string: unlike the original non-standard
// VLA (char sdp[size]), this guarantees the NUL terminator that
// MediaSession::createNew() expects, and checks how much was actually read.
std::string sdp((size_t)size, '\0');
size_t bytesRead = (size > 0) ? fread(&sdp[0], 1, (size_t)size, file) : 0;
fclose(file);
sdp.resize(bytesRead);
MediaSession* session = MediaSession::createNew(*env, sdp.c_str());
if (session == NULL)
{
*env << "Failed to create a MediaSession object from the SDP description: " << env->getResultMsg() << "\n";
return 1;
}
// Initiate every subsession and attach a logging sink to each one that
// could be opened; failures are reported but do not abort the others.
MediaSubsessionIterator iter(*session);
MediaSubsession* subsession = NULL;
while ((subsession = iter.next()) != NULL)
{
if (!subsession->initiate(0))
{
*env << "Failed to initiate the \"" << *subsession << "\" subsession: " << env->getResultMsg() << "\n";
}
else
{
subsession->sink = DummySink::createNew(*env, *subsession, filename);
if (subsession->sink == NULL)
{
*env << "Failed to create a data sink for the \"" << *subsession << "\" subsession: " << env->getResultMsg() << "\n";
}
else
{
subsession->sink->startPlaying(*subsession->rtpSource(), NULL, NULL);
}
}
}
char eventLoopWatchVariable = 0;
env->taskScheduler().doEventLoop(&eventLoopWatchVariable); // never returns unless the watch variable is set
return 0;
}
Running the program with the path of the SDP file as its argument will read the RTP streams, printing the frame size and timestamp of each frame.
Something like:
Stream "ffmpeg.sdp"; video/H265: Received 5131 bytes. Presentation time: 1442350569.228804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 7917 bytes. Presentation time: 1442350569.268804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 2383 bytes. Presentation time: 1442350569.308804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 7780 bytes. Presentation time: 1442350569.348804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 1773 bytes. Presentation time: 1442350569.388804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 9580 bytes. Presentation time: 1442350569.428804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 7934 bytes. Presentation time: 1442350569.468804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 2180 bytes. Presentation time: 1442350569.508804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 10804 bytes. Presentation time: 1442350569.548804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 7801 bytes. Presentation time: 1442350569.588804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 7816 bytes. Presentation time: 1442350569.628804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 4028 bytes. Presentation time: 1442350569.668804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 7959 bytes. Presentation time: 1442350569.708804! NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 8062 bytes. Presentation time: 1442350569.794000 NPT: 0.000000
Stream "ffmpeg.sdp"; video/H265: Received 8014 bytes. Presentation time: 1442350569.834000 NPT: 0.000000