// src/add-ons/media/plugins/rtsp_streamer/rtsp.cpp

/**********
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the
Free Software Foundation; either version 2.1 of the License, or (at your
option) any later version. (See <http://www.gnu.org/copyleft/lesser.html>.)
This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
**********/
// Copyright (c) 1996-2016, Live Networks, Inc. All rights reserved
// Copyright (c) 2016, Dario Casalinuovo. All rights reserved.


#include "rtsp.h"

#include <AdapterIO.h>

#include "RTSPMediaIO.h"
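

// The two settings below are used further down: REQUEST_STREAMING_OVER_TCP is
// passed to "sendSetupCommand()" as its streamUsingTCP argument (False means
// the media travels over RTP/UDP instead of being interleaved on the RTSP TCP
// connection), and RECEIVE_BUFFER_SIZE is the size of the per-sink frame
// buffer; frames larger than this are truncated by the source.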
#define REQUEST_STREAMING_OVER_TCP False
#define RECEIVE_BUFFER_SIZE 100000


// Output helpers used for logging to the live555 UsageEnvironment.
UsageEnvironment& operator<<(UsageEnvironment& env,
	const RTSPClient& rtspClient)
{
	return env << "[URL:\"" << rtspClient.url() << "\"]: ";
}


UsageEnvironment& operator<<(UsageEnvironment& env,
	const MediaSubsession& subsession)
{
	return env << subsession.mediumName() << "/" << subsession.codecName();
}
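

// AdapterSink bridges live555 and BAdapterIO: it is a MediaSink that copies
// every frame it receives from its RTP source into the stream's BInputAdapter
// (see afterGettingFrame() below), which is where the rest of the plugin
// reads the data from.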
class AdapterSink : public MediaSink
{
public:
	static AdapterSink* createNew(UsageEnvironment& env,
			MediaSubsession& subsession,
			BInputAdapter* inputAdapter,
			char const* streamId = NULL);

private:
	AdapterSink(UsageEnvironment& env,
			MediaSubsession& subsession,
			char const* streamId,
			BInputAdapter* inputAdapter);

	virtual ~AdapterSink();

	static void afterGettingFrame(void* clientData,
			unsigned frameSize,
			unsigned numTruncatedBytes,
			struct timeval presentationTime,
			unsigned durationInMicroseconds);

	void afterGettingFrame(unsigned frameSize,
			unsigned numTruncatedBytes,
			struct timeval presentationTime,
			unsigned durationInMicroseconds);

private:
	// redefined virtual functions:
	virtual Boolean continuePlaying();

private:
	BInputAdapter* fInputAdapter;
	u_int8_t* fReceiveBuffer;
	MediaSubsession& fSubsession;
	char* fStreamId;
};


// Implementation of the RTSP 'response handlers':
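// The flow follows live555's "testRTSPClient" example: the "DESCRIBE" reply
// lands in continueAfterDESCRIBE(), which creates the MediaSession;
// setupNextSubsession() then iterates over the subsessions, sending a "SETUP"
// for each one (handled by continueAfterSETUP()); once all are set up a
// "PLAY" is sent and continueAfterPLAY() reports the result back to the
// HaikuRTSPClient. Unrecoverable errors funnel into shutdownStream().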

void continueAfterDESCRIBE(RTSPClient* rtspClient,
	int resultCode, char* resultString)
{
	UsageEnvironment& env = rtspClient->envir();
	HaikuRTSPClient* client = (HaikuRTSPClient*) rtspClient;
	do {
		if (resultCode != 0) {
			env << *rtspClient << "Failed to get a SDP description: "
				<< resultString << "\n";
			delete[] resultString;

			break;
		}

		char* const sdpDescription = resultString;
		env << *rtspClient << "Got a SDP description:\n"
			<< sdpDescription << "\n";

		// Create a media session object from this SDP description:
		client->session = MediaSession::createNew(env, sdpDescription);
		delete[] sdpDescription; // because we don't need it anymore
		if (client->session == NULL) {
			env << *rtspClient
				<< "Failed to create a MediaSession object "
					"from the SDP description: "
				<< env.getResultMsg() << "\n";

			break;
		} else if (!client->session->hasSubsessions()) {
			env << *rtspClient << "This session has no media subsessions"
				" (i.e., no \"m=\" lines)\n";

			break;
		}

		// Then, create and set up our data source objects for the session.
		// We do this by iterating over the session's 'subsessions',
		// calling "MediaSubsession::initiate()",
		// and then sending a RTSP "SETUP" command, on each one.
		// (Each 'subsession' will have its own data source.)
		client->iter = new MediaSubsessionIterator(*client->session);
		setupNextSubsession(rtspClient);
		return;
	} while (0);

	// An unrecoverable error occurred with this stream.
	shutdownStream(rtspClient);
}


void setupNextSubsession(RTSPClient* rtspClient)
{
	UsageEnvironment& env = rtspClient->envir();
	HaikuRTSPClient* client = (HaikuRTSPClient*) rtspClient;

	client->subsession = client->iter->next();
	if (client->subsession != NULL) {
		if (!client->subsession->initiate()) {
			env << *rtspClient << "Failed to initiate the \""
				<< *client->subsession << "\" subsession: "
				<< env.getResultMsg() << "\n";

			// give up on this subsession; go to the next one
			setupNextSubsession(rtspClient);
		}
		else {
			env << *rtspClient << "Initiated the \""
				<< *client->subsession << "\" subsession (";

			if (client->subsession->rtcpIsMuxed()) {
				env << "client port " << client->subsession->clientPortNum();
			} else {
				env << "client ports " << client->subsession->clientPortNum()
					<< "-" << client->subsession->clientPortNum() + 1;
			}
			env << ")\n";

			// Continue setting up this subsession,
			// by sending a RTSP "SETUP" command:
			rtspClient->sendSetupCommand(*client->subsession,
				continueAfterSETUP, False, REQUEST_STREAMING_OVER_TCP);
		}
		return;
	}

	// We've finished setting up all of the subsessions.
	// Now, send a RTSP "PLAY" command to start the streaming:
	if (client->session->absStartTime() != NULL) {
		// Special case: The stream is indexed by 'absolute' time,
		// so send an appropriate "PLAY" command:
		rtspClient->sendPlayCommand(*client->session, continueAfterPLAY,
			client->session->absStartTime(), client->session->absEndTime());
	} else {
		client->duration = client->session->playEndTime()
			- client->session->playStartTime();
		rtspClient->sendPlayCommand(*client->session, continueAfterPLAY);
	}
}


void continueAfterSETUP(RTSPClient* rtspClient,
	int resultCode, char* resultString)
{
	do {
		UsageEnvironment& env = rtspClient->envir();
		HaikuRTSPClient* client = (HaikuRTSPClient*) rtspClient;

		if (resultCode != 0) {
			env << *rtspClient << "Failed to set up the \""
				<< *client->subsession << "\" subsession: "
				<< resultString << "\n";
			break;
		}

		env << *rtspClient << "Set up the \""
			<< *client->subsession << "\" subsession (";
		if (client->subsession->rtcpIsMuxed()) {
			env << "client port " << client->subsession->clientPortNum();
		} else {
			env << "client ports " << client->subsession->clientPortNum()
				<< "-" << client->subsession->clientPortNum() + 1;
		}
		env << ")\n";

		// Having successfully setup the subsession, create a data sink for it,
		// and call "startPlaying()" on it.
		// (This will prepare the data sink to receive data; the actual
		// flow of data from the client won't start happening until later,
		// after we've sent a RTSP "PLAY" command.)

		client->subsession->sink = AdapterSink::createNew(env,
			*client->subsession, client->GetInputAdapter(), rtspClient->url());
			// perhaps use your own custom "MediaSink" subclass instead
		if (client->subsession->sink == NULL) {
			env << *rtspClient << "Failed to create a data sink for the \""
				<< *client->subsession << "\" subsession: "
				<< env.getResultMsg() << "\n";
			break;
		}

		env << *rtspClient << "Created a data sink for the \""
			<< *client->subsession << "\" subsession\n";
		// a hack to let subsession handler functions
		// get the "RTSPClient" from the subsession
		client->subsession->miscPtr = rtspClient;
		client->subsession->sink
			->startPlaying(*(client->subsession->readSource()),
				subsessionAfterPlaying, client->subsession);
		// Also set a handler to be called if a RTCP "BYE"
		// arrives for this subsession:
		if (client->subsession->rtcpInstance() != NULL) {
			client->subsession->rtcpInstance()->setByeHandler(
				subsessionByeHandler,
				client->subsession);
		}
	} while (0);
	delete[] resultString;

	// Set up the next subsession, if any:
	setupNextSubsession(rtspClient);
}


void continueAfterPLAY(RTSPClient* rtspClient,
	int resultCode, char* resultString)
{
	Boolean success = False;
	UsageEnvironment& env = rtspClient->envir();
	HaikuRTSPClient* client = (HaikuRTSPClient*) rtspClient;

	do {
		if (resultCode != 0) {
			env << *rtspClient << "Failed to start playing session: "
				<< resultString << "\n";
			break;
		}

		// Set a timer to be handled at the end of the stream's
		// expected duration (if the stream does not already signal its end
		// using a RTCP "BYE"). This is optional. If, instead, you want
		// to keep the stream active - e.g., so you can later
		// 'seek' back within it and do another RTSP "PLAY"
		// - then you can omit this code.
		// (Alternatively, if you don't want to receive the entire stream,
		// you could set this timer for some shorter value.)
		if (client->duration > 0) {
			// number of seconds extra to delay,
			// after the stream's expected duration. (This is optional.)
			unsigned const delaySlop = 2;
			client->duration += delaySlop;
			unsigned uSecsToDelay = (unsigned)(client->duration * 1000000);
			client->streamTimerTask
				= env.taskScheduler().scheduleDelayedTask(uSecsToDelay,
					(TaskFunc*)streamTimerHandler, rtspClient);
		}

		env << *rtspClient << "Started playing session";
		if (client->duration > 0) {
			env << " (for up to " << client->duration << " seconds)";
		}
		env << "...\n";

		success = True;
	} while (0);
	delete[] resultString;

	if (!success) {
		// An unrecoverable error occurred with this stream.
		shutdownStream(rtspClient);
	} else
		client->NotifySucces();
}


// Implementation of the other event handlers:
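// A stream ends in one of three ways: the source closes and
// subsessionAfterPlaying() finds no active sinks left, the server sends a
// RTCP "BYE" (subsessionByeHandler()), or the optional duration timer fires
// (streamTimerHandler()). All three paths end in shutdownStream(), which
// sends "TEARDOWN", closes the client and notifies the waiting reader.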

void subsessionAfterPlaying(void* clientData)
{
	MediaSubsession* subsession = (MediaSubsession*)clientData;
	RTSPClient* rtspClient = (RTSPClient*)(subsession->miscPtr);

	// Begin by closing this subsession's stream:
	Medium::close(subsession->sink);
	subsession->sink = NULL;

	// Next, check whether *all* subsessions' streams have now been closed:
	MediaSession& session = subsession->parentSession();
	MediaSubsessionIterator iter(session);
	while ((subsession = iter.next()) != NULL) {
		if (subsession->sink != NULL)
			return; // this subsession is still active
	}

	// All subsessions' streams have now been closed, so shutdown the client:
	shutdownStream(rtspClient);
}


void subsessionByeHandler(void* clientData)
{
	MediaSubsession* subsession = (MediaSubsession*)clientData;
	RTSPClient* rtspClient = (RTSPClient*)subsession->miscPtr;
	UsageEnvironment& env = rtspClient->envir();

	env << *rtspClient << "Received RTCP \"BYE\" on \""
		<< *subsession << "\" subsession\n";

	// Now act as if the subsession had closed:
	subsessionAfterPlaying(subsession);
}


void streamTimerHandler(void* clientData)
{
	HaikuRTSPClient* client = (HaikuRTSPClient*)clientData;

	client->streamTimerTask = NULL;

	// Shut down the stream:
	shutdownStream(client);
}


void shutdownStream(RTSPClient* rtspClient, int exitCode)
{
	UsageEnvironment& env = rtspClient->envir();
	HaikuRTSPClient* client = (HaikuRTSPClient*) rtspClient;

	// First, check whether any subsessions have still to be closed:
	if (client->session != NULL) {
		Boolean someSubsessionsWereActive = False;
		MediaSubsessionIterator iter(*client->session);
		MediaSubsession* subsession;

		while ((subsession = iter.next()) != NULL) {
			if (subsession->sink != NULL) {
				Medium::close(subsession->sink);
				subsession->sink = NULL;

				if (subsession->rtcpInstance() != NULL) {
					// in case the server sends a RTCP "BYE"
					// while handling "TEARDOWN"
					subsession->rtcpInstance()->setByeHandler(NULL, NULL);
				}

				someSubsessionsWereActive = True;
			}
		}

		if (someSubsessionsWereActive) {
			// Send a RTSP "TEARDOWN" command,
			// to tell the server to shutdown the stream.
			// Don't bother handling the response to the "TEARDOWN".
			rtspClient->sendTeardownCommand(*client->session, NULL);
		}
	}

	env << *rtspClient << "Closing the stream.\n";

	// Notify the RTSPMediaIO side before "Medium::close()" destroys this
	// client object; calling NotifyError() on the client after it has been
	// closed would touch freed memory.
	client->NotifyError();

	Medium::close(rtspClient);
		// Note that this will also cause this stream's
		// "StreamClientState" structure to get reclaimed.
}


AdapterSink* AdapterSink::createNew(UsageEnvironment& env,
	MediaSubsession& subsession, BInputAdapter* inputAdapter,
	char const* streamId)
{
	return new AdapterSink(env, subsession, streamId, inputAdapter);
}


AdapterSink::AdapterSink(UsageEnvironment& env, MediaSubsession& subsession,
	char const* streamId, BInputAdapter* inputAdapter)
	:
	MediaSink(env),
	fInputAdapter(inputAdapter),
	fSubsession(subsession)
{
	fStreamId = strDup(streamId);
	fReceiveBuffer = new u_int8_t[RECEIVE_BUFFER_SIZE];
}


AdapterSink::~AdapterSink()
{
	delete[] fReceiveBuffer;
	delete[] fStreamId;
}


void AdapterSink::afterGettingFrame(void* clientData, unsigned frameSize,
	unsigned numTruncatedBytes, struct timeval presentationTime,
	unsigned durationInMicroseconds)
{
	AdapterSink* sink = (AdapterSink*)clientData;
	sink->afterGettingFrame(frameSize, numTruncatedBytes,
		presentationTime, durationInMicroseconds);
}


void
AdapterSink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
	struct timeval presentationTime, unsigned /*durationInMicroseconds*/)
{
	// Hand the received frame to the BAdapterIO input side. If
	// numTruncatedBytes > 0, the frame was larger than RECEIVE_BUFFER_SIZE
	// and its tail has been dropped by the source.
	fInputAdapter->Write(fReceiveBuffer, frameSize);
	continuePlaying();
}


Boolean
AdapterSink::continuePlaying()
{
	if (fSource == NULL)
		return False;

	// Request the next frame from our input source; afterGettingFrame()
	// will be called once it has been delivered into fReceiveBuffer.
	fSource->getNextFrame(fReceiveBuffer, RECEIVE_BUFFER_SIZE,
		afterGettingFrame, this,
		onSourceClosure, this);
	return True;
}
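

// Note on how this file is driven (summary; the actual code lives in
// RTSPMediaIO.cpp): HaikuRTSPClient, declared in RTSPMediaIO.h, plays the
// role of the "StreamClientState" structure from live555's testRTSPClient
// and carries the per-stream state used above (session, iter, subsession,
// duration, streamTimerTask). The RTSPMediaIO side is presumed to send the
// initial "DESCRIBE" and run the live555 event loop on its own thread; once
// continueAfterPLAY() succeeds, frames flow through AdapterSink into the
// BAdapterIO buffer that the media plugin reads from.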