arduino-audio-tools
All Classes Namespaces Files Functions Variables Typedefs Enumerations Friends Modules Pages
AudioClientRTSP.h
1
2#pragma once
3
20// Copyright (c) 1996-2023, Live Networks, Inc. All rights reserved
21// A demo application, showing how to create and run a RTSP client (that can
22// potentially receive multiple streams concurrently).
23//
24
25#include "AudioLogger.h"
26#include "Print.h" // Arduino Print
27// include live555
28#include "BasicUsageEnvironment.hh"
29//#include "liveMedia.hh"
30#include "RTSPClient.hh"
31
32// By default, we request that the server stream its data using RTP/UDP.
33// If, instead, you want to request that the server stream via RTP-over-TCP,
34// change the following to True:
35#define REQUEST_STREAMING_OVER_TCP false
36
37// by default, print verbose output from each "RTSPClient"
38#define RTSP_CLIENT_VERBOSITY_LEVEL 1
39// Even though we're not going to be doing anything with the incoming data, we
40// still need to receive it. Define the size of the buffer that we'll use:
41#define RTSP_SINK_BUFFER_SIZE 1024
42
// The per-frame debugging code below is guarded by
// "#ifdef DEBUG_PRINT_EACH_RECEIVED_FRAME", so the macro must be left
// *undefined* to disable that output. (The previous "#define ... 0" still
// satisfied the #ifdef and therefore enabled the debug output by accident.)
// If you DO want to see debugging output for each received frame, replace
// the #undef below with: #define DEBUG_PRINT_EACH_RECEIVED_FRAME 1
#undef DEBUG_PRINT_EACH_RECEIVED_FRAME
47
49namespace audiotools_rtsp {
50
51class OurRTSPClient;
52// The main streaming routine (or each "rtsp://" URL):
53OurRTSPClient * openURL(UsageEnvironment& env, char const* progName, char const* rtspURL);
54// Counts how many streams (i.e., "RTSPClient"s) are currently in use.
55static unsigned rtspClientCount = 0;
56static char rtspEventLoopWatchVariable = 0;
57static Print* rtspOutput = nullptr;
58static uint32_t rtspSinkReceiveBufferSize = 0;
59static bool rtspUseTCP = REQUEST_STREAMING_OVER_TCP;
60
61} // namespace audiotools_rtsp
62
63namespace audio_tools {
64
72 public:
73 AudioClientRTSP(uint32_t receiveBufferSize = RTSP_SINK_BUFFER_SIZE, bool useTCP=REQUEST_STREAMING_OVER_TCP, bool blocking = false) {
74 setBufferSize(receiveBufferSize);
75 useTCP ? setTCP() : setUDP();
76 setBlocking(blocking);
77 }
78
79 void setBufferSize(int size){
80 audiotools_rtsp::rtspSinkReceiveBufferSize = size;
81 }
82
83 void setTCP(){
84 audiotools_rtsp::rtspUseTCP = true;
85 }
86
87 void setUDP(){
88 audiotools_rtsp::rtspUseTCP = false;
89 }
90
91 void setBlocking(bool flag){
92 is_blocking = flag;
93 }
94
96 void setLogin(const char* ssid, const char* password){
97 this->ssid = ssid;
98 this->password = password;
99 }
100
102 bool begin(const char* url, Print &out) {
103 audiotools_rtsp::rtspOutput = &out;
104 if (url==nullptr) {
105 return false;
106 }
107 if (!login()){
108 LOGE("wifi down");
109 return false;
110 }
111 // Begin by setting up our usage environment:
112 scheduler = BasicTaskScheduler::createNew();
113 env = BasicUsageEnvironment::createNew(*scheduler);
114
115 // There are argc-1 URLs: argv[1] through argv[argc-1]. Open and start
116 // streaming each one:
117 rtsp_client = audiotools_rtsp::openURL(*env, "RTSPClient", url);
118
119 // All subsequent activity takes place within the event loop:
120 if (is_blocking) env->taskScheduler().doEventLoop(&audiotools_rtsp::rtspEventLoopWatchVariable);
121 // This function call does not return, unless, at some point in time,
122 // "rtspEventLoopWatchVariable" gets set to something non-zero.
123
124 return true;
125 }
126
128 void loop() {
129 if (audiotools_rtsp::rtspEventLoopWatchVariable==0) scheduler->SingleStep();
130 }
131
132 void end() {
133 audiotools_rtsp::rtspEventLoopWatchVariable = 1;
134 env->reclaim();
135 env = NULL;
136 delete scheduler;
137 scheduler = NULL;
138 bool is_blocking = false;
139 }
140
142 return rtsp_client;
143 }
144
145 protected:
147 UsageEnvironment* env=nullptr;
148 BasicTaskScheduler* scheduler=nullptr;
149 const char* ssid=nullptr;
150 const char* password = nullptr;
151 bool is_blocking = false;
152
154 bool login(){
155 if(WiFi.status() != WL_CONNECTED && ssid!=nullptr && password!=nullptr){
156 WiFi.mode(WIFI_STA);
157 WiFi.begin(ssid, password);
158 while(WiFi.status() != WL_CONNECTED){
159 Serial.print(".");
160 delay(100);
161 }
162 Serial.println();
163 Serial.print("Local Address: ");
164 Serial.println(WiFi.localIP());
165 }
166 return WiFi.status() == WL_CONNECTED;
167 }
168
169
170};
171
172} // namespace audio_tools
173
174namespace audiotools_rtsp {
175// Define a class to hold per-stream state that we maintain throughout each
176// stream's lifetime:
177
178// Forward function definitions:
179
180// RTSP 'response handlers':
181void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode,
182 char* resultString);
183void continueAfterSETUP(RTSPClient* rtspClient, int resultCode,
184 char* resultString);
185void continueAfterPLAY(RTSPClient* rtspClient, int resultCode,
186 char* resultString);
187
188// Other event handler functions:
189void subsessionAfterPlaying(
190 void* clientData); // called when a stream's subsession (e.g., audio or
191 // video substream) ends
192void subsessionByeHandler(void* clientData, char const* reason);
193// called when a RTCP "BYE" is received for a subsession
194void streamTimerHandler(void* clientData);
195// called at the end of a stream's expected duration (if the stream has not
196// already signaled its end using a RTCP "BYE")
197
198// Used to iterate through each stream's 'subsessions', setting up each one:
199void setupNextSubsession(RTSPClient* rtspClient);
200
201// Used to shut down and close a stream (including its "RTSPClient" object):
202void shutdownStream(RTSPClient* rtspClient, int exitCode = 1);
203
204// A function that outputs a string that identifies each stream (for debugging
205// output). Modify this if you wish:
206UsageEnvironment& operator<<(UsageEnvironment& env,
207 const RTSPClient& rtspClient) {
208 return env << "[URL:\"" << rtspClient.url() << "\"]: ";
209}
210
211// A function that outputs a string that identifies each subsession (for
212// debugging output). Modify this if you wish:
213UsageEnvironment& operator<<(UsageEnvironment& env,
214 const MediaSubsession& subsession) {
215 return env << subsession.mediumName() << "/" << subsession.codecName();
216}
217
219 public:
221 virtual ~StreamClientState();
222
223 public:
224 MediaSubsessionIterator* iter;
225 MediaSession* session;
226 MediaSubsession* subsession;
227 TaskToken streamTimerTask;
228 double duration;
229};
230
231// If you're streaming just a single stream (i.e., just from a single URL,
232// once), then you can define and use just a single "StreamClientState"
233// structure, as a global variable in your application. However, because - in
234// this demo application - we're showing how to play multiple streams,
235// concurrently, we can't do that. Instead, we have to have a separate
236// "StreamClientState" structure for each "RTSPClient". To do this, we subclass
237// "RTSPClient", and add a "StreamClientState" field to the subclass:
238
239class OurRTSPClient : public RTSPClient {
240 public:
241 static OurRTSPClient* createNew(UsageEnvironment& env, char const* rtspURL,
242 int verbosityLevel = 0,
243 char const* applicationName = NULL,
244 portNumBits tunnelOverHTTPPortNum = 0);
245
246 protected:
247 OurRTSPClient(UsageEnvironment& env, char const* rtspURL, int verbosityLevel,
248 char const* applicationName, portNumBits tunnelOverHTTPPortNum);
249 // called only by createNew();
250 virtual ~OurRTSPClient();
251
252 public:
254};
255
256// Define a data sink (a subclass of "MediaSink") to receive the data for each
257// subsession (i.e., each audio or video 'substream'). In practice, this might
258// be a class (or a chain of classes) that decodes and then renders the incoming
259// audio or video. Or it might be a "FileSink", for outputting the received data
260// into a file (as is done by the "openRTSP" application). In this example code,
261// however, we define a simple 'dummy' sink that receives incoming data, but
262// does nothing with it.
263
264class OurSink : public MediaSink {
265 public:
266 static OurSink* createNew(
267 UsageEnvironment& env,
268 MediaSubsession&
269 subsession, // identifies the kind of data that's being received
270 char const* streamId = NULL); // identifies the stream itself (optional)
271
272 private:
273 OurSink(UsageEnvironment& env, MediaSubsession& subsession,
274 char const* streamId);
275 // called only by "createNew()"
276 virtual ~OurSink();
277
278 static void afterGettingFrame(void* clientData, unsigned frameSize,
279 unsigned numTruncatedBytes,
280 struct timeval presentationTime,
281 unsigned durationInMicroseconds);
282 void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
283 struct timeval presentationTime,
284 unsigned durationInMicroseconds);
285
286 private:
287 // redefined virtual functions:
288 virtual Boolean continuePlaying();
289
290 private:
291 u_int8_t* fReceiveBuffer;
292 MediaSubsession& fSubsession;
293 char* fStreamId;
294};
295
296OurRTSPClient* openURL(UsageEnvironment& env, char const* progName, char const* rtspURL) {
297 // Begin by creating a "RTSPClient" object. Note that there is a separate
298 // "RTSPClient" object for each stream that we wish to receive (even if more
299 // than stream uses the same "rtsp://" URL).
300 OurRTSPClient* rtspClient = OurRTSPClient::createNew(
301 env, rtspURL, RTSP_CLIENT_VERBOSITY_LEVEL, progName);
302 if (rtspClient == NULL) {
303 env << "Failed to create a RTSP client for URL \"" << rtspURL
304 << "\": " << env.getResultMsg() << "\n";
305 return nullptr;
306 }
307
308 ++rtspClientCount;
309
310 // Next, send a RTSP "DESCRIBE" command, to get a SDP description for the
311 // stream. Note that this command - like all RTSP commands - is sent
312 // asynchronously; we do not block, waiting for a response. Instead, the
313 // following function call returns immediately, and we handle the RTSP
314 // response later, from within the event loop:
315 rtspClient->sendDescribeCommand(continueAfterDESCRIBE);
316 return rtspClient;
317}
318
319// Implementation of the RTSP 'response handlers':
320
321void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode,
322 char* resultString) {
323 do {
324 UsageEnvironment& env = rtspClient->envir(); // alias
325 StreamClientState& scs = ((OurRTSPClient*)rtspClient)->scs; // alias
326
327 if (resultCode != 0) {
328 env << *rtspClient << "Failed to get a SDP description: " << resultString
329 << "\n";
330 delete[] resultString;
331 break;
332 }
333
334 char* const sdpDescription = resultString;
335 env << *rtspClient << "Got a SDP description:\n" << sdpDescription << "\n";
336
337 // Create a media session object from this SDP description:
338 scs.session = MediaSession::createNew(env, sdpDescription);
339 delete[] sdpDescription; // because we don't need it anymore
340 if (scs.session == NULL) {
341 env << *rtspClient
342 << "Failed to create a MediaSession object from the SDP description: "
343 << env.getResultMsg() << "\n";
344 break;
345 } else if (!scs.session->hasSubsessions()) {
346 env << *rtspClient
347 << "This session has no media subsessions (i.e., no \"m=\" lines)\n";
348 break;
349 }
350
351 // Then, create and set up our data source objects for the session. We do
352 // this by iterating over the session's 'subsessions', calling
353 // "MediaSubsession::initiate()", and then sending a RTSP "SETUP" command,
354 // on each one. (Each 'subsession' will have its own data source.)
355 scs.iter = new MediaSubsessionIterator(*scs.session);
356 setupNextSubsession(rtspClient);
357 return;
358 } while (0);
359
360 // An unrecoverable error occurred with this stream.
361 shutdownStream(rtspClient);
362}
363
364void setupNextSubsession(RTSPClient* rtspClient) {
365 UsageEnvironment& env = rtspClient->envir(); // alias
366 StreamClientState& scs = ((OurRTSPClient*)rtspClient)->scs; // alias
367
368 scs.subsession = scs.iter->next();
369 if (scs.subsession != NULL) {
370 if (!scs.subsession->initiate()) {
371 env << *rtspClient << "Failed to initiate the \"" << *scs.subsession
372 << "\" subsession: " << env.getResultMsg() << "\n";
373 setupNextSubsession(
374 rtspClient); // give up on this subsession; go to the next one
375 } else {
376 env << *rtspClient << "Initiated the \"" << *scs.subsession
377 << "\" subsession (";
378 if (scs.subsession->rtcpIsMuxed()) {
379 env << "client port " << scs.subsession->clientPortNum();
380 } else {
381 env << "client ports " << scs.subsession->clientPortNum() << "-"
382 << scs.subsession->clientPortNum() + 1;
383 }
384 env << ")\n";
385
386 // Continue setting up this subsession, by sending a RTSP "SETUP" command:
387 rtspClient->sendSetupCommand(*scs.subsession, continueAfterSETUP, False,
388 rtspUseTCP);
389 }
390 return;
391 }
392
393 // We've finished setting up all of the subsessions. Now, send a RTSP "PLAY"
394 // command to start the streaming:
395 if (scs.session->absStartTime() != NULL) {
396 // Special case: The stream is indexed by 'absolute' time, so send an
397 // appropriate "PLAY" command:
398 rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY,
399 scs.session->absStartTime(),
400 scs.session->absEndTime());
401 } else {
402 scs.duration = scs.session->playEndTime() - scs.session->playStartTime();
403 rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY);
404 }
405}
406
407void continueAfterSETUP(RTSPClient* rtspClient, int resultCode,
408 char* resultString) {
409 do {
410 UsageEnvironment& env = rtspClient->envir(); // alias
411 StreamClientState& scs = ((OurRTSPClient*)rtspClient)->scs; // alias
412
413 if (resultCode != 0) {
414 env << *rtspClient << "Failed to set up the \"" << *scs.subsession
415 << "\" subsession: " << resultString << "\n";
416 break;
417 }
418
419 env << *rtspClient << "Set up the \"" << *scs.subsession
420 << "\" subsession (";
421 if (scs.subsession->rtcpIsMuxed()) {
422 env << "client port " << scs.subsession->clientPortNum();
423 } else {
424 env << "client ports " << scs.subsession->clientPortNum() << "-"
425 << scs.subsession->clientPortNum() + 1;
426 }
427 env << ")\n";
428
429 // Having successfully setup the subsession, create a data sink for it, and
430 // call "startPlaying()" on it. (This will prepare the data sink to receive
431 // data; the actual flow of data from the client won't start happening until
432 // later, after we've sent a RTSP "PLAY" command.)
433
434 scs.subsession->sink =
435 OurSink::createNew(env, *scs.subsession, rtspClient->url());
436 // perhaps use your own custom "MediaSink" subclass instead
437 if (scs.subsession->sink == NULL) {
438 env << *rtspClient << "Failed to create a data sink for the \""
439 << *scs.subsession << "\" subsession: " << env.getResultMsg() << "\n";
440 break;
441 }
442
443 env << *rtspClient << "Created a data sink for the \"" << *scs.subsession
444 << "\" subsession\n";
445 scs.subsession->miscPtr =
446 rtspClient; // a hack to let subsession handler functions get the
447 // "RTSPClient" from the subsession
448 scs.subsession->sink->startPlaying(*(scs.subsession->readSource()),
449 subsessionAfterPlaying, scs.subsession);
450 // Also set a handler to be called if a RTCP "BYE" arrives for this
451 // subsession:
452 if (scs.subsession->rtcpInstance() != NULL) {
453 scs.subsession->rtcpInstance()->setByeWithReasonHandler(
454 subsessionByeHandler, scs.subsession);
455 }
456 } while (0);
457 delete[] resultString;
458
459 // Set up the next subsession, if any:
460 setupNextSubsession(rtspClient);
461}
462
463void continueAfterPLAY(RTSPClient* rtspClient, int resultCode,
464 char* resultString) {
465 Boolean success = False;
466
467 do {
468 UsageEnvironment& env = rtspClient->envir(); // alias
469 StreamClientState& scs = ((OurRTSPClient*)rtspClient)->scs; // alias
470
471 if (resultCode != 0) {
472 env << *rtspClient << "Failed to start playing session: " << resultString
473 << "\n";
474 break;
475 }
476
477 // Set a timer to be handled at the end of the stream's expected duration
478 // (if the stream does not already signal its end using a RTCP "BYE"). This
479 // is optional. If, instead, you want to keep the stream active - e.g., so
480 // you can later 'seek' back within it and do another RTSP "PLAY" - then you
481 // can omit this code. (Alternatively, if you don't want to receive the
482 // entire stream, you could set this timer for some shorter value.)
483 if (scs.duration > 0) {
484 unsigned const delaySlop =
485 2; // number of seconds extra to delay, after the stream's expected
486 // duration. (This is optional.)
487 scs.duration += delaySlop;
488 unsigned uSecsToDelay = (unsigned)(scs.duration * 1000000);
489 scs.streamTimerTask = env.taskScheduler().scheduleDelayedTask(
490 uSecsToDelay, (TaskFunc*)streamTimerHandler, rtspClient);
491 }
492
493 env << *rtspClient << "Started playing session";
494 if (scs.duration > 0) {
495 env << " (for up to " << scs.duration << " seconds)";
496 }
497 env << "...\n";
498
499 success = True;
500 } while (0);
501 delete[] resultString;
502
503 if (!success) {
504 // An unrecoverable error occurred with this stream.
505 shutdownStream(rtspClient);
506 }
507}
508
509// Implementation of the other event handlers:
510
511void subsessionAfterPlaying(void* clientData) {
512 MediaSubsession* subsession = (MediaSubsession*)clientData;
513 RTSPClient* rtspClient = (RTSPClient*)(subsession->miscPtr);
514
515 // Begin by closing this subsession's stream:
516 Medium::close(subsession->sink);
517 subsession->sink = NULL;
518
519 // Next, check whether *all* subsessions' streams have now been closed:
520 MediaSession& session = subsession->parentSession();
521 MediaSubsessionIterator iter(session);
522 while ((subsession = iter.next()) != NULL) {
523 if (subsession->sink != NULL) return; // this subsession is still active
524 }
525
526 // All subsessions' streams have now been closed, so shutdown the client:
527 shutdownStream(rtspClient);
528}
529
530void subsessionByeHandler(void* clientData, char const* reason) {
531 MediaSubsession* subsession = (MediaSubsession*)clientData;
532 RTSPClient* rtspClient = (RTSPClient*)subsession->miscPtr;
533 UsageEnvironment& env = rtspClient->envir(); // alias
534
535 env << *rtspClient << "Received RTCP \"BYE\"";
536 if (reason != NULL) {
537 env << " (reason:\"" << reason << "\")";
538 delete[] (char*)reason;
539 }
540 env << " on \"" << *subsession << "\" subsession\n";
541
542 // Now act as if the subsession had closed:
543 subsessionAfterPlaying(subsession);
544}
545
546void streamTimerHandler(void* clientData) {
547 OurRTSPClient* rtspClient = (OurRTSPClient*)clientData;
548 StreamClientState& scs = rtspClient->scs; // alias
549
550 scs.streamTimerTask = NULL;
551
552 // Shut down the stream:
553 shutdownStream(rtspClient);
554}
555
556void shutdownStream(RTSPClient* rtspClient, int exitCode) {
557 UsageEnvironment& env = rtspClient->envir(); // alias
558 StreamClientState& scs = ((OurRTSPClient*)rtspClient)->scs; // alias
559
560 // First, check whether any subsessions have still to be closed:
561 if (scs.session != NULL) {
562 Boolean someSubsessionsWereActive = False;
563 MediaSubsessionIterator iter(*scs.session);
564 MediaSubsession* subsession;
565
566 while ((subsession = iter.next()) != NULL) {
567 if (subsession->sink != NULL) {
568 Medium::close(subsession->sink);
569 subsession->sink = NULL;
570
571 if (subsession->rtcpInstance() != NULL) {
572 subsession->rtcpInstance()->setByeHandler(
573 NULL, NULL); // in case the server sends a RTCP "BYE" while
574 // handling "TEARDOWN"
575 }
576
577 someSubsessionsWereActive = True;
578 }
579 }
580
581 if (someSubsessionsWereActive) {
582 // Send a RTSP "TEARDOWN" command, to tell the server to shutdown the
583 // stream. Don't bother handling the response to the "TEARDOWN".
584 rtspClient->sendTeardownCommand(*scs.session, NULL);
585 }
586 }
587
588 env << *rtspClient << "Closing the stream.\n";
589 Medium::close(rtspClient);
590 // Note that this will also cause this stream's "StreamClientState" structure
591 // to get reclaimed.
592
593 if (--rtspClientCount == 0) {
594 // The final stream has ended, so exit the application now.
595 // (Of course, if you're embedding this code into your own application, you
596 // might want to comment this out, and replace it with
597 // "rtspEventLoopWatchVariable = 1;", so that we leave the LIVE555 event loop,
598 // and continue running "main()".)
599 // exit(exitCode);
600 rtspEventLoopWatchVariable = 1;
601 return;
602 }
603}
604
605// Implementation of "OurRTSPClient":
606
607OurRTSPClient* OurRTSPClient::createNew(UsageEnvironment& env,
608 char const* rtspURL, int verbosityLevel,
609 char const* applicationName,
610 portNumBits tunnelOverHTTPPortNum) {
611 return new OurRTSPClient(env, rtspURL, verbosityLevel, applicationName,
612 tunnelOverHTTPPortNum);
613}
614
615OurRTSPClient::OurRTSPClient(UsageEnvironment& env, char const* rtspURL,
616 int verbosityLevel, char const* applicationName,
617 portNumBits tunnelOverHTTPPortNum)
618 : RTSPClient(env, rtspURL, verbosityLevel, applicationName,
619 tunnelOverHTTPPortNum, -1) {}
620
621OurRTSPClient::~OurRTSPClient() {}
622
623// Implementation of "StreamClientState":
624
625StreamClientState::StreamClientState()
626 : iter(NULL),
627 session(NULL),
628 subsession(NULL),
629 streamTimerTask(NULL),
630 duration(0.0) {}
631
632StreamClientState::~StreamClientState() {
633 delete iter;
634 if (session != NULL) {
635 // We also need to delete "session", and unschedule "streamTimerTask" (if
636 // set)
637 UsageEnvironment& env = session->envir(); // alias
638
639 env.taskScheduler().unscheduleDelayedTask(streamTimerTask);
640 Medium::close(session);
641 }
642}
643
644// Implementation of "OurSink":
645
646OurSink* OurSink::createNew(UsageEnvironment& env,
647 MediaSubsession& subsession,
648 char const* streamId) {
649 return new OurSink(env, subsession, streamId);
650}
651
652OurSink::OurSink(UsageEnvironment& env, MediaSubsession& subsession,
653 char const* streamId)
654 : MediaSink(env), fSubsession(subsession) {
655 fStreamId = strDup(streamId);
656 fReceiveBuffer = new u_int8_t[rtspSinkReceiveBufferSize];
657}
658
659OurSink::~OurSink() {
660 delete[] fReceiveBuffer;
661 delete[] fStreamId;
662}
663
664void OurSink::afterGettingFrame(void* clientData, unsigned frameSize,
665 unsigned numTruncatedBytes,
666 struct timeval presentationTime,
667 unsigned durationInMicroseconds) {
668 OurSink* sink = (OurSink*)clientData;
669 sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime,
670 durationInMicroseconds);
671}
672
673void OurSink::afterGettingFrame(unsigned frameSize,
674 unsigned numTruncatedBytes,
675 struct timeval presentationTime,
676 unsigned /*durationInMicroseconds*/) {
677 // We've just received a frame of data. (Optionally) print out information
678 // about it:
679#ifdef DEBUG_PRINT_EACH_RECEIVED_FRAME
680 if (fStreamId != NULL) envir() << "Stream \"" << fStreamId << "\"; ";
681 envir() << fSubsession.mediumName() << "/" << fSubsession.codecName()
682 << ":\tReceived " << frameSize << " bytes";
683 if (numTruncatedBytes > 0)
684 envir() << " (with " << numTruncatedBytes << " bytes truncated)";
685 char uSecsStr[6 + 1]; // used to output the 'microseconds' part of the
686 // presentation time
687 snprintf(uSecsStr,7 , "%06u", (unsigned)presentationTime.tv_usec);
688 envir() << ".\tPresentation time: " << (int)presentationTime.tv_sec << "."
689 << uSecsStr;
690 if (fSubsession.rtpSource() != NULL &&
691 !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) {
692 envir() << "!"; // mark the debugging output to indicate that this
693 // presentation time is not RTCP-synchronized
694 }
695#ifdef DEBUG_PRINT_NPT
696 envir() << "\tNPT: " << fSubsession.getNormalPlayTime(presentationTime);
697#endif
698 envir() << "\n";
699#endif
700
701 // Decode the data
702 if (rtspOutput) {
703 size_t writtenSize = rtspOutput->write(fReceiveBuffer, frameSize);
704 assert(writtenSize == frameSize);
705 }
706
707 // Then continue, to request the next frame of data:
708 continuePlaying();
709}
710
711Boolean OurSink::continuePlaying() {
712 if (fSource == NULL) return False; // sanity check (should not happen)
713
714 // Request the next frame of data from our input source. "afterGettingFrame()"
715 // will get called later, when it arrives:
716 fSource->getNextFrame(fReceiveBuffer, rtspSinkReceiveBufferSize,
717 afterGettingFrame, this, onSourceClosure, this);
718 return True;
719}
720
721} // namespace audiotools_rtsp
A simple RTSPClient using https://github.com/pschatzmann/arduino-live555.
Definition AudioClientRTSP.h:71
bool login()
login to wifi: optional convenience method. You can also just start Wifi the normal way
Definition AudioClientRTSP.h:154
bool begin(const char *url, Print &out)
Starts the processing.
Definition AudioClientRTSP.h:102
void setLogin(const char *ssid, const char *password)
login to wifi: optional convenience method. You can also just start Wifi the normal way
Definition AudioClientRTSP.h:96
void loop()
to be called in Arduino loop when blocking = false
Definition AudioClientRTSP.h:128
Definition NoArduino.h:62
Definition AudioClientRTSP.h:239
Definition AudioClientRTSP.h:264
Definition AudioClientRTSP.h:218
Generic Implementation of sound input and output for desktop environments using portaudio.
Definition AudioCodecsBase.h:10
AudioTools internal: rtsp.
Definition AudioClientRTSP.h:49