Dynamically adjusts the effective playback sample rate based on the difference between the local and server clocks since the start.
#include <SnapTimeSync.h>
Public Member Functions

  SnapTimeSyncDynamicSinceStart (int processingLag=CONFIG_PROCESSING_TIME_MS, int interval=10)

void begin (int rate)
  Starts the processing.

float getFactor ()

int getStartDelay ()

bool isSync ()
  Returns true if a synchronization (update of the sampling rate) is needed.

void setInterval (int interval)

void setMessageBufferDelay (int ms)
  Defines the message buffer lag.

void setProcessingLag (int lag)

virtual void updateActualDelay (int delay)
  Records the actual playback delay (currently not used).

void updateServerTime (uint32_t serverMillis) override
  Records the actual server time in milliseconds.

Protected Attributes

bool active = false
SnapTimePoints current_time
int interval = 10
uint16_t message_buffer_delay_ms = 0
int processing_lag = 0
SnapTimePoints start_time
const char *TAG = "SnapTimeSync"
uint64_t update_count = 0
|
Dynamically adjusts the effective playback sample rate based on the difference between the local and server clocks since the start.
- Author: Phil Schatzmann
- Version: 0.1
- Date: 2023-10-28
- Copyright: Copyright (c) 2023
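The following is a minimal configuration sketch using only the members listed on this page; the concrete values (100 ms processing lag, a 20-update interval, a 1000 ms message buffer, 44100 Hz) and the Arduino-style setup()/loop() frame are illustrative assumptions, not library defaults.

```cpp
#include <SnapTimeSync.h>

// Illustrative values: assume ~100 ms decoder/output lag, adjust every 20 updates.
SnapTimeSyncDynamicSinceStart time_sync(/*processingLag=*/100, /*interval=*/20);

void setup() {
  time_sync.setMessageBufferDelay(1000);  // assumed server-side buffer in ms
  time_sync.begin(44100);                 // start processing at the nominal sample rate
}

void loop() {
  // In a real client the incoming server time messages drive
  // time_sync.updateServerTime(serverMillis); when time_sync.isSync() reports
  // true, time_sync.getFactor() yields the factor applied to the effective
  // playback sample rate.
}
```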
◆ getFactor()
Calculates the resampling factor: with a positive delay we are playing too fast and need to slow down.
Implements SnapTimeSync.
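As a purely hypothetical illustration of the documented direction (positive delay, factor below 1.0, slower playback), not the formula the class actually uses:

```cpp
// Hypothetical mapping only, NOT the library's implementation: a positive
// delay means we are playing too fast, so the factor drops below 1.0.
float exampleFactor(int delay_ms) {
  const float window_ms = 10000.0f;  // assumed smoothing window
  return 1.0f - static_cast<float>(delay_ms) / window_ms;
}
```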
◆ getStartDelay()
Provides the effective delay to be used (message buffer lag minus decoding/playback time).
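A short sketch of that relationship with example numbers (the helper name and the values are assumptions):

```cpp
// Effective start delay = message buffer lag - decoding/playback lag.
// Example: a 1000 ms buffer and 150 ms processing lag leave 850 ms.
int exampleStartDelay(int message_buffer_delay_ms, int processing_lag) {
  return message_buffer_delay_ms - processing_lag;  // 1000 - 150 = 850
}
```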
◆ setInterval()
void setInterval (int interval)    (inline, inherited)
Defines the interval that is used to adjust the sample rate: 10 means every 10 updates.
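For example, reusing the time_sync object from the sketch above (50 is an arbitrary value):

```cpp
// Re-evaluate the effective sample rate only on every 50th server time update
// to smooth out network jitter (50 is an example; the default is 10).
time_sync.setInterval(50);
```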
◆ setProcessingLag()
void setProcessingLag (int lag)    (inline, inherited)
Defines the lag which is subtracted from the message_buffer_delay_ms. It consists of the delay added by the decoder and your selected output device.
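For example, again with assumed numbers: if the decoder and the selected output device together add roughly 150 ms, that lag is subtracted from the message buffer delay when the start delay is computed (see getStartDelay() above).

```cpp
// Assumed figures: ~150 ms added by decoder + output device,
// subtracted from a 1000 ms message buffer delay.
time_sync.setProcessingLag(150);
time_sync.setMessageBufferDelay(1000);
```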
The documentation for this class was generated from the following file:
- SnapTimeSync.h