Port remove unsafe code and catch up with dev (#956)

Jason Gray 2022-02-14 05:15:19 -06:00 committed by GitHub
parent ab562cc8d8
commit 47f1362453
GPG key ID: 4AEE18F83AFDEB23
18 changed files with 366 additions and 241 deletions

View file

@@ -28,6 +28,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - [main] Prevent hang when discovery is disabled and there are no credentials or when bad credentials are given.
 - [main] Don't panic when parsing options. Instead list valid values and exit.
 - [main] `--alsa-mixer-device` and `--alsa-mixer-index` now fallback to the card and index specified in `--device`.
+- [core] Removed unsafe code (breaking)
+- [playback] Adhere to ReplayGain spec when calculating gain normalisation factor.
+- [playback] `alsa`: Use `--volume-range` overrides for softvol controls
+- [connect] Don't panic when activating shuffle without previous interaction.

 ### Removed
 - [playback] `alsamixer`: previously deprecated option `mixer-card` has been removed.

View file

@@ -859,15 +859,15 @@ impl SpircTask {
         self.state.set_shuffle(update.get_state().get_shuffle());
         if self.state.get_shuffle() {
             let current_index = self.state.get_playing_track_index();
-            {
-                let tracks = self.state.mut_track();
+            let tracks = self.state.mut_track();
+            if !tracks.is_empty() {
                 tracks.swap(0, current_index as usize);
                 if let Some((_, rest)) = tracks.split_first_mut() {
                     let mut rng = rand::thread_rng();
                     rest.shuffle(&mut rng);
                 }
+                self.state.set_playing_track_index(0);
             }
-            self.state.set_playing_track_index(0);
         } else {
             let context = self.state.get_context_uri();
             debug!("{:?}", context);

View file

@@ -368,12 +368,17 @@ impl Cache {
     }

     pub fn file_path(&self, file: FileId) -> Option<PathBuf> {
-        self.audio_location.as_ref().map(|location| {
-            let name = file.to_base16();
-            let mut path = location.join(&name[0..2]);
-            path.push(&name[2..]);
-            path
-        })
+        match file.to_base16() {
+            Ok(name) => self.audio_location.as_ref().map(|location| {
+                let mut path = location.join(&name[0..2]);
+                path.push(&name[2..]);
+                path
+            }),
+            Err(e) => {
+                warn!("{}", e);
+                None
+            }
+        }
     }

     pub fn file(&self, file: FileId) -> Option<File> {
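For callers the visible change is that a `FileId` which fails to encode now logs a warning and behaves like a cache miss instead of assuming encoding cannot fail. A hypothetical caller (sketch only; module paths and the network fallback are assumptions, not part of this PR):

use librespot_core::{cache::Cache, FileId}; // paths assumed for illustration

fn fetch_from_network(_file: FileId) -> std::io::Result<Vec<u8>> {
    // Stub for illustration; a real caller would stream the file from Spotify.
    Ok(Vec::new())
}

fn cached_or_fetch(cache: &Cache, file: FileId) -> std::io::Result<Vec<u8>> {
    match cache.file_path(file) {
        // Some(path) may still point at a file that was never written.
        Some(path) if path.exists() => std::fs::read(path),
        // None now covers both "no cache configured" and "unencodable id".
        _ => fetch_from_network(file),
    }
}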

View file

@@ -2,7 +2,7 @@ use std::fmt;
 use librespot_protocol as protocol;

-use crate::spotify_id::to_base16;
+use crate::{spotify_id::to_base16, Error};

 #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct FileId(pub [u8; 20]);
@@ -14,7 +14,8 @@ impl FileId {
         FileId(dst)
     }

-    pub fn to_base16(&self) -> String {
+    #[allow(clippy::wrong_self_convention)]
+    pub fn to_base16(&self) -> Result<String, Error> {
         to_base16(&self.0, &mut [0u8; 40])
     }
 }
@@ -27,7 +28,7 @@ impl fmt::Debug for FileId {
 impl fmt::Display for FileId {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.write_str(&self.to_base16())
+        f.write_str(&self.to_base16().unwrap_or_default())
     }
 }

View file

@@ -354,7 +354,7 @@ impl SpClient {
     }

     pub async fn get_metadata(&self, scope: &str, id: SpotifyId) -> SpClientResult {
-        let endpoint = format!("/metadata/4/{}/{}", scope, id.to_base16());
+        let endpoint = format!("/metadata/4/{}/{}", scope, id.to_base16()?);
         self.request(&Method::GET, &endpoint, None, None).await
     }
@@ -379,7 +379,7 @@ impl SpClient {
     }

     pub async fn get_lyrics(&self, track_id: SpotifyId) -> SpClientResult {
-        let endpoint = format!("/color-lyrics/v1/track/{}", track_id.to_base62());
+        let endpoint = format!("/color-lyrics/v1/track/{}", track_id.to_base62()?);
         self.request_as_json(&Method::GET, &endpoint, None, None)
             .await
@@ -392,7 +392,7 @@ impl SpClient {
     ) -> SpClientResult {
         let endpoint = format!(
             "/color-lyrics/v2/track/{}/image/spotify:image:{}",
-            track_id.to_base62(),
+            track_id.to_base62()?,
             image_id
         );
@@ -416,7 +416,7 @@ impl SpClient {
     pub async fn get_audio_storage(&self, file_id: FileId) -> SpClientResult {
         let endpoint = format!(
             "/storage-resolve/files/audio/interactive/{}",
-            file_id.to_base16()
+            file_id.to_base16()?
         );
         self.request(&Method::GET, &endpoint, None, None).await
     }
@@ -459,7 +459,7 @@ impl SpClient {
            .get_user_attribute(attribute)
            .ok_or_else(|| SpClientError::Attribute(attribute.to_string()))?;

-        let mut url = template.replace("{id}", &preview_id.to_base16());
+        let mut url = template.replace("{id}", &preview_id.to_base16()?);
         let separator = match url.find('?') {
             Some(_) => "&",
             None => "?",
@@ -477,7 +477,7 @@ impl SpClient {
            .get_user_attribute(attribute)
            .ok_or_else(|| SpClientError::Attribute(attribute.to_string()))?;

-        let url = template.replace("{file_id}", &file_id.to_base16());
+        let url = template.replace("{file_id}", &file_id.to_base16()?);
         self.request_url(url).await
     }
@@ -488,7 +488,7 @@ impl SpClient {
            .get_user_attribute(attribute)
            .ok_or_else(|| SpClientError::Attribute(attribute.to_string()))?;

-        let url = template.replace("{file_id}", &image_id.to_base16());
+        let url = template.replace("{file_id}", &image_id.to_base16()?);
         self.request_url(url).await
     }

View file

@@ -191,7 +191,8 @@ impl SpotifyId {
     /// Returns the `SpotifyId` as a base16 (hex) encoded, `SpotifyId::SIZE_BASE16` (32)
     /// character long `String`.
-    pub fn to_base16(&self) -> String {
+    #[allow(clippy::wrong_self_convention)]
+    pub fn to_base16(&self) -> Result<String, Error> {
         to_base16(&self.to_raw(), &mut [0u8; Self::SIZE_BASE16])
     }
@@ -199,7 +200,9 @@ impl SpotifyId {
     /// character long `String`.
     ///
     /// [canonically]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids
-    pub fn to_base62(&self) -> String {
+    #[allow(clippy::wrong_self_convention)]
+    pub fn to_base62(&self) -> Result<String, Error> {
         let mut dst = [0u8; 22];
         let mut i = 0;
         let n = self.id;
@@ -237,14 +240,12 @@ impl SpotifyId {
         dst.reverse();

-        unsafe {
-            // Safety: We are only dealing with ASCII characters.
-            String::from_utf8_unchecked(dst.to_vec())
-        }
+        String::from_utf8(dst.to_vec()).map_err(|_| SpotifyIdError::InvalidId.into())
     }

     /// Returns a copy of the `SpotifyId` as an array of `SpotifyId::SIZE` (16) bytes in
     /// big-endian order.
+    #[allow(clippy::wrong_self_convention)]
     pub fn to_raw(&self) -> [u8; Self::SIZE] {
         self.id.to_be_bytes()
     }
@@ -257,7 +258,9 @@ impl SpotifyId {
     /// be encoded as `unknown`.
     ///
     /// [Spotify URI]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids
-    pub fn to_uri(&self) -> String {
+    #[allow(clippy::wrong_self_convention)]
+    pub fn to_uri(&self) -> Result<String, Error> {
         // 8 chars for the "spotify:" prefix + 1 colon + 22 chars base62 encoded ID = 31
         // + unknown size item_type.
         let item_type: &str = self.item_type.into();
@@ -265,21 +268,24 @@ impl SpotifyId {
         dst.push_str("spotify:");
         dst.push_str(item_type);
         dst.push(':');
-        dst.push_str(&self.to_base62());
+        let base_62 = self.to_base62()?;
+        dst.push_str(&base_62);

-        dst
+        Ok(dst)
     }
 }

 impl fmt::Debug for SpotifyId {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.debug_tuple("SpotifyId").field(&self.to_uri()).finish()
+        f.debug_tuple("SpotifyId")
+            .field(&self.to_uri().unwrap_or_else(|_| "invalid uri".into()))
+            .finish()
     }
 }

 impl fmt::Display for SpotifyId {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.write_str(&self.to_uri())
+        f.write_str(&self.to_uri().unwrap_or_else(|_| "invalid uri".into()))
     }
 }
@@ -312,16 +318,17 @@ impl NamedSpotifyId {
         })
     }

-    pub fn to_uri(&self) -> String {
+    pub fn to_uri(&self) -> Result<String, Error> {
         let item_type: &str = self.inner_id.item_type.into();
         let mut dst = String::with_capacity(37 + self.username.len() + item_type.len());
         dst.push_str("spotify:user:");
         dst.push_str(&self.username);
         dst.push_str(item_type);
         dst.push(':');
-        dst.push_str(&self.to_base62());
+        let base_62 = self.to_base62()?;
+        dst.push_str(&base_62);

-        dst
+        Ok(dst)
     }

     pub fn from_spotify_id(id: SpotifyId, username: String) -> Self {
@@ -342,14 +349,24 @@ impl Deref for NamedSpotifyId {
 impl fmt::Debug for NamedSpotifyId {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.debug_tuple("NamedSpotifyId")
-            .field(&self.inner_id.to_uri())
+            .field(
+                &self
+                    .inner_id
+                    .to_uri()
+                    .unwrap_or_else(|_| "invalid id".into()),
+            )
             .finish()
     }
 }

 impl fmt::Display for NamedSpotifyId {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.write_str(&self.inner_id.to_uri())
+        f.write_str(
+            &self
+                .inner_id
+                .to_uri()
+                .unwrap_or_else(|_| "invalid id".into()),
+        )
     }
 }
@@ -495,7 +512,7 @@ impl TryFrom<&protocol::playlist_annotate3::TranscodedPicture> for SpotifyId {
     }
 }

-pub fn to_base16(src: &[u8], buf: &mut [u8]) -> String {
+pub fn to_base16(src: &[u8], buf: &mut [u8]) -> Result<String, Error> {
     let mut i = 0;
     for v in src {
         buf[i] = BASE16_DIGITS[(v >> 4) as usize];
@@ -503,10 +520,7 @@ pub fn to_base16(src: &[u8], buf: &mut [u8])
         i += 2;
     }

-    unsafe {
-        // Safety: We are only dealing with ASCII characters.
-        String::from_utf8_unchecked(buf.to_vec())
-    }
+    String::from_utf8(buf.to_vec()).map_err(|_| SpotifyIdError::InvalidId.into())
 }

 #[cfg(test)]
@@ -623,7 +637,7 @@ mod tests {
                 item_type: c.kind,
             };

-            assert_eq!(id.to_base62(), c.base62);
+            assert_eq!(id.to_base62().unwrap(), c.base62);
         }
     }
@@ -646,7 +660,7 @@ mod tests {
                 item_type: c.kind,
             };

-            assert_eq!(id.to_base16(), c.base16);
+            assert_eq!(id.to_base16().unwrap(), c.base16);
         }
     }
@@ -672,7 +686,7 @@ mod tests {
                 item_type: c.kind,
             };

-            assert_eq!(id.to_uri(), c.uri);
+            assert_eq!(id.to_uri().unwrap(), c.uri);
         }
     }
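With `to_base16`, `to_base62`, and `to_uri` now returning `Result`, internal call sites propagate errors with `?` (as the spclient hunks above do) while Display/Debug paths fall back to a placeholder string. A small sketch of both styles, assuming the post-PR API and the paths librespot_core::spotify_id::SpotifyId and librespot_core::Error:

use librespot_core::spotify_id::SpotifyId;
use librespot_core::Error;

// Fallible path: bubble an invalid id up to the caller with `?`.
fn describe(id: SpotifyId) -> Result<String, Error> {
    Ok(format!("now playing {}", id.to_uri()?))
}

// Infallible path (e.g. inside a Display impl): degrade to a placeholder.
fn describe_lossy(id: SpotifyId) -> String {
    id.to_uri().unwrap_or_else(|_| "invalid uri".into())
}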

View file

@@ -19,11 +19,13 @@ async fn main() {
     }

     let credentials = Credentials::with_password(&args[1], &args[2]);

-    let uri_split = args[3].split(':');
-    let uri_parts: Vec<&str> = uri_split.collect();
-    println!("{}, {}, {}", uri_parts[0], uri_parts[1], uri_parts[2]);
-
-    let plist_uri = SpotifyId::from_base62(uri_parts[2]).unwrap();
+    let plist_uri = SpotifyId::from_uri(&args[3]).unwrap_or_else(|_| {
+        eprintln!(
+            "PLAYLIST should be a playlist URI such as: \
+            \"spotify:playlist:37i9dQZF1DXec50AjHrNTq\""
+        );
+        exit(1);
+    });

     let session = Session::new(session_config, None);
     if let Err(e) = session.connect(credentials).await {

View file

@@ -74,7 +74,7 @@ impl InnerAudioItem for Episode {
         Ok(AudioItem {
             id,
-            spotify_uri: id.to_uri(),
+            spotify_uri: id.to_uri()?,
             files: episode.audio,
             name: episode.name,
             duration: episode.duration,

View file

@@ -52,7 +52,7 @@ impl PlaylistAnnotation {
         let uri = format!(
             "hm://playlist-annotate/v1/annotation/user/{}/playlist/{}",
             username,
-            playlist_id.to_base62()
+            playlist_id.to_base62()?
         );
         <Self as MercuryRequest>::request(session, &uri).await
     }

View file

@@ -104,7 +104,7 @@ impl Playlist {
         let uri = format!(
             "hm://playlist/user/{}/playlist/{}",
             username,
-            playlist_id.to_base62()
+            playlist_id.to_base62()?
         );
         <Self as MercuryRequest>::request(session, &uri).await
     }
@@ -152,7 +152,7 @@ impl Metadata for Playlist {
     type Message = protocol::playlist4_external::SelectedListContent;

     async fn request(session: &Session, playlist_id: SpotifyId) -> RequestResult {
-        let uri = format!("hm://playlist/v2/playlist/{}", playlist_id.to_base62());
+        let uri = format!("hm://playlist/v2/playlist/{}", playlist_id.to_base62()?);
         <Self as MercuryRequest>::request(session, &uri).await
     }

View file

@@ -88,7 +88,7 @@ impl InnerAudioItem for Track {
         Ok(AudioItem {
             id,
-            spotify_uri: id.to_uri(),
+            spotify_uri: id.to_uri()?,
             files: track.files,
             name: track.name,
             duration: track.duration,

View file

@@ -130,7 +130,7 @@ pub struct PlayerConfig {
     pub normalisation: bool,
     pub normalisation_type: NormalisationType,
     pub normalisation_method: NormalisationMethod,
-    pub normalisation_pregain_db: f32,
+    pub normalisation_pregain_db: f64,
     pub normalisation_threshold_dbfs: f64,
     pub normalisation_attack_cf: f64,
     pub normalisation_release_cf: f64,

View file

@@ -23,14 +23,15 @@ pub struct Converter {
 impl Converter {
     pub fn new(dither_config: Option<DithererBuilder>) -> Self {
-        if let Some(ref ditherer_builder) = dither_config {
-            let ditherer = (ditherer_builder)();
-            info!("Converting with ditherer: {}", ditherer.name());
-            Self {
-                ditherer: Some(ditherer),
-            }
-        } else {
-            Self { ditherer: None }
+        match dither_config {
+            Some(ditherer_builder) => {
+                let ditherer = (ditherer_builder)();
+                info!("Converting with ditherer: {}", ditherer.name());
+                Self {
+                    ditherer: Some(ditherer),
+                }
+            }
+            None => Self { ditherer: None },
         }
     }
@@ -52,18 +53,15 @@ impl Converter {
     const SCALE_S16: f64 = 32768.;

     pub fn scale(&mut self, sample: f64, factor: f64) -> f64 {
-        let dither = match self.ditherer {
-            Some(ref mut d) => d.noise(),
-            None => 0.0,
-        };
-
         // From the many float to int conversion methods available, match what
         // the reference Vorbis implementation uses: sample * 32768 (for 16 bit)
-        let int_value = sample * factor + dither;
-
         // Casting float to integer rounds towards zero by default, i.e. it
         // truncates, and that generates larger error than rounding to nearest.
-        int_value.round()
+        match self.ditherer.as_mut() {
+            Some(d) => (sample * factor + d.noise()).round(),
+            None => (sample * factor).round(),
+        }
     }
@@ -79,11 +77,12 @@ impl Converter {
         let max = factor - 1.0;

         if int_value < min {
-            return min;
+            min
         } else if int_value > max {
-            return max;
+            max
+        } else {
+            int_value
         }
-
-        int_value
     }
@@ -109,12 +108,7 @@ impl Converter {
     pub fn f64_to_s24_3(&mut self, samples: &[f64]) -> Vec<i24> {
         samples
             .iter()
-            .map(|sample| {
-                // Not as DRY as calling f32_to_s24 first, but this saves iterating
-                // over all samples twice.
-                let int_value = self.clamping_scale(*sample, Self::SCALE_S24) as i32;
-                i24::from_s24(int_value)
-            })
+            .map(|sample| i24::from_s24(self.clamping_scale(*sample, Self::SCALE_S24) as i32))
             .collect()
     }
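The comment about rounding is the heart of `scale`: a plain `as` cast truncates toward zero, while `round()` picks the nearest integer, which halves the worst-case quantisation error. A standalone illustration, independent of the Converter type:

fn main() {
    let scaled = 0.9999_f64 * 32768.0; // ~32764.72, a near-full-scale 16-bit sample

    let truncated = scaled as i32; // `as` casts truncate toward zero
    let rounded = scaled.round() as i32; // round to nearest first, as scale() does

    assert_eq!(truncated, 32764);
    assert_eq!(rounded, 32765);
    println!("truncated = {}, rounded = {}", truncated, rounded);
}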

View file

@@ -3,7 +3,7 @@ use rand::SeedableRng;
 use rand_distr::{Distribution, Normal, Triangular, Uniform};
 use std::fmt;

-const NUM_CHANNELS: usize = 2;
+use crate::NUM_CHANNELS;

 // Dithering lowers digital-to-analog conversion ("requantization") error,
 // linearizing output, lowering distortion and replacing it with a constant,
@@ -102,7 +102,7 @@ impl GaussianDitherer {
 pub struct HighPassDitherer {
     active_channel: usize,
-    previous_noises: [f64; NUM_CHANNELS],
+    previous_noises: [f64; NUM_CHANNELS as usize],
     cached_rng: SmallRng,
     distribution: Uniform<f64>,
 }
@@ -111,7 +111,7 @@ impl Ditherer for HighPassDitherer {
     fn new() -> Self {
         Self {
             active_channel: 0,
-            previous_noises: [0.0; NUM_CHANNELS],
+            previous_noises: [0.0; NUM_CHANNELS as usize],
             cached_rng: create_rng(),
             distribution: Uniform::new_inclusive(-0.5, 0.5), // 1 LSB +/- 1 LSB (previous) = 2 LSB
         }

View file

@@ -84,7 +84,7 @@ impl Mixer for AlsaMixer {
                 warn!("Alsa rounding error detected, setting maximum dB to {:.2} instead of {:.2}", ZERO_DB.to_db(), max_millibel.to_db());
                 max_millibel = ZERO_DB;
             } else {
-                warn!("Please manually set with `--volume-ctrl` if this is incorrect");
+                warn!("Please manually set `--volume-range` if this is incorrect");
             }
         }
         (min_millibel, max_millibel)
@@ -104,12 +104,23 @@ impl Mixer for AlsaMixer {
         let min_db = min_millibel.to_db() as f64;
         let max_db = max_millibel.to_db() as f64;
-        let db_range = f64::abs(max_db - min_db);
+        let mut db_range = f64::abs(max_db - min_db);

         // Synchronize the volume control dB range with the mixer control,
         // unless it was already set with a command line option.
         if !config.volume_ctrl.range_ok() {
+            if db_range > 100.0 {
+                debug!("Alsa mixer reported dB range > 100, which is suspect");
+                warn!("Please manually set `--volume-range` if this is incorrect");
+            }
             config.volume_ctrl.set_db_range(db_range);
+        } else {
+            let db_range_override = config.volume_ctrl.db_range();
+            debug!(
+                "Alsa dB volume range was detected as {} but overridden as {}",
+                db_range, db_range_override
+            );
+            db_range = db_range_override;
         }

         // For hardware controls with a small range (24 dB or less),

View file

@@ -47,6 +47,7 @@ use crate::SAMPLES_PER_SECOND;
 const PRELOAD_NEXT_TRACK_BEFORE_END_DURATION_MS: u32 = 30000;
 pub const DB_VOLTAGE_RATIO: f64 = 20.0;
+pub const PCM_AT_0DBFS: f64 = 1.0;

 // Spotify inserts a custom Ogg packet at the start with custom metadata values, that you would
 // otherwise expect in Vorbis comments. This packet isn't well-formed and players may balk at it.
@@ -264,7 +265,6 @@ impl Default for NormalisationData {
 impl NormalisationData {
     fn parse_from_ogg<T: Read + Seek>(mut file: T) -> io::Result<NormalisationData> {
         const SPOTIFY_NORMALIZATION_HEADER_START_OFFSET: u64 = 144;
-
         let newpos = file.seek(SeekFrom::Start(SPOTIFY_NORMALIZATION_HEADER_START_OFFSET))?;
         if newpos != SPOTIFY_NORMALIZATION_HEADER_START_OFFSET {
             error!(
@@ -296,31 +296,62 @@ impl NormalisationData {
         }

         let (gain_db, gain_peak) = if config.normalisation_type == NormalisationType::Album {
-            (data.album_gain_db as f64, data.album_peak as f64)
+            (data.album_gain_db, data.album_peak)
         } else {
-            (data.track_gain_db as f64, data.track_peak as f64)
+            (data.track_gain_db, data.track_peak)
         };

-        let normalisation_power = gain_db + config.normalisation_pregain_db as f64;
-        let mut normalisation_factor = db_to_ratio(normalisation_power);
-
-        if normalisation_power + ratio_to_db(gain_peak) > config.normalisation_threshold_dbfs {
-            let limited_normalisation_factor =
-                db_to_ratio(config.normalisation_threshold_dbfs as f64) / gain_peak;
-            let limited_normalisation_power = ratio_to_db(limited_normalisation_factor);
-
-            if config.normalisation_method == NormalisationMethod::Basic {
-                warn!("Limiting gain to {:.2} dB for the duration of this track to stay under normalisation threshold.", limited_normalisation_power);
-                normalisation_factor = limited_normalisation_factor;
-            } else {
-                warn!(
-                    "This track will at its peak be subject to {:.2} dB of dynamic limiting.",
-                    normalisation_power - limited_normalisation_power
-                );
-            }
-
-            warn!("Please lower pregain to avoid.");
-        }
+        // As per the ReplayGain 1.0 & 2.0 (proposed) spec:
+        // https://wiki.hydrogenaud.io/index.php?title=ReplayGain_1.0_specification#Clipping_prevention
+        // https://wiki.hydrogenaud.io/index.php?title=ReplayGain_2.0_specification#Clipping_prevention
+        let normalisation_factor = if config.normalisation_method == NormalisationMethod::Basic {
+            // For Basic Normalisation, factor = min(ratio of (ReplayGain + PreGain), 1.0 / peak level).
+            // https://wiki.hydrogenaud.io/index.php?title=ReplayGain_1.0_specification#Peak_amplitude
+            // https://wiki.hydrogenaud.io/index.php?title=ReplayGain_2.0_specification#Peak_amplitude
+            // We then limit that to 1.0 as not to exceed dBFS (0.0 dB).
+            let factor = f64::min(
+                db_to_ratio(gain_db + config.normalisation_pregain_db),
+                PCM_AT_0DBFS / gain_peak,
+            );
+
+            if factor > PCM_AT_0DBFS {
+                info!(
+                    "Lowering gain by {:.2} dB for the duration of this track to avoid potentially exceeding dBFS.",
+                    ratio_to_db(factor)
+                );
+
+                PCM_AT_0DBFS
+            } else {
+                factor
+            }
+        } else {
+            // For Dynamic Normalisation it's up to the player to decide,
+            // factor = ratio of (ReplayGain + PreGain).
+            // We then let the dynamic limiter handle gain reduction.
+            let factor = db_to_ratio(gain_db + config.normalisation_pregain_db);
+            let threshold_ratio = db_to_ratio(config.normalisation_threshold_dbfs);
+
+            if factor > PCM_AT_0DBFS {
+                let factor_db = gain_db + config.normalisation_pregain_db;
+                let limiting_db = factor_db + config.normalisation_threshold_dbfs.abs();
+
+                warn!(
+                    "This track may exceed dBFS by {:.2} dB and be subject to {:.2} dB of dynamic limiting at it's peak.",
+                    factor_db, limiting_db
+                );
+            } else if factor > threshold_ratio {
+                let limiting_db = gain_db
+                    + config.normalisation_pregain_db
+                    + config.normalisation_threshold_dbfs.abs();
+
+                info!(
+                    "This track may be subject to {:.2} dB of dynamic limiting at it's peak.",
+                    limiting_db
+                );
+            }
+
+            factor
+        };

         debug!("Normalisation Data: {:?}", data);
         debug!(
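As a concrete check of the Basic branch above (numbers invented for illustration): with a track gain of -7 dB, no pre-gain and a track peak of 0.5, the ReplayGain ratio is 10^(-7/20) ≈ 0.447 and the peak-protection bound is 1.0 / 0.5 = 2.0, so the factor is min(0.447, 2.0) ≈ 0.447 and no clamping to 0 dBFS is needed:

// db_to_ratio as a voltage ratio, 20 dB per decade, matching DB_VOLTAGE_RATIO above.
fn db_to_ratio(db: f64) -> f64 {
    f64::powf(10.0, db / 20.0)
}

fn main() {
    const PCM_AT_0DBFS: f64 = 1.0;
    let gain_db = -7.0; // example ReplayGain track gain
    let pregain_db = 0.0; // --normalisation-pregain left at 0
    let gain_peak = 0.5; // example track peak, linear scale

    let factor = f64::min(db_to_ratio(gain_db + pregain_db), PCM_AT_0DBFS / gain_peak);
    assert!((factor - 0.4467).abs() < 1e-3);

    // Only a combined factor above 1.0 would be clamped back to PCM_AT_0DBFS.
    let applied = if factor > PCM_AT_0DBFS { PCM_AT_0DBFS } else { factor };
    assert!(applied <= PCM_AT_0DBFS);
}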
@@ -792,7 +823,16 @@ impl PlayerTrackLoader {
         position_ms: u32,
     ) -> Option<PlayerLoadedTrackData> {
         let audio = match AudioItem::get_file(&self.session, spotify_id).await {
-            Ok(audio) => audio,
+            Ok(audio) => match self.find_available_alternative(audio).await {
+                Some(audio) => audio,
+                None => {
+                    warn!(
+                        "<{}> is not available",
+                        spotify_id.to_uri().unwrap_or_default()
+                    );
+                    return None;
+                }
+            },
             Err(e) => {
                 error!("Unable to load audio item: {:?}", e);
                 return None;
@@ -805,6 +845,7 @@ impl PlayerTrackLoader {
         );

         let is_explicit = audio.is_explicit;
+
         if is_explicit {
             if let Some(value) = self.session.get_user_attribute("filter-explicit-content") {
                 if &value == "1" {
@@ -814,22 +855,15 @@ impl PlayerTrackLoader {
             }
         }

-        let audio = match self.find_available_alternative(audio).await {
-            Some(audio) => audio,
-            None => {
-                error!("<{}> is not available", spotify_id.to_uri());
-                return None;
-            }
-        };
-
         if audio.duration < 0 {
             error!(
                 "Track duration for <{}> cannot be {}",
-                spotify_id.to_uri(),
+                spotify_id.to_uri().unwrap_or_default(),
                 audio.duration
             );
             return None;
         }

         let duration_ms = audio.duration as u32;

         // (Most) podcasts seem to support only 96 kbps Ogg Vorbis, so fall back to it
@@ -863,25 +897,23 @@ impl PlayerTrackLoader {
             ],
         };

-        let entry = formats.iter().find_map(|format| {
-            if let Some(&file_id) = audio.files.get(format) {
-                Some((*format, file_id))
-            } else {
-                None
-            }
-        });
-
-        let (format, file_id) = match entry {
-            Some(t) => t,
-            None => {
-                error!("<{}> is not available in any supported format", audio.name);
-                return None;
-            }
-        };
+        let (format, file_id) =
+            match formats
+                .iter()
+                .find_map(|format| match audio.files.get(format) {
+                    Some(&file_id) => Some((*format, file_id)),
+                    _ => None,
+                }) {
+                Some(t) => t,
+                None => {
+                    warn!("<{}> is not available in any supported format", audio.name);
+                    return None;
+                }
+            };

         let bytes_per_second = self.stream_data_rate(format);

-        // This is only a loop to be able to reload the file if an error occured
+        // This is only a loop to be able to reload the file if an error occurred
         // while opening a cached file.
         loop {
             let encrypted_file = AudioFile::open(&self.session, file_id, bytes_per_second);
@@ -1416,73 +1448,98 @@ impl PlayerInternal {
 // For the basic normalisation method, a normalisation factor of 1.0 indicates that
 // there is nothing to normalise (all samples should pass unaltered). For the
 // dynamic method, there may still be peaks that we want to shave off.
-if self.config.normalisation
-    && !(f64::abs(normalisation_factor - 1.0) <= f64::EPSILON
-        && self.config.normalisation_method == NormalisationMethod::Basic)
-{
-    // zero-cost shorthands
-    let threshold_db = self.config.normalisation_threshold_dbfs;
-    let knee_db = self.config.normalisation_knee_db;
-    let attack_cf = self.config.normalisation_attack_cf;
-    let release_cf = self.config.normalisation_release_cf;
-
-    for sample in data.iter_mut() {
-        *sample *= normalisation_factor; // for both the basic and dynamic limiter
-
-        // Feedforward limiter in the log domain
-        // After: Giannoulis, D., Massberg, M., & Reiss, J.D. (2012). Digital Dynamic
-        // Range Compressor Design—A Tutorial and Analysis. Journal of The Audio
-        // Engineering Society, 60, 399-408.
-        if self.config.normalisation_method == NormalisationMethod::Dynamic {
-            // steps 1 + 2: half-wave rectification and conversion into dB
-            let abs_sample_db = ratio_to_db(sample.abs());
-
-            // Some tracks have samples that are precisely 0.0, but ratio_to_db(0.0)
-            // returns -inf and gets the peak detector stuck.
-            if !abs_sample_db.is_normal() {
-                continue;
-            }
-
-            // step 3: gain computer with soft knee
-            let biased_sample = abs_sample_db - threshold_db;
-            let limited_sample = if 2.0 * biased_sample < -knee_db {
-                abs_sample_db
-            } else if 2.0 * biased_sample.abs() <= knee_db {
-                abs_sample_db - (biased_sample + knee_db / 2.0).powi(2) / (2.0 * knee_db)
-            } else {
-                threshold_db as f64
-            };
-
-            // step 4: subtractor
-            let limiter_input = abs_sample_db - limited_sample;
-
-            // Spare the CPU unless the limiter is active or we are riding a peak.
-            if !(limiter_input > 0.0
-                || self.normalisation_integrator > 0.0
-                || self.normalisation_peak > 0.0)
-            {
-                continue;
-            }
-
-            // step 5: smooth, decoupled peak detector
-            self.normalisation_integrator = f64::max(
-                limiter_input,
-                release_cf * self.normalisation_integrator + (1.0 - release_cf) * limiter_input,
-            );
-            self.normalisation_peak = attack_cf * self.normalisation_peak
-                + (1.0 - attack_cf) * self.normalisation_integrator;
-
-            // step 6: make-up gain applied later (volume attenuation)
-            // Applying the standard normalisation factor here won't work,
-            // because there are tracks with peaks as high as 6 dB above
-            // the default threshold, so that would clip.
-
-            // steps 7-8: conversion into level and multiplication into gain stage
-            *sample *= db_to_ratio(-self.normalisation_peak);
-        }
-    }
-}
+if self.config.normalisation {
+    if self.config.normalisation_method == NormalisationMethod::Basic
+        && normalisation_factor < 1.0
+    {
+        for sample in data.iter_mut() {
+            *sample *= normalisation_factor;
+        }
+    } else if self.config.normalisation_method == NormalisationMethod::Dynamic {
+        // zero-cost shorthands
+        let threshold_db = self.config.normalisation_threshold_dbfs;
+        let knee_db = self.config.normalisation_knee_db;
+        let attack_cf = self.config.normalisation_attack_cf;
+        let release_cf = self.config.normalisation_release_cf;
+
+        for sample in data.iter_mut() {
+            *sample *= normalisation_factor;
+
+            // Feedforward limiter in the log domain
+            // After: Giannoulis, D., Massberg, M., & Reiss, J.D. (2012). Digital Dynamic
+            // Range Compressor Design—A Tutorial and Analysis. Journal of The Audio
+            // Engineering Society, 60, 399-408.
+
+            // Some tracks have samples that are precisely 0.0. That's silence
+            // and we know we don't need to limit that, in which we can spare
+            // the CPU cycles.
+            //
+            // Also, calling `ratio_to_db(0.0)` returns `inf` and would get the
+            // peak detector stuck. Also catch the unlikely case where a sample
+            // is decoded as `NaN` or some other non-normal value.
+            let limiter_db = if sample.is_normal() {
+                // step 1-4: half-wave rectification and conversion into dB
+                // and gain computer with soft knee and subtractor
+                let bias_db = ratio_to_db(sample.abs()) - threshold_db;
+                let knee_boundary_db = bias_db * 2.0;
+
+                if knee_boundary_db < -knee_db {
+                    0.0
+                } else if knee_boundary_db.abs() <= knee_db {
+                    // The textbook equation:
+                    // ratio_to_db(sample.abs()) - (ratio_to_db(sample.abs()) - (bias_db + knee_db / 2.0).powi(2) / (2.0 * knee_db))
+                    // Simplifies to:
+                    // ((2.0 * bias_db) + knee_db).powi(2) / (8.0 * knee_db)
+                    // Which in our case further simplifies to:
+                    // (knee_boundary_db + knee_db).powi(2) / (8.0 * knee_db)
+                    // because knee_boundary_db is 2.0 * bias_db.
+                    (knee_boundary_db + knee_db).powi(2) / (8.0 * knee_db)
+                } else {
+                    // Textbook:
+                    // ratio_to_db(sample.abs()) - threshold_db, which is already our bias_db.
+                    bias_db
+                }
+            } else {
+                0.0
+            };
+
+            // Spare the CPU unless (1) the limiter is engaged, (2) we
+            // were in attack or (3) we were in release, and that attack/
+            // release wasn't finished yet.
+            if limiter_db > 0.0
+                || self.normalisation_integrator > 0.0
+                || self.normalisation_peak > 0.0
+            {
+                // step 5: smooth, decoupled peak detector
+                // Textbook:
+                // release_cf * self.normalisation_integrator + (1.0 - release_cf) * limiter_db
+                // Simplifies to:
+                // release_cf * self.normalisation_integrator - release_cf * limiter_db + limiter_db
+                self.normalisation_integrator = f64::max(
+                    limiter_db,
+                    release_cf * self.normalisation_integrator - release_cf * limiter_db
+                        + limiter_db,
+                );
+                // Textbook:
+                // attack_cf * self.normalisation_peak + (1.0 - attack_cf) * self.normalisation_integrator
+                // Simplifies to:
+                // attack_cf * self.normalisation_peak - attack_cf * self.normalisation_integrator + self.normalisation_integrator
+                self.normalisation_peak = attack_cf * self.normalisation_peak
+                    - attack_cf * self.normalisation_integrator
+                    + self.normalisation_integrator;
+
+                // step 6: make-up gain applied later (volume attenuation)
+                // Applying the standard normalisation factor here won't work,
+                // because there are tracks with peaks as high as 6 dB above
+                // the default threshold, so that would clip.
+
+                // steps 7-8: conversion into level and multiplication into gain stage
+                *sample *= db_to_ratio(-self.normalisation_peak);
+            }
+        }
+    }
+}
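The simplification that the new comments walk through can be verified numerically: in the knee region the textbook gain-computer subtraction reduces to (bias_db + knee_db/2)^2 / (2 * knee_db), and since knee_boundary_db = 2 * bias_db this equals (knee_boundary_db + knee_db)^2 / (8 * knee_db). A quick standalone check with arbitrary in-knee values:

fn main() {
    // Arbitrary values chosen to land inside the soft knee.
    let bias_db = -1.5_f64; // sample level in dB relative to the threshold
    let knee_db = 4.0_f64; // knee width in dB
    let knee_boundary_db = bias_db * 2.0;
    assert!(knee_boundary_db.abs() <= knee_db); // inside the knee region

    // Textbook subtractor output in the knee region.
    let textbook = (bias_db + knee_db / 2.0).powi(2) / (2.0 * knee_db);
    // Simplified form used by the new code.
    let simplified = (knee_boundary_db + knee_db).powi(2) / (8.0 * knee_db);

    assert!((textbook - simplified).abs() < 1e-12);
    println!("limiter_db = {:.5} dB in both forms", simplified);
}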
@@ -1981,15 +2038,8 @@ impl PlayerInternal {
     }

     fn send_event(&mut self, event: PlayerEvent) {
-        let mut index = 0;
-        while index < self.event_senders.len() {
-            match self.event_senders[index].send(event.clone()) {
-                Ok(_) => index += 1,
-                Err(_) => {
-                    self.event_senders.remove(index);
-                }
-            }
-        }
+        self.event_senders
+            .retain(|sender| sender.send(event.clone()).is_ok());
     }

     fn load_track(
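The `retain` rewrite keeps only senders whose receiving end is still alive, pruning closed channels in one pass instead of manual index bookkeeping. A minimal standalone illustration with std channels (librespot's event senders are a different channel type, but the pattern is identical):

use std::sync::mpsc::channel;

fn main() {
    let (alive_tx, _alive_rx) = channel::<u32>();
    let (dead_tx, dead_rx) = channel::<u32>();
    drop(dead_rx); // this receiver is gone, so sends on dead_tx will fail

    let mut senders = vec![alive_tx, dead_tx];
    // Keep a sender only if delivering the event succeeded.
    senders.retain(|s| s.send(42).is_ok());

    assert_eq!(senders.len(), 1);
}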

View file

@@ -192,7 +192,7 @@ fn get_setup() -> Setup {
     const VALID_INITIAL_VOLUME_RANGE: RangeInclusive<u16> = 0..=100;
     const VALID_VOLUME_RANGE: RangeInclusive<f64> = 0.0..=100.0;
     const VALID_NORMALISATION_KNEE_RANGE: RangeInclusive<f64> = 0.0..=10.0;
-    const VALID_NORMALISATION_PREGAIN_RANGE: RangeInclusive<f32> = -10.0..=10.0;
+    const VALID_NORMALISATION_PREGAIN_RANGE: RangeInclusive<f64> = -10.0..=10.0;
     const VALID_NORMALISATION_THRESHOLD_RANGE: RangeInclusive<f64> = -10.0..=0.0;
     const VALID_NORMALISATION_ATTACK_RANGE: RangeInclusive<u64> = 1..=500;
     const VALID_NORMALISATION_RELEASE_RANGE: RangeInclusive<u64> = 1..=1000;
@@ -671,6 +671,7 @@ fn get_setup() -> Setup {
         let opt = key.trim_start_matches('-');

         if index > 0
+            && key.starts_with('-')
            && &args[index - 1] != key
            && matches.opt_defined(opt)
            && matches.opt_present(opt)
@@ -1306,12 +1307,7 @@ fn get_setup() -> Setup {
             normalisation_method = opt_str(NORMALISATION_METHOD)
                 .as_deref()
                 .map(|method| {
-                    warn!(
-                        "`--{}` / `-{}` will be deprecated in a future release.",
-                        NORMALISATION_METHOD, NORMALISATION_METHOD_SHORT
-                    );
-
-                    let method = NormalisationMethod::from_str(method).unwrap_or_else(|_| {
+                    NormalisationMethod::from_str(method).unwrap_or_else(|_| {
                         invalid_error_msg(
                             NORMALISATION_METHOD,
                             NORMALISATION_METHOD_SHORT,
@@ -1321,16 +1317,7 @@ fn get_setup() -> Setup {
                         );
                         exit(1);
-                    });
-
-                    if matches!(method, NormalisationMethod::Basic) {
-                        warn!(
-                            "`--{}` / `-{}` {:?} will be deprecated in a future release.",
-                            NORMALISATION_METHOD, NORMALISATION_METHOD_SHORT, method
-                        );
-                    }
-
-                    method
+                    })
                 })
                 .unwrap_or(player_default_config.normalisation_method);
@@ -1352,7 +1339,7 @@ fn get_setup() -> Setup {
                 .unwrap_or(player_default_config.normalisation_type);

             normalisation_pregain_db = opt_str(NORMALISATION_PREGAIN)
-                .map(|pregain| match pregain.parse::<f32>() {
+                .map(|pregain| match pregain.parse::<f64>() {
                     Ok(value) if (VALID_NORMALISATION_PREGAIN_RANGE).contains(&value) => value,
                     _ => {
                         let valid_values = &format!(

View file

@@ -1,59 +1,116 @@
-use log::info;
 use std::{
     collections::HashMap,
-    io,
+    io::{Error, ErrorKind, Result},
     process::{Command, ExitStatus},
 };

+use log::info;
 use tokio::process::{Child as AsyncChild, Command as AsyncCommand};

 use librespot::playback::player::{PlayerEvent, SinkStatus};

-pub fn run_program_on_events(event: PlayerEvent, onevent: &str) -> Option<io::Result<AsyncChild>> {
+pub fn run_program_on_events(event: PlayerEvent, onevent: &str) -> Option<Result<AsyncChild>> {
     let mut env_vars = HashMap::new();
     match event {
         PlayerEvent::Changed {
             old_track_id,
             new_track_id,
-        } => {
-            env_vars.insert("PLAYER_EVENT", "changed".to_string());
-            env_vars.insert("OLD_TRACK_ID", old_track_id.to_base62());
-            env_vars.insert("TRACK_ID", new_track_id.to_base62());
-        }
-        PlayerEvent::Started { track_id, .. } => {
-            env_vars.insert("PLAYER_EVENT", "started".to_string());
-            env_vars.insert("TRACK_ID", track_id.to_base62());
-        }
-        PlayerEvent::Stopped { track_id, .. } => {
-            env_vars.insert("PLAYER_EVENT", "stopped".to_string());
-            env_vars.insert("TRACK_ID", track_id.to_base62());
-        }
+        } => match old_track_id.to_base62() {
+            Err(_) => {
+                return Some(Err(Error::new(
+                    ErrorKind::InvalidData,
+                    "PlayerEvent::Changed: Invalid old track id",
+                )))
+            }
+            Ok(old_id) => match new_track_id.to_base62() {
+                Err(_) => {
+                    return Some(Err(Error::new(
+                        ErrorKind::InvalidData,
+                        "PlayerEvent::Changed: Invalid new track id",
+                    )))
+                }
+                Ok(new_id) => {
+                    env_vars.insert("PLAYER_EVENT", "changed".to_string());
+                    env_vars.insert("OLD_TRACK_ID", old_id);
+                    env_vars.insert("TRACK_ID", new_id);
+                }
+            },
+        },
+        PlayerEvent::Started { track_id, .. } => match track_id.to_base62() {
+            Err(_) => {
+                return Some(Err(Error::new(
+                    ErrorKind::InvalidData,
+                    "PlayerEvent::Started: Invalid track id",
+                )))
+            }
+            Ok(id) => {
+                env_vars.insert("PLAYER_EVENT", "started".to_string());
+                env_vars.insert("TRACK_ID", id);
+            }
+        },
+        PlayerEvent::Stopped { track_id, .. } => match track_id.to_base62() {
+            Err(_) => {
+                return Some(Err(Error::new(
+                    ErrorKind::InvalidData,
+                    "PlayerEvent::Stopped: Invalid track id",
+                )))
+            }
+            Ok(id) => {
+                env_vars.insert("PLAYER_EVENT", "stopped".to_string());
+                env_vars.insert("TRACK_ID", id);
+            }
+        },
         PlayerEvent::Playing {
             track_id,
             duration_ms,
             position_ms,
             ..
-        } => {
-            env_vars.insert("PLAYER_EVENT", "playing".to_string());
-            env_vars.insert("TRACK_ID", track_id.to_base62());
-            env_vars.insert("DURATION_MS", duration_ms.to_string());
-            env_vars.insert("POSITION_MS", position_ms.to_string());
-        }
+        } => match track_id.to_base62() {
+            Err(_) => {
+                return Some(Err(Error::new(
+                    ErrorKind::InvalidData,
+                    "PlayerEvent::Playing: Invalid track id",
+                )))
+            }
+            Ok(id) => {
+                env_vars.insert("PLAYER_EVENT", "playing".to_string());
+                env_vars.insert("TRACK_ID", id);
+                env_vars.insert("DURATION_MS", duration_ms.to_string());
+                env_vars.insert("POSITION_MS", position_ms.to_string());
+            }
+        },
         PlayerEvent::Paused {
             track_id,
             duration_ms,
             position_ms,
             ..
-        } => {
-            env_vars.insert("PLAYER_EVENT", "paused".to_string());
-            env_vars.insert("TRACK_ID", track_id.to_base62());
-            env_vars.insert("DURATION_MS", duration_ms.to_string());
-            env_vars.insert("POSITION_MS", position_ms.to_string());
-        }
-        PlayerEvent::Preloading { track_id, .. } => {
-            env_vars.insert("PLAYER_EVENT", "preloading".to_string());
-            env_vars.insert("TRACK_ID", track_id.to_base62());
-        }
+        } => match track_id.to_base62() {
+            Err(_) => {
+                return Some(Err(Error::new(
+                    ErrorKind::InvalidData,
+                    "PlayerEvent::Paused: Invalid track id",
+                )))
+            }
+            Ok(id) => {
+                env_vars.insert("PLAYER_EVENT", "paused".to_string());
+                env_vars.insert("TRACK_ID", id);
+                env_vars.insert("DURATION_MS", duration_ms.to_string());
+                env_vars.insert("POSITION_MS", position_ms.to_string());
+            }
+        },
+        PlayerEvent::Preloading { track_id, .. } => match track_id.to_base62() {
+            Err(_) => {
+                return Some(Err(Error::new(
+                    ErrorKind::InvalidData,
+                    "PlayerEvent::Preloading: Invalid track id",
+                )))
+            }
+            Ok(id) => {
+                env_vars.insert("PLAYER_EVENT", "preloading".to_string());
+                env_vars.insert("TRACK_ID", id);
+            }
+        },
         PlayerEvent::VolumeSet { volume } => {
             env_vars.insert("PLAYER_EVENT", "volume_set".to_string());
             env_vars.insert("VOLUME", volume.to_string());
@@ -71,7 +128,7 @@ pub fn run_program_on_events(event: PlayerEvent, onevent: &str) -> Option<io::Re
     )
 }

-pub fn emit_sink_event(sink_status: SinkStatus, onevent: &str) -> io::Result<ExitStatus> {
+pub fn emit_sink_event(sink_status: SinkStatus, onevent: &str) -> Result<ExitStatus> {
     let mut env_vars = HashMap::new();
     env_vars.insert("PLAYER_EVENT", "sink".to_string());
     let sink_status = match sink_status {