Merge pull request #816 from roderickvd/new-api-client

Lay groundwork for new Spotify API client
Roderick van Domburg 2021-06-28 22:08:50 +02:00 committed by GitHub
commit f99f336a6a
62 changed files with 3101 additions and 1837 deletions

View file

@ -11,6 +11,11 @@ on:
"Cargo.lock",
"rustfmt.toml",
".github/workflows/*",
"!*.md",
"!contrib/*",
"!docs/*",
"!LICENSE",
"!*.sh",
]
pull_request:
paths:
@ -20,6 +25,11 @@ on:
"Cargo.lock",
"rustfmt.toml",
".github/workflows/*",
"!*.md",
"!contrib/*",
"!docs/*",
"!LICENSE",
"!*.sh",
]
schedule:
# Run CI every week
@ -99,8 +109,8 @@ jobs:
- run: cargo hack --workspace --remove-dev-deps
- run: cargo build -p librespot-core --no-default-features
- run: cargo build -p librespot-core
- run: cargo build -p librespot-connect
- run: cargo build -p librespot-connect --no-default-features --features with-dns-sd
- run: cargo hack build --each-feature -p librespot-discovery
- run: cargo hack build --each-feature -p librespot-playback
- run: cargo hack build --each-feature
test-windows:

View file

@ -6,14 +6,44 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) since v0.2.0.
## [Unreleased]
### Added
- [discovery] The crate `librespot-discovery` for discovery on the LAN was created. Its functionality was previously part of `librespot-connect`.
- [playback] Add support for dithering with `--dither` for lower requantization error (breaking)
- [playback] Add `--volume-range` option to set dB range and control `log` and `cubic` volume control curves
- [playback] `alsamixer`: support for querying dB range from Alsa softvol
- [playback] Add `--format F64` (supported by Alsa and GStreamer only)
### Changed
- [audio, playback] Moved `VorbisDecoder`, `VorbisError`, `AudioPacket`, `PassthroughDecoder`, `PassthroughError`, `AudioError`, `AudioDecoder` and the `convert` module from `librespot-audio` to `librespot-playback`. The underlying crates `vorbis`, `librespot-tremor`, `lewton` and `ogg` should be used directly. (breaking)
- [audio, playback] Use `Duration` for time constants and functions (breaking)
- [connect, playback] Moved volume controls from `librespot-connect` to `librespot-playback` crate
- [connect] Synchronize player volume with mixer volume on playback
- [playback] Store and pass samples in 64-bit floating point
- [playback] Make cubic volume control available to all mixers with `--volume-ctrl cubic`
- [playback] Normalize volumes to `[0.0..1.0]` instead of `[0..65535]` for greater precision and performance (breaking)
- [playback] `alsamixer`: complete rewrite (breaking)
- [playback] `alsamixer`: query card dB range for the `log` volume control unless specified otherwise
- [playback] `alsamixer`: use `--device` name for `--mixer-card` unless specified otherwise
- [playback] `player`: consider errors in `sink.start`, `sink.stop` and `sink.write` fatal and `exit(1)` (breaking)
### Deprecated
- [connect] The `discovery` module was deprecated in favor of the `librespot-discovery` crate
### Removed
* [librespot-audio] Removed `VorbisDecoder`, `VorbisError`, `AudioPacket`, `PassthroughDecoder`, `PassthroughError`, `AudioError`, `AudioDecoder` and the `convert` module from `librespot_audio`. The underlying crates `vorbis`, `librespot-tremor`, `lewton` and `ogg` should be used directly.
- [connect] Removed no-op mixer started/stopped logic (breaking)
- [playback] Removed `with-vorbis` and `with-tremor` features
- [playback] `alsamixer`: removed `--mixer-linear-volume` option; use `--volume-ctrl linear` instead
### Fixed
* [librespot-playback] Incorrect `PlayerConfig::default().normalisation_threshold` caused distortion when using dynamic volume normalisation downstream
- [connect] Fix step size on volume up/down events
- [playback] Incorrect `PlayerConfig::default().normalisation_threshold` caused distortion when using dynamic volume normalisation downstream
- [playback] Fix `log` and `cubic` volume controls to be mute at zero volume
- [playback] Fix `S24_3` format on big-endian systems
- [playback] `alsamixer`: make `cubic` consistent between cards that report minimum volume as mute, and cards that report some dB value
- [playback] `alsamixer`: make `--volume-ctrl {linear|log}` work as expected
- [playback] `alsa`, `gstreamer`, `pulseaudio`: always output in native endianness
- [playback] `alsa`: revert buffer size to ~500 ms
- [playback] `alsa`, `pipe`: better error handling
## [0.2.0] - 2021-05-04
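
For context on the volume entries above, here is a minimal standalone sketch (not the crate's mixer code) of `cubic` and `log` curves applied to a volume already normalized to `[0.0, 1.0]`, assuming a 60 dB range; both curves are exactly mute at zero volume, as the fixes describe.

```rust
// Illustrative only; assumes a 60 dB range (the value mentioned elsewhere in
// this PR), not necessarily librespot's configured --volume-range default.
const DB_RANGE: f64 = 60.0;

fn cubic(v: f64) -> f64 {
    v.powi(3) // 0.0 stays exactly 0.0, i.e. mute
}

fn log_curve(v: f64) -> f64 {
    if v == 0.0 {
        0.0 // explicit mute at zero, per the "mute at zero volume" fix
    } else {
        // amplitude factor for a gain of (v - 1.0) * DB_RANGE decibels
        10f64.powf((v - 1.0) * DB_RANGE / 20.0)
    }
}

fn main() {
    for v in [0.0, 0.25, 0.5, 1.0] {
        println!("{:.2}: cubic {:.4}, log {:.4}", v, cubic(v), log_curve(v));
    }
}
```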

Cargo.lock (generated, 280 changes)
View file

@ -1,7 +1,5 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aes"
version = "0.6.0"
@ -170,9 +168,9 @@ checksum = "b700ce4376041dcd0a327fd0097c41095743c4c8af8887265942faf1100bd040"
[[package]]
name = "cc"
version = "1.0.67"
version = "1.0.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd"
checksum = "4a72c244c1ff497a746a7e1fb3d14bd08420ecda70c8f25c7112f2781652d787"
dependencies = [
"jobserver",
]
@ -237,6 +235,17 @@ dependencies = [
"libloading 0.7.0",
]
[[package]]
name = "colored"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4ffc801dacf156c5854b9df4f425a626539c3a6ef7893cc0c5084a23f0b6c59"
dependencies = [
"atty",
"lazy_static",
"winapi",
]
[[package]]
name = "combine"
version = "4.5.2"
@ -300,9 +309,9 @@ dependencies = [
[[package]]
name = "cpufeatures"
version = "0.1.1"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dec1028182c380cc45a2e2c5ec841134f2dfd0f8f5f0a5bcd68004f81b5efdf4"
checksum = "ed00c67cb5d0a7d64a44f6ad2668db7e7530311dd53ea79bcd4fb022c64911c8"
dependencies = [
"libc",
]
@ -428,9 +437,9 @@ dependencies = [
[[package]]
name = "futures"
version = "0.3.14"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9d5813545e459ad3ca1bff9915e9ad7f1a47dc6a91b627ce321d5863b7dd253"
checksum = "0e7e43a803dae2fa37c1f6a8fe121e1f7bf9548b4dfc0522a42f34145dadfc27"
dependencies = [
"futures-channel",
"futures-core",
@ -520,12 +529,6 @@ dependencies = [
"slab",
]
[[package]]
name = "gcc"
version = "0.3.55"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2"
[[package]]
name = "generic-array"
version = "0.14.4"
@ -547,9 +550,9 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.2.2"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8"
checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
dependencies = [
"cfg-if 1.0.0",
"libc",
@ -635,7 +638,7 @@ dependencies = [
"gstreamer-sys",
"libc",
"muldiv",
"num-rational",
"num-rational 0.3.2",
"once_cell",
"paste",
"pretty-hex",
@ -816,15 +819,15 @@ dependencies = [
[[package]]
name = "httparse"
version = "1.4.0"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a1ce40d6fc9764887c2fdc7305c3dcc429ba11ff981c1509416afd5697e4437"
checksum = "f3a87b616e37e93c22fb19bcd386f02f3af5ea98a25670ad0fce773de23c5e68"
[[package]]
name = "httpdate"
version = "1.0.0"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05842d0d43232b23ccb7060ecb0f0626922c21f30012e97b767b30afd4a5d4b9"
checksum = "6456b8a6c8f33fee7d958fcd1b60d55b11940a79e63ae87013e6d22e26034440"
[[package]]
name = "humantime"
@ -834,9 +837,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hyper"
version = "0.14.7"
version = "0.14.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e5f105c494081baa3bf9e200b279e27ec1623895cd504c7dbef8d0b080fcf54"
checksum = "d3f71a7eea53a3f8257a7b4795373ff886397178cd634430ea94e12d7fe4fe34"
dependencies = [
"bytes",
"futures-channel",
@ -1049,9 +1052,9 @@ dependencies = [
[[package]]
name = "libc"
version = "0.2.94"
version = "0.2.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18794a8ad5b29321f790b55d93dfba91e125cb1a9edbd4f8e3150acc771c1a5e"
checksum = "789da6d93f1b866ffe175afc5322a4d76c038605a1c3319bb57b06967ca98a36"
[[package]]
name = "libloading"
@ -1073,6 +1076,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "libm"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7d73b3f436185384286bd8098d17ec07c9a7d2388a6599f824d8502b529702a"
[[package]]
name = "libmdns"
version = "0.6.1"
@ -1152,6 +1161,7 @@ dependencies = [
"librespot-audio",
"librespot-connect",
"librespot-core",
"librespot-discovery",
"librespot-metadata",
"librespot-playback",
"librespot-protocol",
@ -1181,16 +1191,10 @@ dependencies = [
name = "librespot-connect"
version = "0.2.0"
dependencies = [
"aes-ctr",
"base64",
"dns-sd",
"form_urlencoded",
"futures-core",
"futures-util",
"hmac",
"hyper",
"libmdns",
"librespot-core",
"librespot-discovery",
"librespot-playback",
"librespot-protocol",
"log",
@ -1198,10 +1202,8 @@ dependencies = [
"rand",
"serde",
"serde_json",
"sha-1",
"tokio",
"tokio-stream",
"url",
]
[[package]]
@ -1223,7 +1225,9 @@ dependencies = [
"hyper-proxy",
"librespot-protocol",
"log",
"num",
"num-bigint",
"num-derive",
"num-integer",
"num-traits",
"once_cell",
@ -1245,6 +1249,31 @@ dependencies = [
"vergen",
]
[[package]]
name = "librespot-discovery"
version = "0.2.0"
dependencies = [
"aes-ctr",
"base64",
"cfg-if 1.0.0",
"dns-sd",
"form_urlencoded",
"futures",
"futures-core",
"hex",
"hmac",
"hyper",
"libmdns",
"librespot-core",
"log",
"rand",
"serde_json",
"sha-1",
"simple_logger",
"thiserror",
"tokio",
]
[[package]]
name = "librespot-metadata"
version = "0.2.0"
@ -1263,7 +1292,6 @@ version = "0.2.0"
dependencies = [
"alsa",
"byteorder",
"cfg-if 1.0.0",
"cpal",
"futures-executor",
"futures-util",
@ -1277,16 +1305,16 @@ dependencies = [
"librespot-audio",
"librespot-core",
"librespot-metadata",
"librespot-tremor",
"log",
"ogg",
"portaudio-rs",
"rand",
"rand_distr",
"rodio",
"sdl2",
"shell-words",
"thiserror",
"tokio",
"vorbis",
"zerocopy",
]
@ -1299,18 +1327,6 @@ dependencies = [
"protobuf-codegen-pure",
]
[[package]]
name = "librespot-tremor"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97f525bff915d478a76940a7b988e5ea34911ba7280c97bd3a7673f54d68b4fe"
dependencies = [
"cc",
"libc",
"ogg-sys",
"pkg-config",
]
[[package]]
name = "lock_api"
version = "0.4.4"
@ -1475,6 +1491,20 @@ dependencies = [
"winapi",
]
[[package]]
name = "num"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606"
dependencies = [
"num-bigint",
"num-complex",
"num-integer",
"num-iter",
"num-rational 0.4.0",
"num-traits",
]
[[package]]
name = "num-bigint"
version = "0.4.0"
@ -1487,6 +1517,15 @@ dependencies = [
"rand",
]
[[package]]
name = "num-complex"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26873667bbbb7c5182d4a37c1add32cdf09f841af72da53318fdb81543c15085"
dependencies = [
"num-traits",
]
[[package]]
name = "num-derive"
version = "0.3.3"
@ -1508,6 +1547,17 @@ dependencies = [
"num-traits",
]
[[package]]
name = "num-iter"
version = "0.1.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2021c8337a54d21aca0d59a92577a029af9431cb59b909b03252b9c164fad59"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.3.2"
@ -1519,6 +1569,18 @@ dependencies = [
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d41702bd167c2df5520b384281bc111a4b5efcf7fbc4c9c222c815b07e0a6a6a"
dependencies = [
"autocfg",
"num-bigint",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.14"
@ -1526,6 +1588,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
dependencies = [
"autocfg",
"libm",
]
[[package]]
@ -1562,9 +1625,9 @@ dependencies = [
[[package]]
name = "oboe"
version = "0.4.1"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4cfb2390bddb9546c0f7448fd1d2abdd39e6075206f960991eb28c7fa7f126c4"
checksum = "dfa187b38ae20374617b7ad418034ed3dc90ac980181d211518bd03537ae8f8d"
dependencies = [
"jni",
"ndk",
@ -1576,9 +1639,9 @@ dependencies = [
[[package]]
name = "oboe-sys"
version = "0.4.0"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe069264d082fc820dfa172f79be3f2e088ecfece9b1c47b0c9fd838d2bef103"
checksum = "b88e64835aa3f579c08d182526dc34e3907343d5b97e87b71a40ba5bca7aca9e"
dependencies = [
"cc",
]
@ -1592,17 +1655,6 @@ dependencies = [
"byteorder",
]
[[package]]
name = "ogg-sys"
version = "0.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a95b8c172e17df1a41bf8d666301d3b2c4efeb90d9d0415e2a4dc0668b35fdb2"
dependencies = [
"gcc",
"libc",
"pkg-config",
]
[[package]]
name = "once_cell"
version = "1.7.2"
@ -1805,9 +1857,9 @@ checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086"
[[package]]
name = "proc-macro2"
version = "1.0.26"
version = "1.0.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec"
checksum = "f0d8caf72986c1a598726adc988bb5984792ef84f5ee5aa50209145ee8077038"
dependencies = [
"unicode-xid",
]
@ -1877,6 +1929,16 @@ dependencies = [
"getrandom",
]
[[package]]
name = "rand_distr"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da9e8f32ad24fb80d07d2323a9a2ce8b30d68a62b8cb4df88119ff49a698f038"
dependencies = [
"num-traits",
"rand",
]
[[package]]
name = "rand_hc"
version = "0.3.0"
@ -2057,18 +2119,18 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.125"
version = "1.0.126"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "558dc50e1a5a5fa7112ca2ce4effcb321b0300c0d4ccf0776a9f60cd89031171"
checksum = "ec7505abeacaec74ae4778d9d9328fe5a5d04253220a85c4ee022239fc996d03"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.125"
version = "1.0.126"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b093b7a2bb58203b5da3056c05b4ec1fed827dcfdb37347a8841695263b3d06d"
checksum = "963a7dbc9895aeac7ac90e74f34a5d5261828f79df35cbed41e10189d3804d43"
dependencies = [
"proc-macro2",
"quote",
@ -2129,6 +2191,19 @@ dependencies = [
"libc",
]
[[package]]
name = "simple_logger"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd57f17c093ead1d4a1499dc9acaafdd71240908d64775465543b8d9a9f1d198"
dependencies = [
"atty",
"chrono",
"colored",
"log",
"winapi",
]
[[package]]
name = "slab"
version = "0.4.3"
@ -2256,18 +2331,18 @@ dependencies = [
[[package]]
name = "thiserror"
version = "1.0.24"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e"
checksum = "fa6f76457f59514c7eeb4e59d891395fab0b2fd1d40723ae737d64153392e9c6"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.24"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0"
checksum = "8a36768c0fbf1bb15eca10defa29526bda730a2376c2ab4393ccfa16fb1a318d"
dependencies = [
"proc-macro2",
"quote",
@ -2301,9 +2376,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
[[package]]
name = "tokio"
version = "1.5.0"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83f0c8e7c0addab50b663055baf787d0af7f413a46e6e7fb9559a4e4db7137a5"
checksum = "bd3076b5c8cc18138b8f8814895c11eb4de37114a5d127bafdc5e55798ceef37"
dependencies = [
"autocfg",
"bytes",
@ -2320,9 +2395,9 @@ dependencies = [
[[package]]
name = "tokio-macros"
version = "1.1.0"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "caf7b11a536f46a809a8a9f0bb4237020f70ecbf115b842360afb127ea2fda57"
checksum = "c49e3df43841dafb86046472506755d8501c5615673955f6aa17181125d13c37"
dependencies = [
"proc-macro2",
"quote",
@ -2342,9 +2417,9 @@ dependencies = [
[[package]]
name = "tokio-stream"
version = "0.1.5"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e177a5d8c3bf36de9ebe6d58537d8879e964332f93fb3339e43f618c81361af0"
checksum = "f8864d706fdb3cc0843a49647ac892720dac98a6eeb818b77190592cf4994066"
dependencies = [
"futures-core",
"pin-project-lite",
@ -2370,9 +2445,9 @@ dependencies = [
[[package]]
name = "tokio-util"
version = "0.6.6"
version = "0.6.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "940a12c99365c31ea8dd9ba04ec1be183ffe4920102bb7122c2f515437601e8e"
checksum = "1caa0b0c8d94a049db56b5acf8cba99dc0623aab1b26d5b5f5e2d945846b3592"
dependencies = [
"bytes",
"futures-core",
@ -2550,43 +2625,6 @@ version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"
[[package]]
name = "vorbis"
version = "0.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e8a194457075360557b82dac78f7ca2d65bbb6679bccfabae5f7c8c706cc776"
dependencies = [
"libc",
"ogg-sys",
"vorbis-sys",
"vorbisfile-sys",
]
[[package]]
name = "vorbis-sys"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd9ed6ef5361a85e68ccc005961d995c2d44e31f0816f142025f2ca2383dfbfd"
dependencies = [
"cc",
"libc",
"ogg-sys",
"pkg-config",
]
[[package]]
name = "vorbisfile-sys"
version = "0.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f4306d7e1ac4699b55e20de9483750b90c250913188efd7484db6bfbe9042d1"
dependencies = [
"gcc",
"libc",
"ogg-sys",
"pkg-config",
"vorbis-sys",
]
[[package]]
name = "walkdir"
version = "2.3.2"
@ -2670,9 +2708,9 @@ checksum = "d7cff876b8f18eed75a66cf49b65e7f967cb354a7aa16003fb55dbfd25b44b4f"
[[package]]
name = "web-sys"
version = "0.3.50"
version = "0.3.51"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a905d57e488fec8861446d3393670fb50d27a262344013181c2cdf9fff5481be"
checksum = "e828417b379f3df7111d3a2a9e5753706cae29c41f7c4029ee9fd77f3e09e582"
dependencies = [
"js-sys",
"wasm-bindgen",

View file

@ -32,6 +32,10 @@ version = "0.2.0"
path = "core"
version = "0.2.0"
[dependencies.librespot-discovery]
path = "discovery"
version = "0.2.0"
[dependencies.librespot-metadata]
path = "metadata"
version = "0.2.0"
@ -68,10 +72,7 @@ rodiojack-backend = ["librespot-playback/rodiojack-backend"]
sdl-backend = ["librespot-playback/sdl-backend"]
gstreamer-backend = ["librespot-playback/gstreamer-backend"]
with-tremor = ["librespot-playback/with-tremor"]
with-vorbis = ["librespot-playback/with-vorbis"]
with-dns-sd = ["librespot-connect/with-dns-sd"]
with-dns-sd = ["librespot-discovery/with-dns-sd"]
default = ["rodio-backend"]
@ -89,5 +90,6 @@ section = "sound"
priority = "optional"
assets = [
["target/release/librespot", "usr/bin/", "755"],
["contrib/librespot.service", "lib/systemd/system/", "644"]
["contrib/librespot.service", "lib/systemd/system/", "644"],
["contrib/librespot.user.service", "lib/systemd/user/", "644"]
]

View file

@ -89,6 +89,7 @@ The above command will create a receiver named ```Librespot```, with bitrate set
A full list of runtime options is available [here](https://github.com/librespot-org/librespot/wiki/Options)
_Please Note: When using the cache feature, an authentication blob is stored for your account in the cache directory. For security purposes, we recommend that you set directory permissions on the cache directory to `700`._
## Contact
Come and hang out on gitter if you need help or want to offer some.
https://gitter.im/librespot-org/spotify-connect-resources

View file

@ -18,70 +18,70 @@ use tokio::sync::{mpsc, oneshot};
use self::receive::{audio_file_fetch, request_range};
use crate::range_set::{Range, RangeSet};
/// The minimum size of a block that is requested from the Spotify servers in one request.
/// This is the block size that is typically requested while doing a `seek()` on a file.
/// Note: smaller requests can happen if part of the block is downloaded already.
const MINIMUM_DOWNLOAD_SIZE: usize = 1024 * 16;
// The minimum size of a block that is requested from the Spotify servers in one request.
// This is the block size that is typically requested while doing a seek() on a file.
// Note: smaller requests can happen if part of the block is downloaded already.
/// The amount of data that is requested when initially opening a file.
/// Note: if the file is opened to play from the beginning, the amount of data to
/// read ahead is requested in addition to this amount. If the file is opened to seek to
/// another position, then only this amount is requested on the first request.
const INITIAL_DOWNLOAD_SIZE: usize = 1024 * 16;
// The amount of data that is requested when initially opening a file.
// Note: if the file is opened to play from the beginning, the amount of data to
// read ahead is requested in addition to this amount. If the file is opened to seek to
// another position, then only this amount is requested on the first request.
const INITIAL_PING_TIME_ESTIMATE_SECONDS: f64 = 0.5;
// The ping time that is used for calculations before a ping time was actually measured.
/// The ping time that is used for calculations before a ping time was actually measured.
const INITIAL_PING_TIME_ESTIMATE: Duration = Duration::from_millis(500);
const MAXIMUM_ASSUMED_PING_TIME_SECONDS: f64 = 1.5;
// If the measured ping time to the Spotify server is larger than this value, it is capped
// to avoid run-away block sizes and pre-fetching.
/// If the measured ping time to the Spotify server is larger than this value, it is capped
/// to avoid run-away block sizes and pre-fetching.
const MAXIMUM_ASSUMED_PING_TIME: Duration = Duration::from_millis(1500);
pub const READ_AHEAD_BEFORE_PLAYBACK_SECONDS: f64 = 1.0;
// Before playback starts, this many seconds of data must be present.
// Note: the calculations are done using the nominal bitrate of the file. The actual amount
// of audio data may be larger or smaller.
/// Before playback starts, this many seconds of data must be present.
/// Note: the calculations are done using the nominal bitrate of the file. The actual amount
/// of audio data may be larger or smaller.
pub const READ_AHEAD_BEFORE_PLAYBACK: Duration = Duration::from_secs(1);
pub const READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS: f64 = 2.0;
// Same as READ_AHEAD_BEFORE_PLAYBACK_SECONDS, but the time is taken as a factor of the ping
// time to the Spotify server.
// Both, READ_AHEAD_BEFORE_PLAYBACK_SECONDS and READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS are
// obeyed.
// Note: the calculations are done using the nominal bitrate of the file. The actual amount
// of audio data may be larger or smaller.
/// Same as `READ_AHEAD_BEFORE_PLAYBACK`, but the time is taken as a factor of the ping
/// time to the Spotify server. Both `READ_AHEAD_BEFORE_PLAYBACK` and
/// `READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS` are obeyed.
/// Note: the calculations are done using the nominal bitrate of the file. The actual amount
/// of audio data may be larger or smaller.
pub const READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS: f32 = 2.0;
pub const READ_AHEAD_DURING_PLAYBACK_SECONDS: f64 = 5.0;
// While playing back, this many seconds of data ahead of the current read position are
// requested.
// Note: the calculations are done using the nominal bitrate of the file. The actual amount
// of audio data may be larger or smaller.
/// While playing back, this many seconds of data ahead of the current read position are
/// requested.
/// Note: the calculations are done using the nominal bitrate of the file. The actual amount
/// of audio data may be larger or smaller.
pub const READ_AHEAD_DURING_PLAYBACK: Duration = Duration::from_secs(5);
pub const READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS: f64 = 10.0;
// Same as READ_AHEAD_DURING_PLAYBACK_SECONDS, but the time is taken as a factor of the ping
// time to the Spotify server.
// Note: the calculations are done using the nominal bitrate of the file. The actual amount
// of audio data may be larger or smaller.
/// Same as `READ_AHEAD_DURING_PLAYBACK`, but the time is taken as a factor of the ping
/// time to the Spotify server.
/// Note: the calculations are done using the nominal bitrate of the file. The actual amount
/// of audio data may be larger or smaller.
pub const READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS: f32 = 10.0;
const PREFETCH_THRESHOLD_FACTOR: f64 = 4.0;
// If the amount of data that is pending (requested but not received) is less than a certain amount,
// data is pre-fetched in addition to the read ahead settings above. The threshold for requesting more
// data is calculated as
// <pending bytes> < PREFETCH_THRESHOLD_FACTOR * <ping time> * <nominal data rate>
/// If the amount of data that is pending (requested but not received) is less than a certain amount,
/// data is pre-fetched in addition to the read ahead settings above. The threshold for requesting more
/// data is calculated as `<pending bytes> < PREFETCH_THRESHOLD_FACTOR * <ping time> * <nominal data rate>`
const PREFETCH_THRESHOLD_FACTOR: f32 = 4.0;
const FAST_PREFETCH_THRESHOLD_FACTOR: f64 = 1.5;
// Similar to PREFETCH_THRESHOLD_FACTOR, but it also takes the current download rate into account.
// The formula used is
// <pending bytes> < FAST_PREFETCH_THRESHOLD_FACTOR * <ping time> * <measured download rate>
// This mechanism allows for fast downloading of the remainder of the file. The number should be larger
// than 1 so the download rate ramps up until the bandwidth is saturated. The larger the value, the faster
// the download rate ramps up. However, this comes at the cost that it might hurt ping-time if a seek is
// performed while downloading. Values smaller than 1 cause the download rate to collapse and effectively
// only PREFETCH_THRESHOLD_FACTOR is in effect. Thus, set to zero if bandwidth saturation is not wanted.
/// Similar to `PREFETCH_THRESHOLD_FACTOR`, but it also takes the current download rate into account.
/// The formula used is `<pending bytes> < FAST_PREFETCH_THRESHOLD_FACTOR * <ping time> * <measured download rate>`
/// This mechanism allows for fast downloading of the remainder of the file. The number should be larger
/// than `1.0` so the download rate ramps up until the bandwidth is saturated. The larger the value, the faster
/// the download rate ramps up. However, this comes at the cost that it might hurt ping time if a seek is
/// performed while downloading. Values smaller than `1.0` cause the download rate to collapse and effectively
/// only `PREFETCH_THRESHOLD_FACTOR` is in effect. Thus, set to `0.0` if bandwidth saturation is not wanted.
const FAST_PREFETCH_THRESHOLD_FACTOR: f32 = 1.5;
/// Limit the number of requests that are pending simultaneously before pre-fetching data. Pending
/// requests share bandwidth. Thus, having too many requests can delay the one that is needed next
/// for playback, leading to a buffer underrun. This limit has the effect that a new
/// pre-fetch request is only sent if less than `MAX_PREFETCH_REQUESTS` are pending.
const MAX_PREFETCH_REQUESTS: usize = 4;
// Limit the number of requests that are pending simultaneously before pre-fetching data. Pending
// requests share bandwidth. Thus, having too many requests can lead to the one that is needed next
// for playback to be delayed leading to a buffer underrun. This limit has the effect that a new
// pre-fetch request is only sent if less than MAX_PREFETCH_REQUESTS are pending.
/// The time we will wait to obtain status updates on downloading.
const DOWNLOAD_TIMEOUT: Duration = Duration::from_secs(1);
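
As a rough worked example of the pre-fetch threshold formula in the doc comments above; the 500 ms ping time and 40 kB/s nominal data rate are assumed example numbers, not anything librespot measures.

```rust
// Sketch only: evaluates <ping time> * <nominal data rate> * PREFETCH_THRESHOLD_FACTOR
// with made-up inputs to show the order of magnitude involved.
const PREFETCH_THRESHOLD_FACTOR: f32 = 4.0;

fn main() {
    let ping_time_seconds: f32 = 0.5;      // assumed measured ping
    let nominal_data_rate: f32 = 40_000.0; // bytes per second, roughly a 320 kbps stream
    let threshold =
        (PREFETCH_THRESHOLD_FACTOR * ping_time_seconds * nominal_data_rate) as usize;
    // With these numbers, more data is pre-fetched whenever fewer than
    // 80_000 bytes are pending (requested but not yet received).
    println!("pre-fetch below {} pending bytes", threshold);
}
```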
pub enum AudioFile {
Cached(fs::File),
@ -131,10 +131,10 @@ impl StreamLoaderController {
})
}
pub fn ping_time_ms(&self) -> usize {
self.stream_shared.as_ref().map_or(0, |shared| {
shared.ping_time_ms.load(atomic::Ordering::Relaxed)
})
pub fn ping_time(&self) -> Duration {
Duration::from_millis(self.stream_shared.as_ref().map_or(0, |shared| {
shared.ping_time_ms.load(atomic::Ordering::Relaxed) as u64
}))
}
fn send_stream_loader_command(&self, command: StreamLoaderCommand) {
@ -170,7 +170,7 @@ impl StreamLoaderController {
{
download_status = shared
.cond
.wait_timeout(download_status, Duration::from_millis(1000))
.wait_timeout(download_status, DOWNLOAD_TIMEOUT)
.unwrap()
.0;
if range.length
@ -271,10 +271,10 @@ impl AudioFile {
let mut initial_data_length = if play_from_beginning {
INITIAL_DOWNLOAD_SIZE
+ max(
(READ_AHEAD_DURING_PLAYBACK_SECONDS * bytes_per_second as f64) as usize,
(INITIAL_PING_TIME_ESTIMATE_SECONDS
(READ_AHEAD_DURING_PLAYBACK.as_secs_f32() * bytes_per_second as f32) as usize,
(INITIAL_PING_TIME_ESTIMATE.as_secs_f32()
* READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS
* bytes_per_second as f64) as usize,
* bytes_per_second as f32) as usize,
)
} else {
INITIAL_DOWNLOAD_SIZE
@ -368,7 +368,7 @@ impl AudioFileStreaming {
let read_file = write_file.reopen().unwrap();
//let (seek_tx, seek_rx) = mpsc::unbounded();
// let (seek_tx, seek_rx) = mpsc::unbounded();
let (stream_loader_command_tx, stream_loader_command_rx) =
mpsc::unbounded_channel::<StreamLoaderCommand>();
@ -405,17 +405,19 @@ impl Read for AudioFileStreaming {
let length_to_request = match *(self.shared.download_strategy.lock().unwrap()) {
DownloadStrategy::RandomAccess() => length,
DownloadStrategy::Streaming() => {
// Due to the read-ahead stuff, we potentially request more than the actual reqeust demanded.
let ping_time_seconds =
0.0001 * self.shared.ping_time_ms.load(atomic::Ordering::Relaxed) as f64;
// Due to the read-ahead stuff, we potentially request more than the actual request demanded.
let ping_time_seconds = Duration::from_millis(
self.shared.ping_time_ms.load(atomic::Ordering::Relaxed) as u64,
)
.as_secs_f32();
let length_to_request = length
+ max(
(READ_AHEAD_DURING_PLAYBACK_SECONDS * self.shared.stream_data_rate as f64)
as usize,
(READ_AHEAD_DURING_PLAYBACK.as_secs_f32()
* self.shared.stream_data_rate as f32) as usize,
(READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS
* ping_time_seconds
* self.shared.stream_data_rate as f64) as usize,
* self.shared.stream_data_rate as f32) as usize,
);
min(length_to_request, self.shared.file_size - offset)
}
@ -449,7 +451,7 @@ impl Read for AudioFileStreaming {
download_status = self
.shared
.cond
.wait_timeout(download_status, Duration::from_millis(1000))
.wait_timeout(download_status, DOWNLOAD_TIMEOUT)
.unwrap()
.0;
}

View file

@ -1,12 +1,14 @@
use std::cmp::{max, min};
use std::io::{Seek, SeekFrom, Write};
use std::sync::{atomic, Arc};
use std::time::Instant;
use std::time::{Duration, Instant};
use atomic::Ordering;
use byteorder::{BigEndian, WriteBytesExt};
use bytes::Bytes;
use futures_util::StreamExt;
use librespot_core::channel::{Channel, ChannelData};
use librespot_core::packet::PacketType;
use librespot_core::session::Session;
use librespot_core::spotify_id::FileId;
use tempfile::NamedTempFile;
@ -16,7 +18,7 @@ use crate::range_set::{Range, RangeSet};
use super::{AudioFileShared, DownloadStrategy, StreamLoaderCommand};
use super::{
FAST_PREFETCH_THRESHOLD_FACTOR, MAXIMUM_ASSUMED_PING_TIME_SECONDS, MAX_PREFETCH_REQUESTS,
FAST_PREFETCH_THRESHOLD_FACTOR, MAXIMUM_ASSUMED_PING_TIME, MAX_PREFETCH_REQUESTS,
MINIMUM_DOWNLOAD_SIZE, PREFETCH_THRESHOLD_FACTOR,
};
@ -46,7 +48,7 @@ pub fn request_range(session: &Session, file: FileId, offset: usize, length: usi
data.write_u32::<BigEndian>(start as u32).unwrap();
data.write_u32::<BigEndian>(end as u32).unwrap();
session.send_packet(0x8, data);
session.send_packet(PacketType::StreamChunk, data);
channel
}
@ -57,7 +59,7 @@ struct PartialFileData {
}
enum ReceivedData {
ResponseTimeMs(usize),
ResponseTime(Duration),
Data(PartialFileData),
}
@ -74,7 +76,7 @@ async fn receive_data(
let old_number_of_request = shared
.number_of_open_requests
.fetch_add(1, atomic::Ordering::SeqCst);
.fetch_add(1, Ordering::SeqCst);
let mut measure_ping_time = old_number_of_request == 0;
@ -86,14 +88,11 @@ async fn receive_data(
};
if measure_ping_time {
let duration = Instant::now() - request_sent_time;
let duration_ms: u64;
if 0.001 * (duration.as_millis() as f64) > MAXIMUM_ASSUMED_PING_TIME_SECONDS {
duration_ms = (MAXIMUM_ASSUMED_PING_TIME_SECONDS * 1000.0) as u64;
} else {
duration_ms = duration.as_millis() as u64;
let mut duration = Instant::now() - request_sent_time;
if duration > MAXIMUM_ASSUMED_PING_TIME {
duration = MAXIMUM_ASSUMED_PING_TIME;
}
let _ = file_data_tx.send(ReceivedData::ResponseTimeMs(duration_ms as usize));
let _ = file_data_tx.send(ReceivedData::ResponseTime(duration));
measure_ping_time = false;
}
let data_size = data.len();
@ -127,7 +126,7 @@ async fn receive_data(
shared
.number_of_open_requests
.fetch_sub(1, atomic::Ordering::SeqCst);
.fetch_sub(1, Ordering::SeqCst);
if result.is_err() {
warn!(
@ -149,7 +148,7 @@ struct AudioFileFetch {
file_data_tx: mpsc::UnboundedSender<ReceivedData>,
complete_tx: Option<oneshot::Sender<NamedTempFile>>,
network_response_times_ms: Vec<usize>,
network_response_times: Vec<Duration>,
}
// Might be replaced by enum from std once stable
@ -237,7 +236,7 @@ impl AudioFileFetch {
// download data from after the current read position first
let mut tail_end = RangeSet::new();
let read_position = self.shared.read_position.load(atomic::Ordering::Relaxed);
let read_position = self.shared.read_position.load(Ordering::Relaxed);
tail_end.add_range(&Range::new(
read_position,
self.shared.file_size - read_position,
@ -267,26 +266,23 @@ impl AudioFileFetch {
fn handle_file_data(&mut self, data: ReceivedData) -> ControlFlow {
match data {
ReceivedData::ResponseTimeMs(response_time_ms) => {
trace!("Ping time estimated as: {} ms.", response_time_ms);
ReceivedData::ResponseTime(response_time) => {
trace!("Ping time estimated as: {}ms", response_time.as_millis());
// record the response time
self.network_response_times_ms.push(response_time_ms);
// prune old response times. Keep at most three.
while self.network_response_times_ms.len() > 3 {
self.network_response_times_ms.remove(0);
// prune old response times. Keep at most two so we can push a third.
while self.network_response_times.len() >= 3 {
self.network_response_times.remove(0);
}
// record the response time
self.network_response_times.push(response_time);
// stats::median is experimental. So we calculate the median of up to three ourselves.
let ping_time_ms: usize = match self.network_response_times_ms.len() {
1 => self.network_response_times_ms[0] as usize,
2 => {
((self.network_response_times_ms[0] + self.network_response_times_ms[1])
/ 2) as usize
}
let ping_time = match self.network_response_times.len() {
1 => self.network_response_times[0],
2 => (self.network_response_times[0] + self.network_response_times[1]) / 2,
3 => {
let mut times = self.network_response_times_ms.clone();
let mut times = self.network_response_times.clone();
times.sort_unstable();
times[1]
}
@ -296,7 +292,7 @@ impl AudioFileFetch {
// store our new estimate for everyone to see
self.shared
.ping_time_ms
.store(ping_time_ms, atomic::Ordering::Relaxed);
.store(ping_time.as_millis() as usize, Ordering::Relaxed);
}
ReceivedData::Data(data) => {
self.output
@ -390,7 +386,7 @@ pub(super) async fn audio_file_fetch(
file_data_tx,
complete_tx: Some(complete_tx),
network_response_times_ms: Vec::new(),
network_response_times: Vec::with_capacity(3),
};
loop {
@ -408,10 +404,8 @@ pub(super) async fn audio_file_fetch(
}
if fetch.get_download_strategy() == DownloadStrategy::Streaming() {
let number_of_open_requests = fetch
.shared
.number_of_open_requests
.load(atomic::Ordering::SeqCst);
let number_of_open_requests =
fetch.shared.number_of_open_requests.load(Ordering::SeqCst);
if number_of_open_requests < MAX_PREFETCH_REQUESTS {
let max_requests_to_send = MAX_PREFETCH_REQUESTS - number_of_open_requests;
@ -424,14 +418,15 @@ pub(super) async fn audio_file_fetch(
};
let ping_time_seconds =
0.001 * fetch.shared.ping_time_ms.load(atomic::Ordering::Relaxed) as f64;
Duration::from_millis(fetch.shared.ping_time_ms.load(Ordering::Relaxed) as u64)
.as_secs_f32();
let download_rate = fetch.session.channel().get_download_rate_estimate();
let desired_pending_bytes = max(
(PREFETCH_THRESHOLD_FACTOR
* ping_time_seconds
* fetch.shared.stream_data_rate as f64) as usize,
(FAST_PREFETCH_THRESHOLD_FACTOR * ping_time_seconds * download_rate as f64)
* fetch.shared.stream_data_rate as f32) as usize,
(FAST_PREFETCH_THRESHOLD_FACTOR * ping_time_seconds * download_rate as f32)
as usize,
);

View file

@ -11,6 +11,6 @@ mod range_set;
pub use decrypt::AudioDecrypt;
pub use fetch::{AudioFile, StreamLoaderController};
pub use fetch::{
READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS, READ_AHEAD_BEFORE_PLAYBACK_SECONDS,
READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS, READ_AHEAD_DURING_PLAYBACK_SECONDS,
READ_AHEAD_BEFORE_PLAYBACK, READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS, READ_AHEAD_DURING_PLAYBACK,
READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS,
};

View file

@ -8,25 +8,15 @@ repository = "https://github.com/librespot-org/librespot"
edition = "2018"
[dependencies]
aes-ctr = "0.6"
base64 = "0.13"
form_urlencoded = "1.0"
futures-core = "0.3"
futures-util = { version = "0.3.5", default_features = false }
hmac = "0.11"
hyper = { version = "0.14", features = ["server", "http1", "tcp"] }
libmdns = "0.6"
log = "0.4"
protobuf = "2.14.0"
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.25"
sha-1 = "0.9"
tokio = { version = "1.0", features = ["macros", "rt", "sync"] }
serde_json = "1.0"
tokio = { version = "1.0", features = ["macros", "sync"] }
tokio-stream = "0.1.1"
url = "2.1"
dns-sd = { version = "0.1.3", optional = true }
[dependencies.librespot-core]
path = "../core"
@ -40,6 +30,9 @@ version = "0.2.0"
path = "../protocol"
version = "0.2.0"
[features]
with-dns-sd = ["dns-sd"]
[dependencies.librespot-discovery]
path = "../discovery"
version = "0.2.0"
[features]
with-dns-sd = ["librespot-discovery/with-dns-sd"]

View file

@ -1,203 +1,19 @@
use aes_ctr::cipher::generic_array::GenericArray;
use aes_ctr::cipher::{NewStreamCipher, SyncStreamCipher};
use aes_ctr::Aes128Ctr;
use futures_core::Stream;
use hmac::{Hmac, Mac, NewMac};
use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Method, Request, Response, StatusCode};
use serde_json::json;
use sha1::{Digest, Sha1};
use tokio::sync::{mpsc, oneshot};
#[cfg(feature = "with-dns-sd")]
use dns_sd::DNSService;
use librespot_core::authentication::Credentials;
use librespot_core::config::ConnectConfig;
use librespot_core::diffie_hellman::DhLocalKeys;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::convert::Infallible;
use std::io;
use std::net::{Ipv4Addr, SocketAddr};
use std::pin::Pin;
use std::sync::Arc;
use std::task::{Context, Poll};
type HmacSha1 = Hmac<Sha1>;
use futures_util::Stream;
use librespot_core::authentication::Credentials;
use librespot_core::config::ConnectConfig;
#[derive(Clone)]
struct Discovery(Arc<DiscoveryInner>);
struct DiscoveryInner {
config: ConnectConfig,
device_id: String,
keys: DhLocalKeys,
tx: mpsc::UnboundedSender<Credentials>,
}
pub struct DiscoveryStream(librespot_discovery::Discovery);
impl Discovery {
fn new(
config: ConnectConfig,
device_id: String,
) -> (Discovery, mpsc::UnboundedReceiver<Credentials>) {
let (tx, rx) = mpsc::unbounded_channel();
impl Stream for DiscoveryStream {
type Item = Credentials;
let discovery = Discovery(Arc::new(DiscoveryInner {
config,
device_id,
keys: DhLocalKeys::random(&mut rand::thread_rng()),
tx,
}));
(discovery, rx)
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
Pin::new(&mut self.0).poll_next(cx)
}
fn handle_get_info(&self, _: BTreeMap<Cow<'_, str>, Cow<'_, str>>) -> Response<hyper::Body> {
let public_key = base64::encode(&self.0.keys.public_key());
let result = json!({
"status": 101,
"statusString": "ERROR-OK",
"spotifyError": 0,
"version": "2.7.1",
"deviceID": (self.0.device_id),
"remoteName": (self.0.config.name),
"activeUser": "",
"publicKey": (public_key),
"deviceType": (self.0.config.device_type.to_string().to_uppercase()),
"libraryVersion": "0.1.0",
"accountReq": "PREMIUM",
"brandDisplayName": "librespot",
"modelDisplayName": "librespot",
"resolverVersion": "0",
"groupStatus": "NONE",
"voiceSupport": "NO",
});
let body = result.to_string();
Response::new(Body::from(body))
}
fn handle_add_user(
&self,
params: BTreeMap<Cow<'_, str>, Cow<'_, str>>,
) -> Response<hyper::Body> {
let username = params.get("userName").unwrap().as_ref();
let encrypted_blob = params.get("blob").unwrap();
let client_key = params.get("clientKey").unwrap();
let encrypted_blob = base64::decode(encrypted_blob.as_bytes()).unwrap();
let shared_key = self
.0
.keys
.shared_secret(&base64::decode(client_key.as_bytes()).unwrap());
let iv = &encrypted_blob[0..16];
let encrypted = &encrypted_blob[16..encrypted_blob.len() - 20];
let cksum = &encrypted_blob[encrypted_blob.len() - 20..encrypted_blob.len()];
let base_key = Sha1::digest(&shared_key);
let base_key = &base_key[..16];
let checksum_key = {
let mut h = HmacSha1::new_from_slice(base_key).expect("HMAC can take key of any size");
h.update(b"checksum");
h.finalize().into_bytes()
};
let encryption_key = {
let mut h = HmacSha1::new_from_slice(&base_key).expect("HMAC can take key of any size");
h.update(b"encryption");
h.finalize().into_bytes()
};
let mut h = HmacSha1::new_from_slice(&checksum_key).expect("HMAC can take key of any size");
h.update(encrypted);
if h.verify(cksum).is_err() {
warn!("Login error for user {:?}: MAC mismatch", username);
let result = json!({
"status": 102,
"spotifyError": 1,
"statusString": "ERROR-MAC"
});
let body = result.to_string();
return Response::new(Body::from(body));
}
let decrypted = {
let mut data = encrypted.to_vec();
let mut cipher = Aes128Ctr::new(
&GenericArray::from_slice(&encryption_key[0..16]),
&GenericArray::from_slice(iv),
);
cipher.apply_keystream(&mut data);
String::from_utf8(data).unwrap()
};
let credentials =
Credentials::with_blob(username.to_string(), &decrypted, &self.0.device_id);
self.0.tx.send(credentials).unwrap();
let result = json!({
"status": 101,
"spotifyError": 0,
"statusString": "ERROR-OK"
});
let body = result.to_string();
Response::new(Body::from(body))
}
fn not_found(&self) -> Response<hyper::Body> {
let mut res = Response::default();
*res.status_mut() = StatusCode::NOT_FOUND;
res
}
async fn call(self, request: Request<Body>) -> hyper::Result<Response<Body>> {
let mut params = BTreeMap::new();
let (parts, body) = request.into_parts();
if let Some(query) = parts.uri.query() {
let query_params = url::form_urlencoded::parse(query.as_bytes());
params.extend(query_params);
}
if parts.method != Method::GET {
debug!("{:?} {:?} {:?}", parts.method, parts.uri.path(), params);
}
let body = hyper::body::to_bytes(body).await?;
params.extend(url::form_urlencoded::parse(&body));
Ok(
match (parts.method, params.get("action").map(AsRef::as_ref)) {
(Method::GET, Some("getInfo")) => self.handle_get_info(params),
(Method::POST, Some("addUser")) => self.handle_add_user(params),
_ => self.not_found(),
},
)
}
}
#[cfg(feature = "with-dns-sd")]
pub struct DiscoveryStream {
credentials: mpsc::UnboundedReceiver<Credentials>,
_svc: DNSService,
_close_tx: oneshot::Sender<Infallible>,
}
#[cfg(not(feature = "with-dns-sd"))]
pub struct DiscoveryStream {
credentials: mpsc::UnboundedReceiver<Credentials>,
_svc: libmdns::Service,
_close_tx: oneshot::Sender<Infallible>,
}
pub fn discovery(
@ -205,59 +21,11 @@ pub fn discovery(
device_id: String,
port: u16,
) -> io::Result<DiscoveryStream> {
let (discovery, creds_rx) = Discovery::new(config.clone(), device_id);
let (close_tx, close_rx) = oneshot::channel();
let address = SocketAddr::new(Ipv4Addr::UNSPECIFIED.into(), port);
let make_service = make_service_fn(move |_| {
let discovery = discovery.clone();
async move { Ok::<_, hyper::Error>(service_fn(move |request| discovery.clone().call(request))) }
});
let server = hyper::Server::bind(&address).serve(make_service);
let s_port = server.local_addr().port();
debug!("Zeroconf server listening on 0.0.0.0:{}", s_port);
tokio::spawn(server.with_graceful_shutdown(async {
close_rx.await.unwrap_err();
debug!("Shutting down discovery server");
}));
#[cfg(feature = "with-dns-sd")]
let svc = DNSService::register(
Some(&*config.name),
"_spotify-connect._tcp",
None,
None,
s_port,
&["VERSION=1.0", "CPath=/"],
)
.unwrap();
#[cfg(not(feature = "with-dns-sd"))]
let responder = libmdns::Responder::spawn(&tokio::runtime::Handle::current())?;
#[cfg(not(feature = "with-dns-sd"))]
let svc = responder.register(
"_spotify-connect._tcp".to_owned(),
config.name,
s_port,
&["VERSION=1.0", "CPath=/"],
);
Ok(DiscoveryStream {
credentials: creds_rx,
_svc: svc,
_close_tx: close_tx,
})
}
impl Stream for DiscoveryStream {
type Item = Credentials;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
self.credentials.poll_recv(cx)
}
librespot_discovery::Discovery::builder(device_id)
.device_type(config.device_type)
.port(port)
.name(config.name)
.launch()
.map(DiscoveryStream)
.map_err(|e| io::Error::new(io::ErrorKind::Other, e))
}
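
For reference, a hedged sketch of how the new `librespot-discovery` crate might be consumed directly, pieced together from the builder calls and the `Stream` shape visible in this diff; the exact signatures and defaults (e.g. whether port `0` means a random port) are assumptions, not taken from the crate's documentation.

```rust
// Assumes librespot-discovery re-uses librespot-core's Credentials type and that
// Discovery implements Stream<Item = Credentials> + Unpin, as the wrapper above suggests.
use futures_util::StreamExt;
use librespot_core::authentication::Credentials;

async fn first_credentials(device_id: String) -> Option<Credentials> {
    let mut discovery = librespot_discovery::Discovery::builder(device_id)
        .name("Librespot".to_string())
        .port(0) // assumption: 0 lets the OS pick a free port
        .launch()
        .ok()?;

    // Each item is the credentials of a Spotify app on the LAN that selected
    // this device and completed the zeroconf `addUser` handshake.
    discovery.next().await
}
```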

View file

@ -6,5 +6,9 @@ use librespot_playback as playback;
use librespot_protocol as protocol;
pub mod context;
#[deprecated(
since = "0.2.1",
note = "Please use the crate `librespot_discovery` instead."
)]
pub mod discovery;
pub mod spirc;

View file

@ -3,7 +3,7 @@ use std::pin::Pin;
use std::time::{SystemTime, UNIX_EPOCH};
use crate::context::StationContext;
use crate::core::config::{ConnectConfig, VolumeCtrl};
use crate::core::config::ConnectConfig;
use crate::core::mercury::{MercuryError, MercurySender};
use crate::core::session::Session;
use crate::core::spotify_id::{SpotifyAudioType, SpotifyId, SpotifyIdError};
@ -54,7 +54,6 @@ struct SpircTask {
device: DeviceState,
state: State,
play_request_id: Option<u64>,
mixer_started: bool,
play_status: SpircPlayStatus,
subscription: BoxedStream<Frame>,
@ -82,13 +81,15 @@ pub enum SpircCommand {
}
struct SpircTaskConfig {
volume_ctrl: VolumeCtrl,
autoplay: bool,
}
const CONTEXT_TRACKS_HISTORY: usize = 10;
const CONTEXT_FETCH_THRESHOLD: u32 = 5;
const VOLUME_STEPS: i64 = 64;
const VOLUME_STEP_SIZE: u16 = 1024; // (u16::MAX + 1) / VOLUME_STEPS
pub struct Spirc {
commands: mpsc::UnboundedSender<SpircCommand>,
}
@ -163,10 +164,10 @@ fn initial_device_state(config: ConnectConfig) -> DeviceState {
msg.set_typ(protocol::spirc::CapabilityType::kVolumeSteps);
{
let repeated = msg.mut_intValue();
if let VolumeCtrl::Fixed = config.volume_ctrl {
repeated.push(0)
if config.has_volume_ctrl {
repeated.push(VOLUME_STEPS)
} else {
repeated.push(64)
repeated.push(0)
}
};
msg
@ -214,36 +215,6 @@ fn initial_device_state(config: ConnectConfig) -> DeviceState {
}
}
fn calc_logarithmic_volume(volume: u16) -> u16 {
// Volume conversion taken from https://www.dr-lex.be/info-stuff/volumecontrols.html#ideal2
// Convert the given volume [0..0xffff] to a dB gain
// We assume a dB range of 60dB.
// Use the equation: a * exp(b * x)
// in which a = IDEAL_FACTOR, b = 1/1000
const IDEAL_FACTOR: f64 = 6.908;
let normalized_volume = volume as f64 / std::u16::MAX as f64; // To get a value between 0 and 1
let mut val = std::u16::MAX;
// Prevent val > std::u16::MAX due to rounding errors
if normalized_volume < 0.999 {
let new_volume = (normalized_volume * IDEAL_FACTOR).exp() / 1000.0;
val = (new_volume * std::u16::MAX as f64) as u16;
}
debug!("input volume:{} to mixer: {}", volume, val);
// return the scale factor (0..0xffff) (equivalent to a voltage multiplier).
val
}
fn volume_to_mixer(volume: u16, volume_ctrl: &VolumeCtrl) -> u16 {
match volume_ctrl {
VolumeCtrl::Linear => volume,
VolumeCtrl::Log => calc_logarithmic_volume(volume),
VolumeCtrl::Fixed => volume,
}
}
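
A quick standalone numeric check of the constant used in the removed curve above, not part of the codebase: `IDEAL_FACTOR` is ln(1000) ≈ 6.908, and a factor of 1000 is exactly the assumed 60 dB span.

```rust
fn main() {
    // ln(1000) ≈ 6.908 is the IDEAL_FACTOR from the comment above.
    let ideal_factor = 1000f64.ln();
    assert!((ideal_factor - 6.908).abs() < 1e-3);

    // 20 * log10(1000) = 60 dB: the curve spans the "assumed dB range of 60dB".
    let db_span = 20.0 * 1000f64.log10();
    assert!((db_span - 60.0).abs() < 1e-9);

    // At full volume the scale factor is exp(IDEAL_FACTOR) / 1000 ≈ 1.0.
    let full = ideal_factor.exp() / 1000.0;
    assert!((full - 1.0).abs() < 1e-3);
}
```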
fn url_encode(bytes: impl AsRef<[u8]>) -> String {
form_urlencoded::byte_serialize(bytes.as_ref()).collect()
}
@ -280,9 +251,8 @@ impl Spirc {
let (cmd_tx, cmd_rx) = mpsc::unbounded_channel();
let volume = config.volume;
let initial_volume = config.initial_volume;
let task_config = SpircTaskConfig {
volume_ctrl: config.volume_ctrl.to_owned(),
autoplay: config.autoplay,
};
@ -302,7 +272,6 @@ impl Spirc {
device,
state: initial_state(),
play_request_id: None,
mixer_started: false,
play_status: SpircPlayStatus::Stopped,
subscription,
@ -318,7 +287,12 @@ impl Spirc {
context: None,
};
task.set_volume(volume);
if let Some(volume) = initial_volume {
task.set_volume(volume);
} else {
let current_volume = task.mixer.volume();
task.set_volume(current_volume);
}
let spirc = Spirc { commands: cmd_tx };
@ -437,20 +411,6 @@ impl SpircTask {
dur.as_millis() as i64 + 1000 * self.session.time_delta()
}
fn ensure_mixer_started(&mut self) {
if !self.mixer_started {
self.mixer.start();
self.mixer_started = true;
}
}
fn ensure_mixer_stopped(&mut self) {
if self.mixer_started {
self.mixer.stop();
self.mixer_started = false;
}
}
fn update_state_position(&mut self, position_ms: u32) {
let now = self.now_ms();
self.state.set_position_measured_at(now as u64);
@ -600,7 +560,6 @@ impl SpircTask {
_ => {
warn!("The player has stopped unexpectedly.");
self.state.set_status(PlayStatus::kPlayStatusStop);
self.ensure_mixer_stopped();
self.notify(None, true);
self.play_status = SpircPlayStatus::Stopped;
}
@ -659,7 +618,6 @@ impl SpircTask {
info!("No more tracks left in queue");
self.state.set_status(PlayStatus::kPlayStatusStop);
self.player.stop();
self.mixer.stop();
self.play_status = SpircPlayStatus::Stopped;
}
@ -767,7 +725,6 @@ impl SpircTask {
self.device.set_is_active(false);
self.state.set_status(PlayStatus::kPlayStatusStop);
self.player.stop();
self.ensure_mixer_stopped();
self.play_status = SpircPlayStatus::Stopped;
}
}
@ -782,7 +739,11 @@ impl SpircTask {
position_ms,
preloading_of_next_track_triggered,
} => {
self.ensure_mixer_started();
// Synchronize the volume from the mixer. This is useful on
// systems that can switch sources from and back to librespot.
let current_volume = self.mixer.volume();
self.set_volume(current_volume);
self.player.play();
self.state.set_status(PlayStatus::kPlayStatusPlay);
self.update_state_position(position_ms);
@ -792,7 +753,6 @@ impl SpircTask {
};
}
SpircPlayStatus::LoadingPause { position_ms } => {
self.ensure_mixer_started();
self.player.play();
self.play_status = SpircPlayStatus::LoadingPlay { position_ms };
}
@ -962,7 +922,6 @@ impl SpircTask {
self.state.set_playing_track_index(0);
self.state.set_status(PlayStatus::kPlayStatusStop);
self.player.stop();
self.ensure_mixer_stopped();
self.play_status = SpircPlayStatus::Stopped;
}
}
@ -1007,19 +966,13 @@ impl SpircTask {
}
fn handle_volume_up(&mut self) {
let mut volume: u32 = self.device.get_volume() as u32 + 4096;
if volume > 0xFFFF {
volume = 0xFFFF;
}
self.set_volume(volume as u16);
let volume = (self.device.get_volume() as u16).saturating_add(VOLUME_STEP_SIZE);
self.set_volume(volume);
}
fn handle_volume_down(&mut self) {
let mut volume: i32 = self.device.get_volume() as i32 - 4096;
if volume < 0 {
volume = 0;
}
self.set_volume(volume as u16);
let volume = (self.device.get_volume() as u16).saturating_sub(VOLUME_STEP_SIZE);
self.set_volume(volume);
}
fn handle_end_of_track(&mut self) {
@ -1243,7 +1196,6 @@ impl SpircTask {
None => {
self.state.set_status(PlayStatus::kPlayStatusStop);
self.player.stop();
self.ensure_mixer_stopped();
self.play_status = SpircPlayStatus::Stopped;
}
}
@ -1273,8 +1225,7 @@ impl SpircTask {
fn set_volume(&mut self, volume: u16) {
self.device.set_volume(volume as u32);
self.mixer
.set_volume(volume_to_mixer(volume, &self.config.volume_ctrl));
self.mixer.set_volume(volume);
if let Some(cache) = self.session.cache() {
cache.save_volume(volume)
}
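
To illustrate the new stepping, a standalone sketch with simplified types (not the `SpircTask` code itself): saturating `u16` arithmetic keeps the volume clamped to `[0, u16::MAX]` without the manual bounds checks the old handlers used.

```rust
const VOLUME_STEPS: u16 = 64;
const VOLUME_STEP_SIZE: u16 = 1024; // (u16::MAX + 1) / VOLUME_STEPS

fn main() {
    // The step size is one 64th of the full u16 volume range.
    assert_eq!((u16::MAX as u32 + 1) / VOLUME_STEPS as u32, VOLUME_STEP_SIZE as u32);

    // Volume up near the top clamps at full volume instead of wrapping.
    let near_max: u16 = 65_000;
    assert_eq!(near_max.saturating_add(VOLUME_STEP_SIZE), u16::MAX);

    // Volume down near the bottom clamps at mute instead of underflowing.
    let near_min: u16 = 100;
    assert_eq!(near_min.saturating_sub(VOLUME_STEP_SIZE), 0);
}
```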

View file

@ -1,5 +1,7 @@
[Unit]
Description=Librespot
Description=Librespot (an open source Spotify client)
Documentation=https://github.com/librespot-org/librespot
Documentation=https://github.com/librespot-org/librespot/wiki/Options
Requires=network-online.target
After=network-online.target
@ -8,8 +10,7 @@ User=nobody
Group=audio
Restart=always
RestartSec=10
ExecStart=/usr/bin/librespot -n "%p on %H"
ExecStart=/usr/bin/librespot --name "%p@%H"
[Install]
WantedBy=multi-user.target

View file

@ -0,0 +1,12 @@
[Unit]
Description=Librespot (an open source Spotify client)
Documentation=https://github.com/librespot-org/librespot
Documentation=https://github.com/librespot-org/librespot/wiki/Options
[Service]
Restart=always
RestartSec=10
ExecStart=/usr/bin/librespot --name "%u@%H"
[Install]
WantedBy=default.target

View file

@ -26,7 +26,9 @@ http = "0.2"
hyper = { version = "0.14", features = ["client", "tcp", "http1"] }
hyper-proxy = { version = "0.9.1", default-features = false }
log = "0.4"
num = "0.4"
num-bigint = { version = "0.4", features = ["rand"] }
num-derive = "0.3"
num-integer = "0.1"
num-traits = "0.2"
once_cell = "1.5.2"

View file

@ -1,132 +1,141 @@
use std::error::Error;
use hyper::client::HttpConnector;
use hyper::{Body, Client, Request};
use hyper_proxy::{Intercept, Proxy, ProxyConnector};
use hyper::{Body, Request};
use serde::Deserialize;
use url::Url;
const APRESOLVE_ENDPOINT: &str =
"http://apresolve.spotify.com/?type=accesspoint&type=dealer&type=spclient";
// These addresses probably do some geo-location based traffic management or at least DNS-based
// load balancing. They are known to fail when the normal resolvers are up, so that's why they
// should only be used as fallback.
const AP_FALLBACK: &str = "ap.spotify.com";
const DEALER_FALLBACK: &str = "dealer.spotify.com";
const SPCLIENT_FALLBACK: &str = "spclient.wg.spotify.com";
const FALLBACK_PORT: u16 = 443;
use std::error::Error;
use std::sync::atomic::{AtomicUsize, Ordering};
pub type SocketAddress = (String, u16);
#[derive(Clone, Debug, Default, Deserialize)]
#[derive(Default)]
struct AccessPoints {
accesspoint: Vec<SocketAddress>,
dealer: Vec<SocketAddress>,
spclient: Vec<SocketAddress>,
}
#[derive(Deserialize)]
struct ApResolveData {
accesspoint: Vec<String>,
dealer: Vec<String>,
spclient: Vec<String>,
}
#[derive(Clone, Debug, Deserialize)]
pub struct AccessPoints {
pub accesspoint: SocketAddress,
pub dealer: SocketAddress,
pub spclient: SocketAddress,
}
fn select_ap(data: Vec<String>, fallback: &str, ap_port: Option<u16>) -> SocketAddress {
let port = ap_port.unwrap_or(FALLBACK_PORT);
let mut aps = data.into_iter().filter_map(|ap| {
let mut split = ap.rsplitn(2, ':');
let port = split
.next()
.expect("rsplitn should not return empty iterator");
let host = split.next()?.to_owned();
let port: u16 = port.parse().ok()?;
Some((host, port))
});
let ap = if ap_port.is_some() {
aps.find(|(_, p)| *p == port)
} else {
aps.next()
};
ap.unwrap_or_else(|| (String::from(fallback), port))
}
async fn try_apresolve(proxy: Option<&Url>) -> Result<ApResolveData, Box<dyn Error>> {
let req = Request::builder()
.method("GET")
.uri(APRESOLVE_ENDPOINT)
.body(Body::empty())
.unwrap();
let response = if let Some(url) = proxy {
// Panic safety: all URLs are valid URIs
let uri = url.to_string().parse().unwrap();
let proxy = Proxy::new(Intercept::All, uri);
let connector = HttpConnector::new();
let proxy_connector = ProxyConnector::from_proxy_unsecured(connector, proxy);
Client::builder()
.build(proxy_connector)
.request(req)
.await?
} else {
Client::new().request(req).await?
};
let body = hyper::body::to_bytes(response.into_body()).await?;
let data: ApResolveData = serde_json::from_slice(body.as_ref())?;
Ok(data)
}
pub async fn apresolve(proxy: Option<&Url>, ap_port: Option<u16>) -> AccessPoints {
let data = try_apresolve(proxy).await.unwrap_or_else(|e| {
warn!("Failed to resolve access points: {}, using fallbacks.", e);
ApResolveData::default()
});
let accesspoint = select_ap(data.accesspoint, AP_FALLBACK, ap_port);
let dealer = select_ap(data.dealer, DEALER_FALLBACK, ap_port);
let spclient = select_ap(data.spclient, SPCLIENT_FALLBACK, ap_port);
AccessPoints {
accesspoint,
dealer,
spclient,
// These addresses probably do some geo-location based traffic management or at least DNS-based
// load balancing. They are known to fail when the normal resolvers are up, so that's why they
// should only be used as fallback.
impl Default for ApResolveData {
fn default() -> Self {
Self {
accesspoint: vec![String::from("ap.spotify.com:443")],
dealer: vec![String::from("dealer.spotify.com:443")],
spclient: vec![String::from("spclient.wg.spotify.com:443")],
}
}
}
#[cfg(test)]
mod test {
use std::net::ToSocketAddrs;
use super::apresolve;
#[tokio::test]
async fn test_apresolve() {
let aps = apresolve(None, None).await;
// Assert that the result contains a valid host and port
aps.accesspoint.to_socket_addrs().unwrap().next().unwrap();
aps.dealer.to_socket_addrs().unwrap().next().unwrap();
aps.spclient.to_socket_addrs().unwrap().next().unwrap();
}
#[tokio::test]
async fn test_apresolve_port_443() {
let aps = apresolve(None, Some(443)).await;
let port = aps
.accesspoint
.to_socket_addrs()
.unwrap()
.next()
.unwrap()
.port();
assert_eq!(port, 443);
component! {
ApResolver : ApResolverInner {
data: AccessPoints = AccessPoints::default(),
spinlock: AtomicUsize = AtomicUsize::new(0),
}
}
impl ApResolver {
// return a port if a proxy URL and/or a proxy port was specified. This is useful even when
// there is no proxy, but firewalls only allow certain ports (e.g. 443 and not 4070).
fn port_config(&self) -> Option<u16> {
if self.session().config().proxy.is_some() || self.session().config().ap_port.is_some() {
Some(self.session().config().ap_port.unwrap_or(443))
} else {
None
}
}
fn process_data(&self, data: Vec<String>) -> Vec<SocketAddress> {
data.into_iter()
.filter_map(|ap| {
let mut split = ap.rsplitn(2, ':');
let port = split
.next()
.expect("rsplitn should not return empty iterator");
let host = split.next()?.to_owned();
let port: u16 = port.parse().ok()?;
if let Some(p) = self.port_config() {
if p != port {
return None;
}
}
Some((host, port))
})
.collect()
}
async fn try_apresolve(&self) -> Result<ApResolveData, Box<dyn Error>> {
let req = Request::builder()
.method("GET")
.uri("http://apresolve.spotify.com/?type=accesspoint&type=dealer&type=spclient")
.body(Body::empty())
.unwrap();
let body = self.session().http_client().request_body(req).await?;
let data: ApResolveData = serde_json::from_slice(body.as_ref())?;
Ok(data)
}
async fn apresolve(&self) {
let result = self.try_apresolve().await;
self.lock(|inner| {
let data = match result {
Ok(data) => data,
Err(e) => {
warn!("Failed to resolve access points, using fallbacks: {}", e);
ApResolveData::default()
}
};
inner.data.accesspoint = self.process_data(data.accesspoint);
inner.data.dealer = self.process_data(data.dealer);
inner.data.spclient = self.process_data(data.spclient);
})
}
fn is_empty(&self) -> bool {
self.lock(|inner| {
inner.data.accesspoint.is_empty()
|| inner.data.dealer.is_empty()
|| inner.data.spclient.is_empty()
})
}
pub async fn resolve(&self, endpoint: &str) -> SocketAddress {
// Use a spinlock to make this function atomic. Otherwise, various race conditions may
// occur, e.g. when the session is created, multiple components are launched almost in
// parallel and they will all call this function, while resolving is still in progress.
self.lock(|inner| {
while inner.spinlock.load(Ordering::SeqCst) != 0 {
#[allow(deprecated)]
std::sync::atomic::spin_loop_hint()
}
inner.spinlock.store(1, Ordering::SeqCst);
});
if self.is_empty() {
self.apresolve().await;
}
self.lock(|inner| {
let access_point = match endpoint {
// take the first position instead of the last with `pop`, because Spotify returns
// access points with ports 4070, 443 and 80 in order of preference from highest
// to lowest.
"accesspoint" => inner.data.accesspoint.remove(0),
"dealer" => inner.data.dealer.remove(0),
"spclient" => inner.data.spclient.remove(0),
_ => unimplemented!(),
};
inner.spinlock.store(0, Ordering::SeqCst);
access_point
})
}
}
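For illustration only (not part of this commit's diff), a minimal sketch of how a component would use the new resolver; it assumes a connected librespot `Session` exposing the `apresolver()` accessor introduced later in this commit:
// Hypothetical usage: resolve an "spclient" address and turn it into a base URL.
async fn spclient_base_url(session: &Session) -> String {
    let (host, port) = session.apresolver().resolve("spclient").await;
    format!("https://{}:{}", host, port)
}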

View file

@ -4,6 +4,7 @@ use std::collections::HashMap;
use std::io::Write;
use tokio::sync::oneshot;
use crate::packet::PacketType;
use crate::spotify_id::{FileId, SpotifyId};
use crate::util::SeqGenerator;
@ -21,19 +22,19 @@ component! {
}
impl AudioKeyManager {
pub(crate) fn dispatch(&self, cmd: u8, mut data: Bytes) {
pub(crate) fn dispatch(&self, cmd: PacketType, mut data: Bytes) {
let seq = BigEndian::read_u32(data.split_to(4).as_ref());
let sender = self.lock(|inner| inner.pending.remove(&seq));
if let Some(sender) = sender {
match cmd {
0xd => {
PacketType::AesKey => {
let mut key = [0u8; 16];
key.copy_from_slice(data.as_ref());
let _ = sender.send(Ok(AudioKey(key)));
}
0xe => {
PacketType::AesKeyError => {
warn!(
"error audio key {:x} {:x}",
data.as_ref()[0],
@ -61,11 +62,11 @@ impl AudioKeyManager {
fn send_key_request(&self, seq: u32, track: SpotifyId, file: FileId) {
let mut data: Vec<u8> = Vec::new();
data.write(&file.0).unwrap();
data.write(&track.to_raw()).unwrap();
data.write_all(&file.0).unwrap();
data.write_all(&track.to_raw()).unwrap();
data.write_u32::<BigEndian>(seq).unwrap();
data.write_u16::<BigEndian>(0x0000).unwrap();
self.session().send_packet(0xc, data)
self.session().send_packet(PacketType::RequestKey, data)
}
}

View file

@ -8,8 +8,10 @@ use bytes::Bytes;
use futures_core::Stream;
use futures_util::lock::BiLock;
use futures_util::{ready, StreamExt};
use num_traits::FromPrimitive;
use tokio::sync::mpsc;
use crate::packet::PacketType;
use crate::util::SeqGenerator;
component! {
@ -23,6 +25,8 @@ component! {
}
}
const ONE_SECOND_IN_MS: usize = 1000;
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
pub struct ChannelError;
@ -66,7 +70,7 @@ impl ChannelManager {
(seq, channel)
}
pub(crate) fn dispatch(&self, cmd: u8, mut data: Bytes) {
pub(crate) fn dispatch(&self, cmd: PacketType, mut data: Bytes) {
use std::collections::hash_map::Entry;
let id: u16 = BigEndian::read_u16(data.split_to(2).as_ref());
@ -74,8 +78,11 @@ impl ChannelManager {
self.lock(|inner| {
let current_time = Instant::now();
if let Some(download_measurement_start) = inner.download_measurement_start {
if (current_time - download_measurement_start).as_millis() > 1000 {
inner.download_rate_estimate = 1000 * inner.download_measurement_bytes
if (current_time - download_measurement_start).as_millis()
> ONE_SECOND_IN_MS as u128
{
inner.download_rate_estimate = ONE_SECOND_IN_MS
* inner.download_measurement_bytes
/ (current_time - download_measurement_start).as_millis() as usize;
inner.download_measurement_start = Some(current_time);
inner.download_measurement_bytes = 0;
@ -87,7 +94,7 @@ impl ChannelManager {
inner.download_measurement_bytes += data.len();
if let Entry::Occupied(entry) = inner.channels.entry(id) {
let _ = entry.get().send((cmd, data));
let _ = entry.get().send((cmd as u8, data));
}
});
}
@ -109,7 +116,8 @@ impl Channel {
fn recv_packet(&mut self, cx: &mut Context<'_>) -> Poll<Result<Bytes, ChannelError>> {
let (cmd, packet) = ready!(self.receiver.poll_recv(cx)).ok_or(ChannelError)?;
if cmd == 0xa {
let packet_type = FromPrimitive::from_u8(cmd);
if let Some(PacketType::ChannelError) = packet_type {
let code = BigEndian::read_u16(&packet.as_ref()[..2]);
error!("channel error: {} {}", packet.len(), code);

View file

@ -71,30 +71,43 @@ impl FromStr for DeviceType {
}
}
impl From<&DeviceType> for &str {
fn from(d: &DeviceType) -> &'static str {
use self::DeviceType::*;
match d {
Unknown => "Unknown",
Computer => "Computer",
Tablet => "Tablet",
Smartphone => "Smartphone",
Speaker => "Speaker",
Tv => "TV",
Avr => "AVR",
Stb => "STB",
AudioDongle => "AudioDongle",
GameConsole => "GameConsole",
CastAudio => "CastAudio",
CastVideo => "CastVideo",
Automobile => "Automobile",
Smartwatch => "Smartwatch",
Chromebook => "Chromebook",
UnknownSpotify => "UnknownSpotify",
CarThing => "CarThing",
Observer => "Observer",
HomeThing => "HomeThing",
}
}
}
impl From<DeviceType> for &str {
fn from(d: DeviceType) -> &'static str {
(&d).into()
}
}
impl fmt::Display for DeviceType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::DeviceType::*;
match *self {
Unknown => f.write_str("Unknown"),
Computer => f.write_str("Computer"),
Tablet => f.write_str("Tablet"),
Smartphone => f.write_str("Smartphone"),
Speaker => f.write_str("Speaker"),
Tv => f.write_str("TV"),
Avr => f.write_str("AVR"),
Stb => f.write_str("STB"),
AudioDongle => f.write_str("AudioDongle"),
GameConsole => f.write_str("GameConsole"),
CastAudio => f.write_str("CastAudio"),
CastVideo => f.write_str("CastVideo"),
Automobile => f.write_str("Automobile"),
Smartwatch => f.write_str("Smartwatch"),
Chromebook => f.write_str("Chromebook"),
UnknownSpotify => f.write_str("UnknownSpotify"),
CarThing => f.write_str("CarThing"),
Observer => f.write_str("Observer"),
HomeThing => f.write_str("HomeThing"),
}
let str: &str = self.into();
f.write_str(str)
}
}
@ -108,33 +121,7 @@ impl Default for DeviceType {
pub struct ConnectConfig {
pub name: String,
pub device_type: DeviceType,
pub volume: u16,
pub volume_ctrl: VolumeCtrl,
pub initial_volume: Option<u16>,
pub has_volume_ctrl: bool,
pub autoplay: bool,
}
#[derive(Clone, Debug)]
pub enum VolumeCtrl {
Linear,
Log,
Fixed,
}
impl FromStr for VolumeCtrl {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
use self::VolumeCtrl::*;
match s.to_lowercase().as_ref() {
"linear" => Ok(Linear),
"log" => Ok(Log),
"fixed" => Ok(Fixed),
_ => Err(()),
}
}
}
impl Default for VolumeCtrl {
fn default() -> VolumeCtrl {
VolumeCtrl::Log
}
}

View file

@ -7,6 +7,7 @@ pub use self::handshake::handshake;
use std::io::{self, ErrorKind};
use futures_util::{SinkExt, StreamExt};
use num_traits::FromPrimitive;
use protobuf::{self, Message, ProtobufError};
use thiserror::Error;
use tokio::net::TcpStream;
@ -14,6 +15,7 @@ use tokio_util::codec::Framed;
use url::Url;
use crate::authentication::Credentials;
use crate::packet::PacketType;
use crate::protocol::keyexchange::{APLoginFailed, ErrorCode};
use crate::version;
@ -95,13 +97,14 @@ pub async fn authenticate(
.set_device_id(device_id.to_string());
packet.set_version_string(version::VERSION_STRING.to_string());
let cmd = 0xab;
let cmd = PacketType::Login;
let data = packet.write_to_bytes().unwrap();
transport.send((cmd, data)).await?;
transport.send((cmd as u8, data)).await?;
let (cmd, data) = transport.next().await.expect("EOF")?;
match cmd {
0xac => {
let packet_type = FromPrimitive::from_u8(cmd);
match packet_type {
Some(PacketType::APWelcome) => {
let welcome_data = APWelcome::parse_from_bytes(data.as_ref())?;
let reusable_credentials = Credentials {
@ -112,7 +115,7 @@ pub async fn authenticate(
Ok(reusable_credentials)
}
0xad => {
Some(PacketType::AuthFailure) => {
let error_data = APLoginFailed::parse_from_bytes(data.as_ref())?;
Err(error_data.into())
}

core/src/http_client.rs (new file, 34 lines)
View file

@ -0,0 +1,34 @@
use hyper::client::HttpConnector;
use hyper::{Body, Client, Request, Response};
use hyper_proxy::{Intercept, Proxy, ProxyConnector};
use url::Url;
pub struct HttpClient {
proxy: Option<Url>,
}
impl HttpClient {
pub fn new(proxy: Option<&Url>) -> Self {
Self {
proxy: proxy.cloned(),
}
}
pub async fn request(&self, req: Request<Body>) -> Result<Response<Body>, hyper::Error> {
if let Some(url) = &self.proxy {
// Panic safety: all URLs are valid URIs
let uri = url.to_string().parse().unwrap();
let proxy = Proxy::new(Intercept::All, uri);
let connector = HttpConnector::new();
let proxy_connector = ProxyConnector::from_proxy_unsecured(connector, proxy);
Client::builder().build(proxy_connector).request(req).await
} else {
Client::new().request(req).await
}
}
pub async fn request_body(&self, req: Request<Body>) -> Result<bytes::Bytes, hyper::Error> {
let response = self.request(req).await?;
hyper::body::to_bytes(response.into_body()).await
}
}
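A minimal usage sketch of the new proxy-aware client (hypothetical, not part of this diff): build a `hyper` request, send it, and deserialize the JSON body. The helper name and the `serde` bound are assumptions for illustration.
// Hypothetical helper: GET a URI and parse the response body as JSON.
async fn fetch_json<T: serde::de::DeserializeOwned>(
    client: &HttpClient,
    uri: &str,
) -> Result<T, Box<dyn std::error::Error>> {
    let req = Request::builder().method("GET").uri(uri).body(Body::empty())?;
    let body = client.request_body(req).await?;
    Ok(serde_json::from_slice(&body)?)
}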

View file

@ -1,26 +0,0 @@
use serde::Deserialize;
use crate::{mercury::MercuryError, session::Session};
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Token {
pub access_token: String,
pub expires_in: u32,
pub token_type: String,
pub scope: Vec<String>,
}
pub async fn get_token(
session: &Session,
client_id: &str,
scopes: &str,
) -> Result<Token, MercuryError> {
let url = format!(
"hm://keymaster/token/authenticated?client_id={}&scope={}",
client_id, scopes
);
let response = session.mercury().get(url).await?;
let data = response.payload.first().expect("Empty payload");
serde_json::from_slice(data.as_ref()).map_err(|_| MercuryError)
}

View file

@ -1,7 +1,6 @@
#![allow(clippy::unused_io_amount)]
#[macro_use]
extern crate log;
extern crate num_derive;
use librespot_protocol as protocol;
@ -19,12 +18,15 @@ mod connection;
mod dealer;
#[doc(hidden)]
pub mod diffie_hellman;
pub mod keymaster;
mod http_client;
pub mod mercury;
pub mod packet;
mod proxytunnel;
pub mod session;
mod socket;
mod spclient;
pub mod spotify_id;
mod token;
#[doc(hidden)]
pub mod util;
pub mod version;

View file

@ -11,6 +11,7 @@ use futures_util::FutureExt;
use protobuf::Message;
use tokio::sync::{mpsc, oneshot};
use crate::packet::PacketType;
use crate::protocol;
use crate::util::SeqGenerator;
@ -143,7 +144,7 @@ impl MercuryManager {
}
}
pub(crate) fn dispatch(&self, cmd: u8, mut data: Bytes) {
pub(crate) fn dispatch(&self, cmd: PacketType, mut data: Bytes) {
let seq_len = BigEndian::read_u16(data.split_to(2).as_ref()) as usize;
let seq = data.split_to(seq_len).as_ref().to_owned();
@ -154,14 +155,17 @@ impl MercuryManager {
let mut pending = match pending {
Some(pending) => pending,
None if cmd == 0xb5 => MercuryPending {
parts: Vec::new(),
partial: None,
callback: None,
},
None => {
warn!("Ignore seq {:?} cmd {:x}", seq, cmd);
return;
if let PacketType::MercuryEvent = cmd {
MercuryPending {
parts: Vec::new(),
partial: None,
callback: None,
}
} else {
warn!("Ignore seq {:?} cmd {:x}", seq, cmd as u8);
return;
}
}
};
@ -191,7 +195,7 @@ impl MercuryManager {
data.split_to(size).as_ref().to_owned()
}
fn complete_request(&self, cmd: u8, mut pending: MercuryPending) {
fn complete_request(&self, cmd: PacketType, mut pending: MercuryPending) {
let header_data = pending.parts.remove(0);
let header = protocol::mercury::Header::parse_from_bytes(&header_data).unwrap();
@ -208,7 +212,7 @@ impl MercuryManager {
if let Some(cb) = pending.callback {
let _ = cb.send(Err(MercuryError));
}
} else if cmd == 0xb5 {
} else if let PacketType::MercuryEvent = cmd {
self.lock(|inner| {
let mut found = false;

View file

@ -2,6 +2,7 @@ use byteorder::{BigEndian, WriteBytesExt};
use protobuf::Message;
use std::io::Write;
use crate::packet::PacketType;
use crate::protocol;
#[derive(Debug, PartialEq, Eq)]
@ -43,11 +44,12 @@ impl ToString for MercuryMethod {
}
impl MercuryMethod {
pub fn command(&self) -> u8 {
pub fn command(&self) -> PacketType {
use PacketType::*;
match *self {
MercuryMethod::Get | MercuryMethod::Send => 0xb2,
MercuryMethod::Sub => 0xb3,
MercuryMethod::Unsub => 0xb4,
MercuryMethod::Get | MercuryMethod::Send => MercuryReq,
MercuryMethod::Sub => MercurySub,
MercuryMethod::Unsub => MercuryUnsub,
}
}
}
@ -77,7 +79,7 @@ impl MercuryRequest {
for p in &self.payload {
packet.write_u16::<BigEndian>(p.len() as u16).unwrap();
packet.write(p).unwrap();
packet.write_all(p).unwrap();
}
packet

core/src/packet.rs (new file, 41 lines)
View file

@ -0,0 +1,41 @@
// Ported from librespot-java. Relicensed under MIT with permission.
use num_derive::{FromPrimitive, ToPrimitive};
#[derive(Debug, FromPrimitive, ToPrimitive)]
pub enum PacketType {
SecretBlock = 0x02,
Ping = 0x04,
StreamChunk = 0x08,
StreamChunkRes = 0x09,
ChannelError = 0x0a,
ChannelAbort = 0x0b,
RequestKey = 0x0c,
AesKey = 0x0d,
AesKeyError = 0x0e,
Image = 0x19,
CountryCode = 0x1b,
Pong = 0x49,
PongAck = 0x4a,
Pause = 0x4b,
ProductInfo = 0x50,
LegacyWelcome = 0x69,
LicenseVersion = 0x76,
Login = 0xab,
APWelcome = 0xac,
AuthFailure = 0xad,
MercuryReq = 0xb2,
MercurySub = 0xb3,
MercuryUnsub = 0xb4,
MercuryEvent = 0xb5,
TrackEndedTime = 0x82,
UnknownDataAllZeros = 0x1f,
PreferredLocale = 0x74,
Unknown0x0f = 0x0f,
Unknown0x10 = 0x10,
Unknown0x4f = 0x4f,
// TODO - occurs when subscribing with an empty URI. Maybe a MercuryError?
// Payload: b"\0\x08\0\0\0\0\0\0\0\0\x01\0\x01\0\x03 \xb0\x06"
Unknown0xb6 = 0xb6,
}
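As a hedged sketch of how this enum is consumed elsewhere in this commit: the `FromPrimitive` derive lets a raw command byte from the wire be mapped back to a variant before dispatching, returning `None` for unknown bytes.
use num_traits::FromPrimitive;

// Hypothetical helper: classify a raw command byte; unknown values yield None.
fn classify(cmd: u8) -> Option<PacketType> {
    PacketType::from_u8(cmd)
}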

View file

@ -11,19 +11,23 @@ use byteorder::{BigEndian, ByteOrder};
use bytes::Bytes;
use futures_core::TryStream;
use futures_util::{future, ready, StreamExt, TryStreamExt};
use num_traits::FromPrimitive;
use once_cell::sync::OnceCell;
use thiserror::Error;
use tokio::sync::mpsc;
use tokio_stream::wrappers::UnboundedReceiverStream;
use crate::apresolve::apresolve;
use crate::apresolve::ApResolver;
use crate::audio_key::AudioKeyManager;
use crate::authentication::Credentials;
use crate::cache::Cache;
use crate::channel::ChannelManager;
use crate::config::SessionConfig;
use crate::connection::{self, AuthenticationError};
use crate::http_client::HttpClient;
use crate::mercury::MercuryManager;
use crate::packet::PacketType;
use crate::token::TokenProvider;
#[derive(Debug, Error)]
pub enum SessionError {
@ -44,11 +48,14 @@ struct SessionInternal {
config: SessionConfig,
data: RwLock<SessionData>,
http_client: HttpClient,
tx_connection: mpsc::UnboundedSender<(u8, Vec<u8>)>,
apresolver: OnceCell<ApResolver>,
audio_key: OnceCell<AudioKeyManager>,
channel: OnceCell<ChannelManager>,
mercury: OnceCell<MercuryManager>,
token_provider: OnceCell<TokenProvider>,
cache: Option<Arc<Cache>>,
handle: tokio::runtime::Handle,
@ -67,40 +74,7 @@ impl Session {
credentials: Credentials,
cache: Option<Cache>,
) -> Result<Session, SessionError> {
let ap = apresolve(config.proxy.as_ref(), config.ap_port)
.await
.accesspoint;
info!("Connecting to AP \"{}:{}\"", ap.0, ap.1);
let mut conn = connection::connect(&ap.0, ap.1, config.proxy.as_ref()).await?;
let reusable_credentials =
connection::authenticate(&mut conn, credentials, &config.device_id).await?;
info!("Authenticated as \"{}\" !", reusable_credentials.username);
if let Some(cache) = &cache {
cache.save_credentials(&reusable_credentials);
}
let session = Session::create(
conn,
config,
cache,
reusable_credentials.username,
tokio::runtime::Handle::current(),
);
Ok(session)
}
fn create(
transport: connection::Transport,
config: SessionConfig,
cache: Option<Cache>,
username: String,
handle: tokio::runtime::Handle,
) -> Session {
let (sink, stream) = transport.split();
let http_client = HttpClient::new(config.proxy.as_ref());
let (sender_tx, sender_rx) = mpsc::unbounded_channel();
let session_id = SESSION_COUNTER.fetch_add(1, Ordering::Relaxed);
@ -110,19 +84,37 @@ impl Session {
config,
data: RwLock::new(SessionData {
country: String::new(),
canonical_username: username,
canonical_username: String::new(),
invalid: false,
time_delta: 0,
}),
http_client,
tx_connection: sender_tx,
cache: cache.map(Arc::new),
apresolver: OnceCell::new(),
audio_key: OnceCell::new(),
channel: OnceCell::new(),
mercury: OnceCell::new(),
handle,
token_provider: OnceCell::new(),
handle: tokio::runtime::Handle::current(),
session_id,
}));
let ap = session.apresolver().resolve("accesspoint").await;
info!("Connecting to AP \"{}:{}\"", ap.0, ap.1);
let mut transport =
connection::connect(&ap.0, ap.1, session.config().proxy.as_ref()).await?;
let reusable_credentials =
connection::authenticate(&mut transport, credentials, &session.config().device_id)
.await?;
info!("Authenticated as \"{}\" !", reusable_credentials.username);
session.0.data.write().unwrap().canonical_username = reusable_credentials.username.clone();
if let Some(cache) = session.cache() {
cache.save_credentials(&reusable_credentials);
}
let (sink, stream) = transport.split();
let sender_task = UnboundedReceiverStream::new(sender_rx)
.map(Ok)
.forward(sink);
@ -136,7 +128,13 @@ impl Session {
}
});
session
Ok(session)
}
pub fn apresolver(&self) -> &ApResolver {
self.0
.apresolver
.get_or_init(|| ApResolver::new(self.weak()))
}
pub fn audio_key(&self) -> &AudioKeyManager {
@ -151,12 +149,22 @@ impl Session {
.get_or_init(|| ChannelManager::new(self.weak()))
}
pub fn http_client(&self) -> &HttpClient {
&self.0.http_client
}
pub fn mercury(&self) -> &MercuryManager {
self.0
.mercury
.get_or_init(|| MercuryManager::new(self.weak()))
}
pub fn token_provider(&self) -> &TokenProvider {
self.0
.token_provider
.get_or_init(|| TokenProvider::new(self.weak()))
}
pub fn time_delta(&self) -> i64 {
self.0.data.read().unwrap().time_delta
}
@ -178,10 +186,11 @@ impl Session {
);
}
#[allow(clippy::match_same_arms)]
fn dispatch(&self, cmd: u8, data: Bytes) {
match cmd {
0x4 => {
use PacketType::*;
let packet_type = FromPrimitive::from_u8(cmd);
match packet_type {
Some(Ping) => {
let server_timestamp = BigEndian::read_u32(data.as_ref()) as i64;
let timestamp = match SystemTime::now().duration_since(UNIX_EPOCH) {
Ok(dur) => dur,
@ -192,31 +201,47 @@ impl Session {
self.0.data.write().unwrap().time_delta = server_timestamp - timestamp;
self.debug_info();
self.send_packet(0x49, vec![0, 0, 0, 0]);
self.send_packet(Pong, vec![0, 0, 0, 0]);
}
0x4a => (),
0x1b => {
Some(CountryCode) => {
let country = String::from_utf8(data.as_ref().to_owned()).unwrap();
info!("Country: {:?}", country);
self.0.data.write().unwrap().country = country;
}
0x9 | 0xa => self.channel().dispatch(cmd, data),
0xd | 0xe => self.audio_key().dispatch(cmd, data),
0xb2..=0xb6 => self.mercury().dispatch(cmd, data),
_ => (),
Some(StreamChunkRes) | Some(ChannelError) => {
self.channel().dispatch(packet_type.unwrap(), data);
}
Some(AesKey) | Some(AesKeyError) => {
self.audio_key().dispatch(packet_type.unwrap(), data);
}
Some(MercuryReq) | Some(MercurySub) | Some(MercuryUnsub) | Some(MercuryEvent) => {
self.mercury().dispatch(packet_type.unwrap(), data);
}
Some(PongAck)
| Some(SecretBlock)
| Some(LegacyWelcome)
| Some(UnknownDataAllZeros)
| Some(ProductInfo)
| Some(LicenseVersion) => {}
_ => {
if let Some(packet_type) = PacketType::from_u8(cmd) {
trace!("Ignoring {:?} packet with data {:?}", packet_type, data);
} else {
trace!("Ignoring unknown packet {:x}", cmd);
}
}
}
}
pub fn send_packet(&self, cmd: u8, data: Vec<u8>) {
self.0.tx_connection.send((cmd, data)).unwrap();
pub fn send_packet(&self, cmd: PacketType, data: Vec<u8>) {
self.0.tx_connection.send((cmd as u8, data)).unwrap();
}
pub fn cache(&self) -> Option<&Arc<Cache>> {
self.0.cache.as_ref()
}
fn config(&self) -> &SessionConfig {
pub fn config(&self) -> &SessionConfig {
&self.0.config
}

core/src/spclient.rs (new file, 1 line)
View file

@ -0,0 +1 @@
// https://github.com/librespot-org/librespot-java/blob/27783e06f456f95228c5ac37acf2bff8c1a8a0c4/lib/src/main/java/xyz/gianlu/librespot/dealer/ApiClient.java

View file

@ -116,22 +116,25 @@ impl SpotifyId {
///
/// [Spotify URI]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids
pub fn from_uri(src: &str) -> Result<SpotifyId, SpotifyIdError> {
// We expect the ID to be the last colon-delimited item in the URI.
let b = src.as_bytes();
let id_i = b.len() - SpotifyId::SIZE_BASE62;
if b[id_i - 1] != b':' {
let src = src.strip_prefix("spotify:").ok_or(SpotifyIdError)?;
if src.len() <= SpotifyId::SIZE_BASE62 {
return Err(SpotifyIdError);
}
let mut id = SpotifyId::from_base62(&src[id_i..])?;
let colon_index = src.len() - SpotifyId::SIZE_BASE62 - 1;
// Slice offset by 8 as we are skipping the "spotify:" prefix.
id.audio_type = src[8..id_i - 1].into();
if src.as_bytes()[colon_index] != b':' {
return Err(SpotifyIdError);
}
let mut id = SpotifyId::from_base62(&src[colon_index + 1..])?;
id.audio_type = src[..colon_index].into();
Ok(id)
}
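A hedged example of what the rewritten parser accepts and rejects (the track ID below is the sample from Spotify's developer documentation; this test is illustrative and not part of this diff):
#[test]
fn from_uri_sketch() {
    // Canonical URIs round-trip through base62...
    let id = SpotifyId::from_uri("spotify:track:6rqhFgbbKwnb9MLmUQDhG6").unwrap();
    assert_eq!(id.to_base62(), "6rqhFgbbKwnb9MLmUQDhG6");
    // ...while strings without the "spotify:" prefix are rejected up front.
    assert!(SpotifyId::from_uri("track:6rqhFgbbKwnb9MLmUQDhG6").is_err());
}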
/// Returns the `SpotifyId` as a base16 (hex) encoded, `SpotifyId::SIZE_BASE62` (22)
/// Returns the `SpotifyId` as a base16 (hex) encoded, `SpotifyId::SIZE_BASE16` (32)
/// character long `String`.
pub fn to_base16(&self) -> String {
to_base16(&self.to_raw(), &mut [0u8; SpotifyId::SIZE_BASE16])
@ -305,7 +308,7 @@ mod tests {
},
];
static CONV_INVALID: [ConversionCase; 2] = [
static CONV_INVALID: [ConversionCase; 3] = [
ConversionCase {
id: 0,
kind: SpotifyAudioType::NonPlayable,
@ -330,6 +333,18 @@ mod tests {
154, 27, 28, 251,
],
},
ConversionCase {
id: 0,
kind: SpotifyAudioType::NonPlayable,
// Uri too short
uri: "spotify:azb:aRS48xBl0tH",
base16: "--------------------",
base62: "....................",
raw: &[
// Invalid length.
154, 27, 28, 251,
],
},
];
#[test]

core/src/token.rs (new file, 131 lines)
View file

@ -0,0 +1,131 @@
// Ported from librespot-java. Relicensed under MIT with permission.
// Known scopes:
// ugc-image-upload, playlist-read-collaborative, playlist-modify-private,
// playlist-modify-public, playlist-read-private, user-read-playback-position,
// user-read-recently-played, user-top-read, user-modify-playback-state,
// user-read-currently-playing, user-read-playback-state, user-read-private, user-read-email,
// user-library-modify, user-library-read, user-follow-modify, user-follow-read, streaming,
// app-remote-control
use crate::mercury::MercuryError;
use serde::Deserialize;
use std::error::Error;
use std::time::{Duration, Instant};
component! {
TokenProvider : TokenProviderInner {
tokens: Vec<Token> = vec![],
}
}
#[derive(Clone, Debug)]
pub struct Token {
access_token: String,
expires_in: Duration,
token_type: String,
scopes: Vec<String>,
timestamp: Instant,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct TokenData {
access_token: String,
expires_in: u64,
token_type: String,
scope: Vec<String>,
}
impl TokenProvider {
const KEYMASTER_CLIENT_ID: &'static str = "65b708073fc0480ea92a077233ca87bd";
fn find_token(&self, scopes: Vec<&str>) -> Option<usize> {
self.lock(|inner| {
for i in 0..inner.tokens.len() {
if inner.tokens[i].in_scopes(scopes.clone()) {
return Some(i);
}
}
None
})
}
// scopes must be comma-separated
pub async fn get_token(&self, scopes: &str) -> Result<Token, MercuryError> {
if scopes.is_empty() {
return Err(MercuryError);
}
if let Some(index) = self.find_token(scopes.split(',').collect()) {
let cached_token = self.lock(|inner| inner.tokens[index].clone());
if cached_token.is_expired() {
self.lock(|inner| inner.tokens.remove(index));
} else {
return Ok(cached_token);
}
}
trace!(
"Requested token in scopes {:?} unavailable or expired, requesting new token.",
scopes
);
let query_uri = format!(
"hm://keymaster/token/authenticated?scope={}&client_id={}&device_id={}",
scopes,
Self::KEYMASTER_CLIENT_ID,
self.session().device_id()
);
let request = self.session().mercury().get(query_uri);
let response = request.await?;
let data = response
.payload
.first()
.expect("No tokens received")
.to_vec();
let token = Token::new(String::from_utf8(data).unwrap()).map_err(|_| MercuryError)?;
trace!("Got token: {:?}", token);
self.lock(|inner| inner.tokens.push(token.clone()));
Ok(token)
}
}
impl Token {
const EXPIRY_THRESHOLD: Duration = Duration::from_secs(10);
pub fn new(body: String) -> Result<Self, Box<dyn Error>> {
let data: TokenData = serde_json::from_slice(body.as_ref())?;
Ok(Self {
access_token: data.access_token,
expires_in: Duration::from_secs(data.expires_in),
token_type: data.token_type,
scopes: data.scope,
timestamp: Instant::now(),
})
}
pub fn is_expired(&self) -> bool {
self.timestamp + (self.expires_in - Self::EXPIRY_THRESHOLD) < Instant::now()
}
pub fn in_scope(&self, scope: &str) -> bool {
for s in &self.scopes {
if *s == scope {
return true;
}
}
false
}
pub fn in_scopes(&self, scopes: Vec<&str>) -> bool {
for s in scopes {
if !self.in_scope(s) {
return false;
}
}
true
}
}
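An illustrative sketch of requesting a token through the new provider (assumptions: a connected `session: Session` and the `token_provider()` accessor added in this commit; the scopes are two of the known scopes listed above):
// Hypothetical helper: fetch a token for playback-control scopes.
async fn playback_token(session: &Session) -> Result<Token, MercuryError> {
    let token = session
        .token_provider()
        .get_token("user-read-playback-state,user-modify-playback-state")
        .await?;
    debug_assert!(!token.is_expired());
    Ok(token)
}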

discovery/Cargo.toml (new file, 40 lines)
View file

@ -0,0 +1,40 @@
[package]
name = "librespot-discovery"
version = "0.2.0"
authors = ["Paul Lietar <paul@lietar.net>"]
description = "The discovery logic for librespot"
license = "MIT"
repository = "https://github.com/librespot-org/librespot"
edition = "2018"
[dependencies]
aes-ctr = "0.6"
base64 = "0.13"
cfg-if = "1.0"
form_urlencoded = "1.0"
futures-core = "0.3"
hmac = "0.11"
hyper = { version = "0.14", features = ["server", "http1", "tcp"] }
libmdns = "0.6"
log = "0.4"
rand = "0.8"
serde_json = "1.0.25"
sha-1 = "0.9"
thiserror = "1.0"
tokio = { version = "1.0", features = ["sync", "rt"] }
dns-sd = { version = "0.1.3", optional = true }
[dependencies.librespot-core]
path = "../core"
default_features = false
version = "0.2.0"
[dev-dependencies]
futures = "0.3"
hex = "0.4"
simple_logger = "1.11"
tokio = { version = "1.0", features = ["macros", "rt"] }
[features]
with-dns-sd = ["dns-sd"]

View file

@ -0,0 +1,25 @@
use futures::StreamExt;
use librespot_discovery::DeviceType;
use sha1::{Digest, Sha1};
use simple_logger::SimpleLogger;
#[tokio::main(flavor = "current_thread")]
async fn main() {
SimpleLogger::new()
.with_level(log::LevelFilter::Debug)
.init()
.unwrap();
let name = "Librespot";
let device_id = hex::encode(Sha1::digest(name.as_bytes()));
let mut server = librespot_discovery::Discovery::builder(device_id)
.name(name)
.device_type(DeviceType::Computer)
.launch()
.unwrap();
while let Some(x) = server.next().await {
println!("Received {:?}", x);
}
}

discovery/src/lib.rs (new file, 150 lines)
View file

@ -0,0 +1,150 @@
//! Advertises this device to Spotify clients in the local network.
//!
//! This device will show up in the list of "available devices".
//! Once it is selected from the list, [`Credentials`] are received.
//! Those can be used to establish a new Session with [`librespot_core`].
//!
//! This library uses mDNS and DNS-SD so that other devices can find it,
//! and spawns an HTTP server to answer requests from Spotify clients.
#![warn(clippy::all, missing_docs, rust_2018_idioms)]
mod server;
use std::borrow::Cow;
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
use cfg_if::cfg_if;
use futures_core::Stream;
use librespot_core as core;
use thiserror::Error;
use self::server::DiscoveryServer;
/// Credentials to be used in [`librespot`](`librespot_core`).
pub use crate::core::authentication::Credentials;
/// Determines the icon shown in the list of available devices.
pub use crate::core::config::DeviceType;
/// Makes this device visible to Spotify clients in the local network.
///
/// `Discovery` implements the [`Stream`] trait. Every time this device
/// is selected in the list of available devices, it yields [`Credentials`].
pub struct Discovery {
server: DiscoveryServer,
#[cfg(not(feature = "with-dns-sd"))]
_svc: libmdns::Service,
#[cfg(feature = "with-dns-sd")]
_svc: dns_sd::DNSService,
}
/// A builder for [`Discovery`].
pub struct Builder {
server_config: server::Config,
port: u16,
}
/// Errors that can occur while setting up a [`Discovery`] instance.
#[derive(Debug, Error)]
pub enum Error {
/// Setting up service discovery via DNS-SD failed.
#[error("Setting up dns-sd failed: {0}")]
DnsSdError(#[from] io::Error),
/// Setting up the http server failed.
#[error("Setting up the http server failed: {0}")]
HttpServerError(#[from] hyper::Error),
}
impl Builder {
/// Starts a new builder using the provided device id.
pub fn new(device_id: impl Into<String>) -> Self {
Self {
server_config: server::Config {
name: "Librespot".into(),
device_type: DeviceType::default(),
device_id: device_id.into(),
},
port: 0,
}
}
/// Sets the name to be displayed. Default is `"Librespot"`.
pub fn name(mut self, name: impl Into<Cow<'static, str>>) -> Self {
self.server_config.name = name.into();
self
}
/// Sets the device type which is visible as an icon in other Spotify clients. Default is `Speaker`.
pub fn device_type(mut self, device_type: DeviceType) -> Self {
self.server_config.device_type = device_type;
self
}
/// Sets the port on which it should listen for incoming connections.
/// The default value `0` means any port.
pub fn port(mut self, port: u16) -> Self {
self.port = port;
self
}
/// Sets up the [`Discovery`] instance.
///
/// # Errors
/// If setting up the mdns service or creating the server fails, this function returns an error.
pub fn launch(self) -> Result<Discovery, Error> {
let mut port = self.port;
let name = self.server_config.name.clone().into_owned();
let server = DiscoveryServer::new(self.server_config, &mut port)?;
let svc;
cfg_if! {
if #[cfg(feature = "with-dns-sd")] {
svc = dns_sd::DNSService::register(
Some(name.as_ref()),
"_spotify-connect._tcp",
None,
None,
port,
&["VERSION=1.0", "CPath=/"],
)
.unwrap();
} else {
let responder = libmdns::Responder::spawn(&tokio::runtime::Handle::current())?;
svc = responder.register(
"_spotify-connect._tcp".to_owned(),
name,
port,
&["VERSION=1.0", "CPath=/"],
)
}
};
Ok(Discovery { server, _svc: svc })
}
}
impl Discovery {
/// Starts a [`Builder`] with the provided device id.
pub fn builder(device_id: impl Into<String>) -> Builder {
Builder::new(device_id)
}
/// Creates a new instance with the specified device id and default parameters.
pub fn new(device_id: impl Into<String>) -> Result<Self, Error> {
Self::builder(device_id).launch()
}
}
impl Stream for Discovery {
type Item = Credentials;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
Pin::new(&mut self.server).poll_next(cx)
}
}

discovery/src/server.rs (new file, 236 lines)
View file

@ -0,0 +1,236 @@
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::convert::Infallible;
use std::net::{Ipv4Addr, SocketAddr};
use std::pin::Pin;
use std::sync::Arc;
use std::task::{Context, Poll};
use aes_ctr::cipher::generic_array::GenericArray;
use aes_ctr::cipher::{NewStreamCipher, SyncStreamCipher};
use aes_ctr::Aes128Ctr;
use futures_core::Stream;
use hmac::{Hmac, Mac, NewMac};
use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Method, Request, Response, StatusCode};
use log::{debug, warn};
use serde_json::json;
use sha1::{Digest, Sha1};
use tokio::sync::{mpsc, oneshot};
use crate::core::authentication::Credentials;
use crate::core::config::DeviceType;
use crate::core::diffie_hellman::DhLocalKeys;
type Params<'a> = BTreeMap<Cow<'a, str>, Cow<'a, str>>;
pub struct Config {
pub name: Cow<'static, str>,
pub device_type: DeviceType,
pub device_id: String,
}
struct RequestHandler {
config: Config,
keys: DhLocalKeys,
tx: mpsc::UnboundedSender<Credentials>,
}
impl RequestHandler {
fn new(config: Config) -> (Self, mpsc::UnboundedReceiver<Credentials>) {
let (tx, rx) = mpsc::unbounded_channel();
let discovery = Self {
config,
keys: DhLocalKeys::random(&mut rand::thread_rng()),
tx,
};
(discovery, rx)
}
fn handle_get_info(&self) -> Response<hyper::Body> {
let public_key = base64::encode(&self.keys.public_key());
let device_type: &str = self.config.device_type.into();
let body = json!({
"status": 101,
"statusString": "ERROR-OK",
"spotifyError": 0,
"version": "2.7.1",
"deviceID": (self.config.device_id),
"remoteName": (self.config.name),
"activeUser": "",
"publicKey": (public_key),
"deviceType": (device_type),
"libraryVersion": crate::core::version::SEMVER,
"accountReq": "PREMIUM",
"brandDisplayName": "librespot",
"modelDisplayName": "librespot",
"resolverVersion": "0",
"groupStatus": "NONE",
"voiceSupport": "NO",
})
.to_string();
Response::new(Body::from(body))
}
fn handle_add_user(&self, params: &Params<'_>) -> Response<hyper::Body> {
let username = params.get("userName").unwrap().as_ref();
let encrypted_blob = params.get("blob").unwrap();
let client_key = params.get("clientKey").unwrap();
let encrypted_blob = base64::decode(encrypted_blob.as_bytes()).unwrap();
let client_key = base64::decode(client_key.as_bytes()).unwrap();
let shared_key = self.keys.shared_secret(&client_key);
let iv = &encrypted_blob[0..16];
let encrypted = &encrypted_blob[16..encrypted_blob.len() - 20];
let cksum = &encrypted_blob[encrypted_blob.len() - 20..encrypted_blob.len()];
let base_key = Sha1::digest(&shared_key);
let base_key = &base_key[..16];
let checksum_key = {
let mut h =
Hmac::<Sha1>::new_from_slice(base_key).expect("HMAC can take key of any size");
h.update(b"checksum");
h.finalize().into_bytes()
};
let encryption_key = {
let mut h =
Hmac::<Sha1>::new_from_slice(base_key).expect("HMAC can take key of any size");
h.update(b"encryption");
h.finalize().into_bytes()
};
let mut h =
Hmac::<Sha1>::new_from_slice(&checksum_key).expect("HMAC can take key of any size");
h.update(encrypted);
if h.verify(cksum).is_err() {
warn!("Login error for user {:?}: MAC mismatch", username);
let result = json!({
"status": 102,
"spotifyError": 1,
"statusString": "ERROR-MAC"
});
let body = result.to_string();
return Response::new(Body::from(body));
}
let decrypted = {
let mut data = encrypted.to_vec();
let mut cipher = Aes128Ctr::new(
GenericArray::from_slice(&encryption_key[0..16]),
GenericArray::from_slice(iv),
);
cipher.apply_keystream(&mut data);
String::from_utf8(data).unwrap()
};
let credentials =
Credentials::with_blob(username.to_string(), &decrypted, &self.config.device_id);
self.tx.send(credentials).unwrap();
let result = json!({
"status": 101,
"spotifyError": 0,
"statusString": "ERROR-OK"
});
let body = result.to_string();
Response::new(Body::from(body))
}
fn not_found(&self) -> Response<hyper::Body> {
let mut res = Response::default();
*res.status_mut() = StatusCode::NOT_FOUND;
res
}
async fn handle(self: Arc<Self>, request: Request<Body>) -> hyper::Result<Response<Body>> {
let mut params = Params::new();
let (parts, body) = request.into_parts();
if let Some(query) = parts.uri.query() {
let query_params = form_urlencoded::parse(query.as_bytes());
params.extend(query_params);
}
if parts.method != Method::GET {
debug!("{:?} {:?} {:?}", parts.method, parts.uri.path(), params);
}
let body = hyper::body::to_bytes(body).await?;
params.extend(form_urlencoded::parse(&body));
let action = params.get("action").map(Cow::as_ref);
Ok(match (parts.method, action) {
(Method::GET, Some("getInfo")) => self.handle_get_info(),
(Method::POST, Some("addUser")) => self.handle_add_user(&params),
_ => self.not_found(),
})
}
}
pub struct DiscoveryServer {
cred_rx: mpsc::UnboundedReceiver<Credentials>,
_close_tx: oneshot::Sender<Infallible>,
}
impl DiscoveryServer {
pub fn new(config: Config, port: &mut u16) -> hyper::Result<Self> {
let (discovery, cred_rx) = RequestHandler::new(config);
let discovery = Arc::new(discovery);
let (close_tx, close_rx) = oneshot::channel();
let address = SocketAddr::new(Ipv4Addr::UNSPECIFIED.into(), *port);
let make_service = make_service_fn(move |_| {
let discovery = discovery.clone();
async move {
Ok::<_, hyper::Error>(service_fn(move |request| discovery.clone().handle(request)))
}
});
let server = hyper::Server::try_bind(&address)?.serve(make_service);
*port = server.local_addr().port();
debug!("Zeroconf server listening on 0.0.0.0:{}", *port);
tokio::spawn(async {
let result = server
.with_graceful_shutdown(async {
close_rx.await.unwrap_err();
debug!("Shutting down discovery server");
})
.await;
if let Err(e) = result {
warn!("Discovery server failed: {}", e);
}
});
Ok(Self {
cred_rx,
_close_tx: close_tx,
})
}
}
impl Stream for DiscoveryServer {
type Item = Credentials;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Credentials>> {
self.cred_rx.poll_recv(cx)
}
}

View file

@ -2,7 +2,6 @@ use std::env;
use librespot::core::authentication::Credentials;
use librespot::core::config::SessionConfig;
use librespot::core::keymaster;
use librespot::core::session::Session;
const SCOPES: &str =
@ -13,8 +12,8 @@ async fn main() {
let session_config = SessionConfig::default();
let args: Vec<_> = env::args().collect();
if args.len() != 4 {
eprintln!("Usage: {} USERNAME PASSWORD CLIENT_ID", args[0]);
if args.len() != 3 {
eprintln!("Usage: {} USERNAME PASSWORD", args[0]);
return;
}
@ -26,8 +25,6 @@ async fn main() {
println!(
"Token: {:#?}",
keymaster::get_token(&session, &args[3], SCOPES)
.await
.unwrap()
session.token_provider().get_token(SCOPES).await.unwrap()
);
}

View file

@ -2,6 +2,7 @@ use byteorder::{BigEndian, WriteBytesExt};
use std::io::Write;
use librespot_core::channel::ChannelData;
use librespot_core::packet::PacketType;
use librespot_core::session::Session;
use librespot_core::spotify_id::FileId;
@ -13,7 +14,7 @@ pub fn get(session: &Session, file: FileId) -> ChannelData {
packet.write_u16::<BigEndian>(channel_id).unwrap();
packet.write_u16::<BigEndian>(0).unwrap();
packet.write(&file.0).unwrap();
session.send_packet(0x19, packet);
session.send_packet(PacketType::Image, packet);
data
}

View file

@ -18,15 +18,15 @@ path = "../metadata"
version = "0.2.0"
[dependencies]
cfg-if = "1.0"
futures-executor = "0.3"
futures-util = { version = "0.3", default_features = false, features = ["alloc"] }
log = "0.4"
byteorder = "1.4"
shell-words = "1.0.0"
tokio = { version = "1", features = ["sync"] }
zerocopy = { version = "0.3" }
zerocopy = { version = "0.3" }
# Backends
alsa = { version = "0.5", optional = true }
portaudio-rs = { version = "0.3", optional = true }
libpulse-binding = { version = "2", optional = true, default-features = false }
@ -42,14 +42,16 @@ rodio = { version = "0.14", optional = true, default-features = false
cpal = { version = "0.13", optional = true }
thiserror = { version = "1", optional = true }
# Decoders
lewton = "0.10" # Currently not optional because of limitations of cargo features
librespot-tremor = { version = "0.2", optional = true }
# Decoder
lewton = "0.10"
ogg = "0.8"
vorbis = { version ="0.0", optional = true }
# Dithering
rand = "0.8"
rand_distr = "0.4"
[features]
alsa-backend = ["alsa"]
alsa-backend = ["alsa", "thiserror"]
portaudio-backend = ["portaudio-rs"]
pulseaudio-backend = ["libpulse-binding", "libpulse-simple-binding"]
jackaudio-backend = ["jack"]
@ -57,6 +59,3 @@ rodio-backend = ["rodio", "cpal", "thiserror"]
rodiojack-backend = ["rodio", "cpal/jack", "thiserror"]
sdl-backend = ["sdl2"]
gstreamer-backend = ["gstreamer", "gstreamer-app", "glib"]
with-tremor = ["librespot-tremor"]
with-vorbis = ["vorbis"]

View file

@ -1,95 +1,189 @@
use super::{Open, Sink, SinkAsBytes};
use crate::config::AudioFormat;
use crate::convert::Converter;
use crate::decoder::AudioPacket;
use crate::player::{NUM_CHANNELS, SAMPLES_PER_SECOND, SAMPLE_RATE};
use crate::{NUM_CHANNELS, SAMPLE_RATE};
use alsa::device_name::HintIter;
use alsa::pcm::{Access, Format, Frames, HwParams, PCM};
use alsa::{Direction, Error, ValueOr};
use alsa::pcm::{Access, Format, HwParams, PCM};
use alsa::{Direction, ValueOr};
use std::cmp::min;
use std::ffi::CString;
use std::io;
use std::process::exit;
use std::time::Duration;
use thiserror::Error;
const BUFFERED_LATENCY: f32 = 0.125; // seconds
const BUFFERED_PERIODS: Frames = 4;
// 125 ms Period time * 4 periods = 0.5 sec buffer.
const PERIOD_TIME: Duration = Duration::from_millis(125);
const NUM_PERIODS: u32 = 4;
#[derive(Debug, Error)]
enum AlsaError {
#[error("AlsaSink, device {device} may be invalid or busy, {err}")]
PcmSetUp { device: String, err: alsa::Error },
#[error("AlsaSink, device {device} unsupported access type RWInterleaved, {err}")]
UnsupportedAccessType { device: String, err: alsa::Error },
#[error("AlsaSink, device {device} unsupported format {format:?}, {err}")]
UnsupportedFormat {
device: String,
format: AudioFormat,
err: alsa::Error,
},
#[error("AlsaSink, device {device} unsupported sample rate {samplerate}, {err}")]
UnsupportedSampleRate {
device: String,
samplerate: u32,
err: alsa::Error,
},
#[error("AlsaSink, device {device} unsupported channel count {channel_count}, {err}")]
UnsupportedChannelCount {
device: String,
channel_count: u8,
err: alsa::Error,
},
#[error("AlsaSink Hardware Parameters Error, {0}")]
HwParams(alsa::Error),
#[error("AlsaSink Software Parameters Error, {0}")]
SwParams(alsa::Error),
#[error("AlsaSink PCM Error, {0}")]
Pcm(alsa::Error),
}
pub struct AlsaSink {
pcm: Option<PCM>,
format: AudioFormat,
device: String,
buffer: Vec<u8>,
period_buffer: Vec<u8>,
}
fn list_outputs() {
fn list_outputs() -> io::Result<()> {
println!("Listing available Alsa outputs:");
for t in &["pcm", "ctl", "hwdep"] {
println!("{} devices:", t);
let i = HintIter::new(None, &*CString::new(*t).unwrap()).unwrap();
let i = match HintIter::new_str(None, &t) {
Ok(i) => i,
Err(e) => {
return Err(io::Error::new(io::ErrorKind::Other, e));
}
};
for a in i {
if let Some(Direction::Playback) = a.direction {
// mimic aplay -L
println!(
"{}\n\t{}\n",
a.name.unwrap(),
a.desc.unwrap().replace("\n", "\n\t")
);
let name = a
.name
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Could not parse name"))?;
let desc = a
.desc
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Could not parse desc"))?;
println!("{}\n\t{}\n", name, desc.replace("\n", "\n\t"));
}
}
}
Ok(())
}
fn open_device(dev_name: &str, format: AudioFormat) -> Result<(PCM, Frames), Box<Error>> {
let pcm = PCM::new(dev_name, Direction::Playback, false)?;
fn open_device(dev_name: &str, format: AudioFormat) -> Result<(PCM, usize), AlsaError> {
let pcm = PCM::new(dev_name, Direction::Playback, false).map_err(|e| AlsaError::PcmSetUp {
device: dev_name.to_string(),
err: e,
})?;
let alsa_format = match format {
AudioFormat::F64 => Format::float64(),
AudioFormat::F32 => Format::float(),
AudioFormat::S32 => Format::s32(),
AudioFormat::S24 => Format::s24(),
AudioFormat::S24_3 => Format::S243LE,
AudioFormat::S16 => Format::s16(),
#[cfg(target_endian = "little")]
AudioFormat::S24_3 => Format::S243LE,
#[cfg(target_endian = "big")]
AudioFormat::S24_3 => Format::S243BE,
};
// http://www.linuxjournal.com/article/6735?page=0,1#N0x19ab2890.0x19ba78d8
// latency = period_size * periods / (rate * bytes_per_frame)
// For stereo samples encoded as 32-bit float, one frame has a length of eight bytes.
let mut period_size = ((SAMPLES_PER_SECOND * format.size() as u32) as f32
* (BUFFERED_LATENCY / BUFFERED_PERIODS as f32)) as Frames;
{
let hwp = HwParams::any(&pcm)?;
hwp.set_access(Access::RWInterleaved)?;
hwp.set_format(alsa_format)?;
hwp.set_rate(SAMPLE_RATE, ValueOr::Nearest)?;
hwp.set_channels(NUM_CHANNELS as u32)?;
period_size = hwp.set_period_size_near(period_size, ValueOr::Greater)?;
hwp.set_buffer_size_near(period_size * BUFFERED_PERIODS)?;
pcm.hw_params(&hwp)?;
let bytes_per_period = {
let hwp = HwParams::any(&pcm).map_err(AlsaError::HwParams)?;
hwp.set_access(Access::RWInterleaved)
.map_err(|e| AlsaError::UnsupportedAccessType {
device: dev_name.to_string(),
err: e,
})?;
let swp = pcm.sw_params_current()?;
swp.set_start_threshold(hwp.get_buffer_size()? - hwp.get_period_size()?)?;
pcm.sw_params(&swp)?;
}
hwp.set_format(alsa_format)
.map_err(|e| AlsaError::UnsupportedFormat {
device: dev_name.to_string(),
format,
err: e,
})?;
Ok((pcm, period_size))
hwp.set_rate(SAMPLE_RATE, ValueOr::Nearest).map_err(|e| {
AlsaError::UnsupportedSampleRate {
device: dev_name.to_string(),
samplerate: SAMPLE_RATE,
err: e,
}
})?;
hwp.set_channels(NUM_CHANNELS as u32)
.map_err(|e| AlsaError::UnsupportedChannelCount {
device: dev_name.to_string(),
channel_count: NUM_CHANNELS,
err: e,
})?;
// Deal strictly in time and periods.
hwp.set_periods(NUM_PERIODS, ValueOr::Nearest)
.map_err(AlsaError::HwParams)?;
hwp.set_period_time_near(PERIOD_TIME.as_micros() as u32, ValueOr::Nearest)
.map_err(AlsaError::HwParams)?;
pcm.hw_params(&hwp).map_err(AlsaError::Pcm)?;
let swp = pcm.sw_params_current().map_err(AlsaError::Pcm)?;
// Don't assume we got what we wanted.
// Ask to make sure.
let frames_per_period = hwp.get_period_size().map_err(AlsaError::HwParams)?;
let frames_per_buffer = hwp.get_buffer_size().map_err(AlsaError::HwParams)?;
swp.set_start_threshold(frames_per_buffer - frames_per_period)
.map_err(AlsaError::SwParams)?;
pcm.sw_params(&swp).map_err(AlsaError::Pcm)?;
// Let ALSA do the math for us.
pcm.frames_to_bytes(frames_per_period) as usize
};
Ok((pcm, bytes_per_period))
}
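As a back-of-the-envelope check of the new time-based buffering (assumed values, not computed by this code):
// frames per period ≈ 44_100 frames/s * 0.125 s      ≈ 5_512 frames
// bytes per period  ≈ 5_512 frames * 8 bytes/frame   ≈ 44_096 bytes (stereo f32)
// buffered audio    = 4 periods * 125 ms              = 500 ms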
impl Open for AlsaSink {
fn open(device: Option<String>, format: AudioFormat) -> Self {
info!("Using Alsa sink with format: {:?}", format);
let name = match device.as_ref().map(AsRef::as_ref) {
Some("?") => {
println!("Listing available Alsa outputs:");
list_outputs();
exit(0)
}
let name = match device.as_deref() {
Some("?") => match list_outputs() {
Ok(_) => {
exit(0);
}
Err(err) => {
error!("Error listing Alsa outputs, {}", err);
exit(1);
}
},
Some(device) => device,
None => "default",
}
.to_string();
info!("Using AlsaSink with format: {:?}", format);
Self {
pcm: None,
format,
device: name,
buffer: vec![],
period_buffer: vec![],
}
}
}
@ -97,21 +191,13 @@ impl Open for AlsaSink {
impl Sink for AlsaSink {
fn start(&mut self) -> io::Result<()> {
if self.pcm.is_none() {
let pcm = open_device(&self.device, self.format);
match pcm {
Ok((p, period_size)) => {
self.pcm = Some(p);
// Create a buffer for all samples for a full period
self.buffer = Vec::with_capacity(
period_size as usize * BUFFERED_PERIODS as usize * self.format.size(),
);
match open_device(&self.device, self.format) {
Ok((pcm, bytes_per_period)) => {
self.pcm = Some(pcm);
self.period_buffer = Vec::with_capacity(bytes_per_period);
}
Err(e) => {
error!("Alsa error PCM open {}", e);
return Err(io::Error::new(
io::ErrorKind::Other,
"Alsa error: PCM open failed",
));
return Err(io::Error::new(io::ErrorKind::Other, e));
}
}
}
@ -123,9 +209,16 @@ impl Sink for AlsaSink {
{
// Write any leftover data in the period buffer
// before draining the actual buffer
self.write_bytes(&[]).expect("could not flush buffer");
let pcm = self.pcm.as_mut().unwrap();
pcm.drain().unwrap();
self.write_bytes(&[])?;
let pcm = self.pcm.as_mut().ok_or_else(|| {
io::Error::new(io::ErrorKind::Other, "Error stopping AlsaSink, PCM is None")
})?;
pcm.drain().map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!("Error stopping AlsaSink {}", e),
)
})?
}
self.pcm = None;
Ok(())
@ -139,15 +232,15 @@ impl SinkAsBytes for AlsaSink {
let mut processed_data = 0;
while processed_data < data.len() {
let data_to_buffer = min(
self.buffer.capacity() - self.buffer.len(),
self.period_buffer.capacity() - self.period_buffer.len(),
data.len() - processed_data,
);
self.buffer
self.period_buffer
.extend_from_slice(&data[processed_data..processed_data + data_to_buffer]);
processed_data += data_to_buffer;
if self.buffer.len() == self.buffer.capacity() {
self.write_buf();
self.buffer.clear();
if self.period_buffer.len() == self.period_buffer.capacity() {
self.write_buf()?;
self.period_buffer.clear();
}
}
@ -156,12 +249,34 @@ impl SinkAsBytes for AlsaSink {
}
impl AlsaSink {
fn write_buf(&mut self) {
let pcm = self.pcm.as_mut().unwrap();
pub const NAME: &'static str = "alsa";
fn write_buf(&mut self) -> io::Result<()> {
let pcm = self.pcm.as_mut().ok_or_else(|| {
io::Error::new(
io::ErrorKind::Other,
"Error writing from AlsaSink buffer to PCM, PCM is None",
)
})?;
let io = pcm.io_bytes();
match io.writei(&self.buffer) {
Ok(_) => (),
Err(err) => pcm.try_recover(err, false).unwrap(),
};
if let Err(err) = io.writei(&self.period_buffer) {
// Capture and log the original error as a warning, and then try to recover.
// If recovery fails then forward that error back to player.
warn!(
"Error writing from AlsaSink buffer to PCM, trying to recover {}",
err
);
pcm.try_recover(err, false).map_err(|e| {
io::Error::new(
io::ErrorKind::Other,
format!(
"Error writing from AlsaSink buffer to PCM, recovery failed {}",
e
),
)
})?
}
Ok(())
}
}

View file

@ -1,7 +1,8 @@
use super::{Open, Sink, SinkAsBytes};
use crate::config::AudioFormat;
use crate::convert::Converter;
use crate::decoder::AudioPacket;
use crate::player::{NUM_CHANNELS, SAMPLE_RATE};
use crate::{NUM_CHANNELS, SAMPLE_RATE};
use gstreamer as gst;
use gstreamer_app as gst_app;
@ -33,11 +34,17 @@ impl Open for GstreamerSink {
let sample_size = format.size();
let gst_bytes = 2048 * sample_size;
#[cfg(target_endian = "little")]
const ENDIANNESS: &str = "LE";
#[cfg(target_endian = "big")]
const ENDIANNESS: &str = "BE";
let pipeline_str_preamble = format!(
"appsrc caps=\"audio/x-raw,format={}LE,layout=interleaved,channels={},rate={}\" block=true max-bytes={} name=appsrc0 ",
gst_format, NUM_CHANNELS, SAMPLE_RATE, gst_bytes
"appsrc caps=\"audio/x-raw,format={}{},layout=interleaved,channels={},rate={}\" block=true max-bytes={} name=appsrc0 ",
gst_format, ENDIANNESS, NUM_CHANNELS, SAMPLE_RATE, gst_bytes
);
let pipeline_str_rest = r#" ! audioconvert ! autoaudiosink"#;
// no need to dither twice; use librespot dithering instead
let pipeline_str_rest = r#" ! audioconvert dithering=none ! autoaudiosink"#;
let pipeline_str: String = match device {
Some(x) => format!("{}{}", pipeline_str_preamble, x),
None => format!("{}{}", pipeline_str_preamble, pipeline_str_rest),
@ -120,7 +127,6 @@ impl Open for GstreamerSink {
}
impl Sink for GstreamerSink {
start_stop_noop!();
sink_as_bytes!();
}
@ -133,3 +139,7 @@ impl SinkAsBytes for GstreamerSink {
Ok(())
}
}
impl GstreamerSink {
pub const NAME: &'static str = "gstreamer";
}

View file

@ -1,7 +1,8 @@
use super::{Open, Sink};
use crate::config::AudioFormat;
use crate::convert::Converter;
use crate::decoder::AudioPacket;
use crate::player::NUM_CHANNELS;
use crate::NUM_CHANNELS;
use jack::{
AsyncClient, AudioOut, Client, ClientOptions, Control, Port, ProcessHandler, ProcessScope,
};
@ -69,11 +70,10 @@ impl Open for JackSink {
}
impl Sink for JackSink {
start_stop_noop!();
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
for s in packet.samples().iter() {
let res = self.send.send(*s);
fn write(&mut self, packet: &AudioPacket, converter: &mut Converter) -> io::Result<()> {
let samples_f32: &[f32] = &converter.f64_to_f32(packet.samples());
for sample in samples_f32.iter() {
let res = self.send.send(*sample);
if res.is_err() {
error!("cannot write to channel");
}
@ -81,3 +81,7 @@ impl Sink for JackSink {
Ok(())
}
}
impl JackSink {
pub const NAME: &'static str = "jackaudio";
}

View file

@ -1,4 +1,5 @@
use crate::config::AudioFormat;
use crate::convert::Converter;
use crate::decoder::AudioPacket;
use std::io;
@ -7,9 +8,13 @@ pub trait Open {
}
pub trait Sink {
fn start(&mut self) -> io::Result<()>;
fn stop(&mut self) -> io::Result<()>;
fn write(&mut self, packet: &AudioPacket) -> io::Result<()>;
fn start(&mut self) -> io::Result<()> {
Ok(())
}
fn stop(&mut self) -> io::Result<()> {
Ok(())
}
fn write(&mut self, packet: &AudioPacket, converter: &mut Converter) -> io::Result<()>;
}
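With `start` and `stop` now defaulting to no-ops, a backend only has to implement `write`. A hypothetical minimal sink (not part of this diff) could look like this:
// Sketch: a sink that discards all audio and relies on the default start()/stop().
struct NullSink;

impl Sink for NullSink {
    fn write(&mut self, _packet: &AudioPacket, _converter: &mut Converter) -> io::Result<()> {
        Ok(())
    }
}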
pub type SinkBuilder = fn(Option<String>, AudioFormat) -> Box<dyn Sink>;
@ -25,26 +30,30 @@ fn mk_sink<S: Sink + Open + 'static>(device: Option<String>, format: AudioFormat
// reuse code for various backends
macro_rules! sink_as_bytes {
() => {
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
use crate::convert::{self, i24};
fn write(&mut self, packet: &AudioPacket, converter: &mut Converter) -> io::Result<()> {
use crate::convert::i24;
use zerocopy::AsBytes;
match packet {
AudioPacket::Samples(samples) => match self.format {
AudioFormat::F32 => self.write_bytes(samples.as_bytes()),
AudioFormat::F64 => self.write_bytes(samples.as_bytes()),
AudioFormat::F32 => {
let samples_f32: &[f32] = &converter.f64_to_f32(samples);
self.write_bytes(samples_f32.as_bytes())
}
AudioFormat::S32 => {
let samples_s32: &[i32] = &convert::to_s32(samples);
let samples_s32: &[i32] = &converter.f64_to_s32(samples);
self.write_bytes(samples_s32.as_bytes())
}
AudioFormat::S24 => {
let samples_s24: &[i32] = &convert::to_s24(samples);
let samples_s24: &[i32] = &converter.f64_to_s24(samples);
self.write_bytes(samples_s24.as_bytes())
}
AudioFormat::S24_3 => {
let samples_s24_3: &[i24] = &convert::to_s24_3(samples);
let samples_s24_3: &[i24] = &converter.f64_to_s24_3(samples);
self.write_bytes(samples_s24_3.as_bytes())
}
AudioFormat::S16 => {
let samples_s16: &[i16] = &convert::to_s16(samples);
let samples_s16: &[i16] = &converter.f64_to_s16(samples);
self.write_bytes(samples_s16.as_bytes())
}
},
@ -54,17 +63,6 @@ macro_rules! sink_as_bytes {
};
}
macro_rules! start_stop_noop {
() => {
fn start(&mut self) -> io::Result<()> {
Ok(())
}
fn stop(&mut self) -> io::Result<()> {
Ok(())
}
};
}
#[cfg(feature = "alsa-backend")]
mod alsa;
#[cfg(feature = "alsa-backend")]
@ -92,6 +90,8 @@ use self::gstreamer::GstreamerSink;
#[cfg(any(feature = "rodio-backend", feature = "rodiojack-backend"))]
mod rodio;
#[cfg(any(feature = "rodio-backend", feature = "rodiojack-backend"))]
use self::rodio::RodioSink;
#[cfg(feature = "sdl-backend")]
mod sdl;
@ -105,24 +105,24 @@ mod subprocess;
use self::subprocess::SubprocessSink;
pub const BACKENDS: &[(&str, SinkBuilder)] = &[
#[cfg(feature = "alsa-backend")]
("alsa", mk_sink::<AlsaSink>),
#[cfg(feature = "portaudio-backend")]
("portaudio", mk_sink::<PortAudioSink>),
#[cfg(feature = "pulseaudio-backend")]
("pulseaudio", mk_sink::<PulseAudioSink>),
#[cfg(feature = "jackaudio-backend")]
("jackaudio", mk_sink::<JackSink>),
#[cfg(feature = "gstreamer-backend")]
("gstreamer", mk_sink::<GstreamerSink>),
#[cfg(feature = "rodio-backend")]
("rodio", rodio::mk_rodio),
(RodioSink::NAME, rodio::mk_rodio), // default goes first
#[cfg(feature = "alsa-backend")]
(AlsaSink::NAME, mk_sink::<AlsaSink>),
#[cfg(feature = "portaudio-backend")]
(PortAudioSink::NAME, mk_sink::<PortAudioSink>),
#[cfg(feature = "pulseaudio-backend")]
(PulseAudioSink::NAME, mk_sink::<PulseAudioSink>),
#[cfg(feature = "jackaudio-backend")]
(JackSink::NAME, mk_sink::<JackSink>),
#[cfg(feature = "gstreamer-backend")]
(GstreamerSink::NAME, mk_sink::<GstreamerSink>),
#[cfg(feature = "rodiojack-backend")]
("rodiojack", rodio::mk_rodiojack),
#[cfg(feature = "sdl-backend")]
("sdl", mk_sink::<SdlSink>),
("pipe", mk_sink::<StdoutSink>),
("subprocess", mk_sink::<SubprocessSink>),
(SdlSink::NAME, mk_sink::<SdlSink>),
(StdoutSink::NAME, mk_sink::<StdoutSink>),
(SubprocessSink::NAME, mk_sink::<SubprocessSink>),
];
pub fn find(name: Option<String>) -> Option<SinkBuilder> {

View file

@ -1,36 +1,66 @@
use super::{Open, Sink, SinkAsBytes};
use crate::config::AudioFormat;
use crate::convert::Converter;
use crate::decoder::AudioPacket;
use std::fs::OpenOptions;
use std::io::{self, Write};
pub struct StdoutSink {
output: Box<dyn Write>,
output: Option<Box<dyn Write>>,
path: Option<String>,
format: AudioFormat,
}
impl Open for StdoutSink {
fn open(path: Option<String>, format: AudioFormat) -> Self {
info!("Using pipe sink with format: {:?}", format);
let output: Box<dyn Write> = match path {
Some(path) => Box::new(OpenOptions::new().write(true).open(path).unwrap()),
_ => Box::new(io::stdout()),
};
Self { output, format }
Self {
output: None,
path,
format,
}
}
}
impl Sink for StdoutSink {
start_stop_noop!();
fn start(&mut self) -> io::Result<()> {
if self.output.is_none() {
let output: Box<dyn Write> = match self.path.as_deref() {
Some(path) => {
let open_op = OpenOptions::new()
.write(true)
.open(path)
.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
Box::new(open_op)
}
None => Box::new(io::stdout()),
};
self.output = Some(output);
}
Ok(())
}
sink_as_bytes!();
}
impl SinkAsBytes for StdoutSink {
fn write_bytes(&mut self, data: &[u8]) -> io::Result<()> {
self.output.write_all(data)?;
self.output.flush()?;
match self.output.as_deref_mut() {
Some(output) => {
output.write_all(data)?;
output.flush()?;
}
None => {
return Err(io::Error::new(io::ErrorKind::Other, "Output is None"));
}
}
Ok(())
}
}
impl StdoutSink {
pub const NAME: &'static str = "pipe";
}
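
Opening the output is now deferred to `start()`, so a bad pipe path surfaces as an `io::Error` instead of a panic in `open()`. A hedged sketch of the intended call order, with `path` and `format` standing in for caller-supplied values:

```rust
// Illustrative only; error handling in the real player is shown further below.
fn run_pipe_sink(path: Option<String>, format: AudioFormat) -> std::io::Result<()> {
    let mut sink = StdoutSink::open(path, format); // no I/O happens here anymore
    let mut converter = Converter::new(None);      // no dithering in this sketch
    sink.start()?;                                 // file or stdout is opened here
    sink.write(&AudioPacket::Samples(vec![0.0; 4410]), &mut converter)?;
    sink.stop()?;
    Ok(())
}
```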

View file

@ -1,8 +1,8 @@
use super::{Open, Sink};
use crate::config::AudioFormat;
use crate::convert;
use crate::convert::Converter;
use crate::decoder::AudioPacket;
use crate::player::{NUM_CHANNELS, SAMPLE_RATE};
use crate::{NUM_CHANNELS, SAMPLE_RATE};
use portaudio_rs::device::{get_default_output_index, DeviceIndex, DeviceInfo};
use portaudio_rs::stream::*;
use std::io;
@ -55,12 +55,9 @@ impl<'a> Open for PortAudioSink<'a> {
fn open(device: Option<String>, format: AudioFormat) -> PortAudioSink<'a> {
info!("Using PortAudio sink with format: {:?}", format);
warn!("This backend is known to panic on several platforms.");
warn!("Consider using some other backend, or better yet, contributing a fix.");
portaudio_rs::initialize().unwrap();
let device_idx = match device.as_ref().map(AsRef::as_ref) {
let device_idx = match device.as_deref() {
Some("?") => {
list_outputs();
exit(0)
@ -109,7 +106,7 @@ impl<'a> Sink for PortAudioSink<'a> {
Some(*$parameters),
SAMPLE_RATE as f64,
FRAMES_PER_BUFFER_UNSPECIFIED,
StreamFlags::empty(),
StreamFlags::DITHER_OFF, // no need to dither twice; use librespot dithering instead
None,
)
.unwrap(),
@ -136,15 +133,15 @@ impl<'a> Sink for PortAudioSink<'a> {
}};
}
match self {
Self::F32(stream, _parameters) => stop_sink!(ref mut stream),
Self::S32(stream, _parameters) => stop_sink!(ref mut stream),
Self::S16(stream, _parameters) => stop_sink!(ref mut stream),
Self::F32(stream, _) => stop_sink!(ref mut stream),
Self::S32(stream, _) => stop_sink!(ref mut stream),
Self::S16(stream, _) => stop_sink!(ref mut stream),
};
Ok(())
}
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
fn write(&mut self, packet: &AudioPacket, converter: &mut Converter) -> io::Result<()> {
macro_rules! write_sink {
(ref mut $stream: expr, $samples: expr) => {
$stream.as_mut().unwrap().write($samples)
@ -154,14 +151,15 @@ impl<'a> Sink for PortAudioSink<'a> {
let samples = packet.samples();
let result = match self {
Self::F32(stream, _parameters) => {
write_sink!(ref mut stream, samples)
let samples_f32: &[f32] = &converter.f64_to_f32(samples);
write_sink!(ref mut stream, samples_f32)
}
Self::S32(stream, _parameters) => {
let samples_s32: &[i32] = &convert::to_s32(samples);
let samples_s32: &[i32] = &converter.f64_to_s32(samples);
write_sink!(ref mut stream, samples_s32)
}
Self::S16(stream, _parameters) => {
let samples_s16: &[i16] = &convert::to_s16(samples);
let samples_s16: &[i16] = &converter.f64_to_s16(samples);
write_sink!(ref mut stream, samples_s16)
}
};
@ -180,3 +178,7 @@ impl<'a> Drop for PortAudioSink<'a> {
portaudio_rs::terminate().unwrap();
}
}
impl<'a> PortAudioSink<'a> {
pub const NAME: &'static str = "portaudio";
}

View file

@ -1,7 +1,8 @@
use super::{Open, Sink, SinkAsBytes};
use crate::config::AudioFormat;
use crate::convert::Converter;
use crate::decoder::AudioPacket;
use crate::player::{NUM_CHANNELS, SAMPLE_RATE};
use crate::{NUM_CHANNELS, SAMPLE_RATE};
use libpulse_binding::{self as pulse, stream::Direction};
use libpulse_simple_binding::Simple;
use std::io;
@ -22,11 +23,14 @@ impl Open for PulseAudioSink {
// PulseAudio calls S24 and S24_3 different from the rest of the world
let pulse_format = match format {
AudioFormat::F32 => pulse::sample::Format::F32le,
AudioFormat::S32 => pulse::sample::Format::S32le,
AudioFormat::S24 => pulse::sample::Format::S24_32le,
AudioFormat::S24_3 => pulse::sample::Format::S24le,
AudioFormat::S16 => pulse::sample::Format::S16le,
AudioFormat::F32 => pulse::sample::Format::FLOAT32NE,
AudioFormat::S32 => pulse::sample::Format::S32NE,
AudioFormat::S24 => pulse::sample::Format::S24_32NE,
AudioFormat::S24_3 => pulse::sample::Format::S24NE,
AudioFormat::S16 => pulse::sample::Format::S16NE,
_ => {
unimplemented!("PulseAudio currently does not support {:?} output", format)
}
};
let ss = pulse::sample::Spec {
@ -51,7 +55,7 @@ impl Sink for PulseAudioSink {
return Ok(());
}
let device = self.device.as_ref().map(|s| (*s).as_str());
let device = self.device.as_deref();
let result = Simple::new(
None, // Use the default server.
APP_NAME, // Our application's name.
@ -100,3 +104,7 @@ impl SinkAsBytes for PulseAudioSink {
}
}
}
impl PulseAudioSink {
pub const NAME: &'static str = "pulseaudio";
}

View file

@ -1,14 +1,15 @@
use std::process::exit;
use std::{io, thread, time};
use std::time::Duration;
use std::{io, thread};
use cpal::traits::{DeviceTrait, HostTrait};
use thiserror::Error;
use super::Sink;
use crate::config::AudioFormat;
use crate::convert;
use crate::convert::Converter;
use crate::decoder::AudioPacket;
use crate::player::{NUM_CHANNELS, SAMPLE_RATE};
use crate::{NUM_CHANNELS, SAMPLE_RATE};
#[cfg(all(
feature = "rodiojack-backend",
@ -174,18 +175,20 @@ pub fn open(host: cpal::Host, device: Option<String>, format: AudioFormat) -> Ro
}
impl Sink for RodioSink {
start_stop_noop!();
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
fn write(&mut self, packet: &AudioPacket, converter: &mut Converter) -> io::Result<()> {
let samples = packet.samples();
match self.format {
AudioFormat::F32 => {
let source =
rodio::buffer::SamplesBuffer::new(NUM_CHANNELS as u16, SAMPLE_RATE, samples);
let samples_f32: &[f32] = &converter.f64_to_f32(samples);
let source = rodio::buffer::SamplesBuffer::new(
NUM_CHANNELS as u16,
SAMPLE_RATE,
samples_f32,
);
self.rodio_sink.append(source);
}
AudioFormat::S16 => {
let samples_s16: &[i16] = &convert::to_s16(samples);
let samples_s16: &[i16] = &converter.f64_to_s16(samples);
let source = rodio::buffer::SamplesBuffer::new(
NUM_CHANNELS as u16,
SAMPLE_RATE,
@ -201,8 +204,12 @@ impl Sink for RodioSink {
// 44100 elements --> about 27 chunks
while self.rodio_sink.len() > 26 {
// sleep and wait for rodio to drain a bit
thread::sleep(time::Duration::from_millis(10));
thread::sleep(Duration::from_millis(10));
}
Ok(())
}
}
impl RodioSink {
pub const NAME: &'static str = "rodio";
}

View file

@ -1,10 +1,11 @@
use super::{Open, Sink};
use crate::config::AudioFormat;
use crate::convert;
use crate::convert::Converter;
use crate::decoder::AudioPacket;
use crate::player::{NUM_CHANNELS, SAMPLE_RATE};
use crate::{NUM_CHANNELS, SAMPLE_RATE};
use sdl2::audio::{AudioQueue, AudioSpecDesired};
use std::{io, thread, time};
use std::time::Duration;
use std::{io, thread};
pub enum SdlSink {
F32(AudioQueue<f32>),
@ -81,12 +82,12 @@ impl Sink for SdlSink {
Ok(())
}
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
fn write(&mut self, packet: &AudioPacket, converter: &mut Converter) -> io::Result<()> {
macro_rules! drain_sink {
($queue: expr, $size: expr) => {{
// sleep and wait for sdl thread to drain the queue a bit
while $queue.size() > (NUM_CHANNELS as u32 * $size as u32 * SAMPLE_RATE) {
thread::sleep(time::Duration::from_millis(10));
thread::sleep(Duration::from_millis(10));
}
}};
}
@ -94,16 +95,17 @@ impl Sink for SdlSink {
let samples = packet.samples();
match self {
Self::F32(queue) => {
let samples_f32: &[f32] = &converter.f64_to_f32(samples);
drain_sink!(queue, AudioFormat::F32.size());
queue.queue(samples)
queue.queue(samples_f32)
}
Self::S32(queue) => {
let samples_s32: &[i32] = &convert::to_s32(samples);
let samples_s32: &[i32] = &converter.f64_to_s32(samples);
drain_sink!(queue, AudioFormat::S32.size());
queue.queue(samples_s32)
}
Self::S16(queue) => {
let samples_s16: &[i16] = &convert::to_s16(samples);
let samples_s16: &[i16] = &converter.f64_to_s16(samples);
drain_sink!(queue, AudioFormat::S16.size());
queue.queue(samples_s16)
}
@ -111,3 +113,7 @@ impl Sink for SdlSink {
Ok(())
}
}
impl SdlSink {
pub const NAME: &'static str = "sdl";
}

View file

@ -1,5 +1,6 @@
use super::{Open, Sink, SinkAsBytes};
use crate::config::AudioFormat;
use crate::convert::Converter;
use crate::decoder::AudioPacket;
use shell_words::split;
@ -61,3 +62,7 @@ impl SinkAsBytes for SubprocessSink {
Ok(())
}
}
impl SubprocessSink {
pub const NAME: &'static str = "subprocess";
}

View file

@ -1,9 +1,10 @@
use super::player::NormalisationData;
use super::player::db_to_ratio;
use crate::convert::i24;
pub use crate::dither::{mk_ditherer, DithererBuilder, TriangularDitherer};
use std::convert::TryFrom;
use std::mem;
use std::str::FromStr;
use std::time::Duration;
#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]
pub enum Bitrate {
@ -32,6 +33,7 @@ impl Default for Bitrate {
#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]
pub enum AudioFormat {
F64,
F32,
S32,
S24,
@ -39,10 +41,11 @@ pub enum AudioFormat {
S16,
}
impl TryFrom<&String> for AudioFormat {
type Error = ();
fn try_from(s: &String) -> Result<Self, Self::Error> {
match s.to_uppercase().as_str() {
impl FromStr for AudioFormat {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.to_uppercase().as_ref() {
"F64" => Ok(Self::F64),
"F32" => Ok(Self::F32),
"S32" => Ok(Self::S32),
"S24" => Ok(Self::S24),
@ -64,6 +67,8 @@ impl AudioFormat {
#[allow(dead_code)]
pub fn size(&self) -> usize {
match self {
Self::F64 => mem::size_of::<f64>(),
Self::F32 => mem::size_of::<f32>(),
Self::S24_3 => mem::size_of::<i24>(),
Self::S16 => mem::size_of::<i16>(),
_ => mem::size_of::<i32>(), // S32 and S24 are both stored in i32
@ -80,7 +85,7 @@ pub enum NormalisationType {
impl FromStr for NormalisationType {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
match s.to_lowercase().as_ref() {
"album" => Ok(Self::Album),
"track" => Ok(Self::Track),
_ => Err(()),
@ -103,7 +108,7 @@ pub enum NormalisationMethod {
impl FromStr for NormalisationMethod {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
match s.to_lowercase().as_ref() {
"basic" => Ok(Self::Basic),
"dynamic" => Ok(Self::Dynamic),
_ => Err(()),
@ -117,35 +122,81 @@ impl Default for NormalisationMethod {
}
}
#[derive(Clone, Debug)]
#[derive(Clone)]
pub struct PlayerConfig {
pub bitrate: Bitrate,
pub gapless: bool,
pub passthrough: bool,
pub normalisation: bool,
pub normalisation_type: NormalisationType,
pub normalisation_method: NormalisationMethod,
pub normalisation_pregain: f32,
pub normalisation_threshold: f32,
pub normalisation_attack: f32,
pub normalisation_release: f32,
pub normalisation_knee: f32,
pub gapless: bool,
pub passthrough: bool,
pub normalisation_pregain: f64,
pub normalisation_threshold: f64,
pub normalisation_attack: Duration,
pub normalisation_release: Duration,
pub normalisation_knee: f64,
// pass function pointers so they can be lazily instantiated *after* spawning a thread
// (thereby circumventing Send bounds that they might not satisfy)
pub ditherer: Option<DithererBuilder>,
}
impl Default for PlayerConfig {
fn default() -> PlayerConfig {
PlayerConfig {
fn default() -> Self {
Self {
bitrate: Bitrate::default(),
gapless: true,
normalisation: false,
normalisation_type: NormalisationType::default(),
normalisation_method: NormalisationMethod::default(),
normalisation_pregain: 0.0,
normalisation_threshold: NormalisationData::db_to_ratio(-1.0),
normalisation_attack: 0.005,
normalisation_release: 0.1,
normalisation_threshold: db_to_ratio(-1.0),
normalisation_attack: Duration::from_millis(5),
normalisation_release: Duration::from_millis(100),
normalisation_knee: 1.0,
gapless: true,
passthrough: false,
ditherer: Some(mk_ditherer::<TriangularDitherer>),
}
}
}
// the fields of these variants hold the volume control range in dB
#[derive(Clone, Copy, Debug)]
pub enum VolumeCtrl {
Cubic(f64),
Fixed,
Linear,
Log(f64),
}
impl FromStr for VolumeCtrl {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
Self::from_str_with_range(s, Self::DEFAULT_DB_RANGE)
}
}
impl Default for VolumeCtrl {
fn default() -> VolumeCtrl {
VolumeCtrl::Log(Self::DEFAULT_DB_RANGE)
}
}
impl VolumeCtrl {
pub const MAX_VOLUME: u16 = u16::MAX;
// Taken from: https://www.dr-lex.be/info-stuff/volumecontrols.html
pub const DEFAULT_DB_RANGE: f64 = 60.0;
pub fn from_str_with_range(s: &str, db_range: f64) -> Result<Self, <Self as FromStr>::Err> {
use self::VolumeCtrl::*;
match s.to_lowercase().as_ref() {
"cubic" => Ok(Cubic(db_range)),
"fixed" => Ok(Fixed),
"linear" => Ok(Linear),
"log" => Ok(Log(db_range)),
_ => Err(()),
}
}
}
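
`VolumeCtrl` now parses from a string and carries its dB range with it. A hedged sketch of how a CLI value might be turned into a control; the actual `--volume-ctrl` and `--volume-range` handling lives in the librespot binary and is not shown in this diff:

```rust
use std::str::FromStr;

// Parse the user's choice, optionally with a custom dB range.
let default_ctrl = VolumeCtrl::from_str("log").unwrap_or_default();        // Log(60.0)
let custom_ctrl = VolumeCtrl::from_str_with_range("cubic", 40.0).unwrap(); // Cubic(40.0)
assert!(matches!(custom_ctrl, VolumeCtrl::Cubic(range) if (range - 40.0).abs() < f64::EPSILON));
```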

View file

@ -1,3 +1,4 @@
use crate::dither::{Ditherer, DithererBuilder};
use zerocopy::AsBytes;
#[derive(AsBytes, Copy, Clone, Debug)]
@ -5,52 +6,122 @@ use zerocopy::AsBytes;
#[repr(transparent)]
pub struct i24([u8; 3]);
impl i24 {
fn pcm_from_i32(sample: i32) -> Self {
// drop the least significant byte
let [a, b, c, _d] = (sample >> 8).to_le_bytes();
i24([a, b, c])
fn from_s24(sample: i32) -> Self {
// trim the padding in the most significant byte
#[allow(unused_variables)]
let [a, b, c, d] = sample.to_ne_bytes();
#[cfg(target_endian = "little")]
return Self([a, b, c]);
#[cfg(target_endian = "big")]
return Self([b, c, d]);
}
}
// Losslessly represent [-1.0, 1.0] to [$type::MIN, $type::MAX] while maintaining DC linearity.
macro_rules! convert_samples_to {
($type: ident, $samples: expr) => {
convert_samples_to!($type, $samples, 0)
};
($type: ident, $samples: expr, $drop_bits: expr) => {
$samples
pub struct Converter {
ditherer: Option<Box<dyn Ditherer>>,
}
impl Converter {
pub fn new(dither_config: Option<DithererBuilder>) -> Self {
if let Some(ref ditherer_builder) = dither_config {
let ditherer = (ditherer_builder)();
info!("Converting with ditherer: {}", ditherer.name());
Self {
ditherer: Some(ditherer),
}
} else {
Self { ditherer: None }
}
}
/// To convert PCM samples from floating point normalized as `-1.0..=1.0`
/// to 32-bit signed integer, multiply by 2147483648 (0x80000000) and
/// saturate at the bounds of `i32`.
const SCALE_S32: f64 = 2147483648.;
/// To convert PCM samples from floating point normalized as `-1.0..=1.0`
/// to 24-bit signed integer, multiply by 8388608 (0x800000) and saturate
/// at the bounds of `i24`.
const SCALE_S24: f64 = 8388608.;
/// To convert PCM samples from floating point normalized as `-1.0..=1.0`
/// to 16-bit signed integer, multiply by 32768 (0x8000) and saturate at
/// the bounds of `i16`. When the samples were encoded using the same
/// scaling factor, like the reference Vorbis encoder does, this makes
/// conversions transparent.
const SCALE_S16: f64 = 32768.;
pub fn scale(&mut self, sample: f64, factor: f64) -> f64 {
let dither = match self.ditherer {
Some(ref mut d) => d.noise(),
None => 0.0,
};
// From the many float to int conversion methods available, match what
// the reference Vorbis implementation uses: sample * 32768 (for 16 bit)
let int_value = sample * factor + dither;
// Casting float to integer rounds towards zero by default, i.e. it
// truncates, and that generates larger error than rounding to nearest.
int_value.round()
}
// Special case for samples packed in a word of greater bit depth (e.g.
// S24): clamp between min and max to ensure that the most significant
// byte is zero. Otherwise, dithering may cause an overflow. This is not
// necessary for other formats, because casting to integer will saturate
// to the bounds of the primitive.
pub fn clamping_scale(&mut self, sample: f64, factor: f64) -> f64 {
let int_value = self.scale(sample, factor);
// In two's complement, there are more negative than positive values.
let min = -factor;
let max = factor - 1.0;
if int_value < min {
return min;
} else if int_value > max {
return max;
}
int_value
}
pub fn f64_to_f32(&mut self, samples: &[f64]) -> Vec<f32> {
samples.iter().map(|sample| *sample as f32).collect()
}
pub fn f64_to_s32(&mut self, samples: &[f64]) -> Vec<i32> {
samples
.iter()
.map(|sample| self.scale(*sample, Self::SCALE_S32) as i32)
.collect()
}
// S24 is 24-bit PCM packed in an upper 32-bit word
pub fn f64_to_s24(&mut self, samples: &[f64]) -> Vec<i32> {
samples
.iter()
.map(|sample| self.clamping_scale(*sample, Self::SCALE_S24) as i32)
.collect()
}
// S24_3 is 24-bit PCM in a 3-byte array
pub fn f64_to_s24_3(&mut self, samples: &[f64]) -> Vec<i24> {
samples
.iter()
.map(|sample| {
// Losslessly represent [-1.0, 1.0] to [$type::MIN, $type::MAX]
// while maintaining DC linearity. There is nothing to be gained
// by doing this in f64, as the significand of a f32 is 24 bits,
// just like the maximum bit depth we are converting to.
let int_value = *sample * (std::$type::MAX as f32 + 0.5) - 0.5;
// Casting floats to ints truncates by default, which results
// in larger quantization error than rounding arithmetically.
// Flooring is faster, but again with larger error.
int_value.round() as $type >> $drop_bits
// Not as DRY as calling f32_to_s24 first, but this saves iterating
// over all samples twice.
let int_value = self.clamping_scale(*sample, Self::SCALE_S24) as i32;
i24::from_s24(int_value)
})
.collect()
};
}
}
pub fn to_s32(samples: &[f32]) -> Vec<i32> {
convert_samples_to!(i32, samples)
}
pub fn to_s24(samples: &[f32]) -> Vec<i32> {
convert_samples_to!(i32, samples, 8)
}
pub fn to_s24_3(samples: &[f32]) -> Vec<i24> {
to_s32(samples)
.iter()
.map(|sample| i24::pcm_from_i32(*sample))
.collect()
}
pub fn to_s16(samples: &[f32]) -> Vec<i16> {
convert_samples_to!(i16, samples)
pub fn f64_to_s16(&mut self, samples: &[f64]) -> Vec<i16> {
samples
.iter()
.map(|sample| self.scale(*sample, Self::SCALE_S16) as i16)
.collect()
}
}
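
The conversion path is now f64 in, integers out, with dithering applied inside `scale`. A hedged sketch of converting one block of samples to S16 with the triangular ditherer from the new dither module:

```rust
// Assumes `Converter`, `mk_ditherer` and `TriangularDitherer` from this commit.
let mut converter = Converter::new(Some(mk_ditherer::<TriangularDitherer>));
let samples: Vec<f64> = vec![0.0, 0.5, -0.5, 1.0];
let s16: Vec<i16> = converter.f64_to_s16(&samples);
assert_eq!(s16.len(), samples.len());
// Full scale 1.0 becomes 32768 + dither before the cast; `as i16` saturates,
// so the output tops out at i16::MAX (32767) rather than wrapping.
```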

View file

@ -1,10 +1,12 @@
use super::{AudioDecoder, AudioError, AudioPacket};
use lewton::inside_ogg::OggStreamReader;
use lewton::samples::InterleavedSamples;
use std::error;
use std::fmt;
use std::io::{Read, Seek};
use std::time::Duration;
pub struct VorbisDecoder<R: Read + Seek>(OggStreamReader<R>);
pub struct VorbisError(lewton::VorbisError);
@ -23,7 +25,7 @@ where
R: Read + Seek,
{
fn seek(&mut self, ms: i64) -> Result<(), AudioError> {
let absgp = ms * 44100 / 1000;
let absgp = Duration::from_millis(ms as u64 * crate::SAMPLE_RATE as u64).as_secs();
match self.0.seek_absgp_pg(absgp as u64) {
Ok(_) => Ok(()),
Err(err) => Err(AudioError::VorbisError(err.into())),
@ -35,11 +37,8 @@ where
use lewton::OggReadError::NoCapturePatternFound;
use lewton::VorbisError::{BadAudio, OggError};
loop {
match self
.0
.read_dec_packet_generic::<lewton::samples::InterleavedSamples<f32>>()
{
Ok(Some(packet)) => return Ok(Some(AudioPacket::Samples(packet.samples))),
match self.0.read_dec_packet_generic::<InterleavedSamples<f32>>() {
Ok(Some(packet)) => return Ok(Some(AudioPacket::samples_from_f32(packet.samples))),
Ok(None) => return Ok(None),
Err(BadAudio(AudioIsHeader)) => (),

View file

@ -1,89 +0,0 @@
#[cfg(feature = "with-tremor")]
use librespot_tremor as vorbis;
use super::{AudioDecoder, AudioError, AudioPacket};
use std::error;
use std::fmt;
use std::io::{Read, Seek};
pub struct VorbisDecoder<R: Read + Seek>(vorbis::Decoder<R>);
pub struct VorbisError(vorbis::VorbisError);
impl<R> VorbisDecoder<R>
where
R: Read + Seek,
{
pub fn new(input: R) -> Result<VorbisDecoder<R>, VorbisError> {
Ok(VorbisDecoder(vorbis::Decoder::new(input)?))
}
}
impl<R> AudioDecoder for VorbisDecoder<R>
where
R: Read + Seek,
{
#[cfg(not(feature = "with-tremor"))]
fn seek(&mut self, ms: i64) -> Result<(), AudioError> {
self.0.time_seek(ms as f64 / 1000f64)?;
Ok(())
}
#[cfg(feature = "with-tremor")]
fn seek(&mut self, ms: i64) -> Result<(), AudioError> {
self.0.time_seek(ms)?;
Ok(())
}
fn next_packet(&mut self) -> Result<Option<AudioPacket>, AudioError> {
loop {
match self.0.packets().next() {
Some(Ok(packet)) => {
// Losslessly represent [-32768, 32767] to [-1.0, 1.0] while maintaining DC linearity.
return Ok(Some(AudioPacket::Samples(
packet
.data
.iter()
.map(|sample| {
((*sample as f64 + 0.5) / (std::i16::MAX as f64 + 0.5)) as f32
})
.collect(),
)));
}
None => return Ok(None),
Some(Err(vorbis::VorbisError::Hole)) => (),
Some(Err(err)) => return Err(err.into()),
}
}
}
}
impl From<vorbis::VorbisError> for VorbisError {
fn from(err: vorbis::VorbisError) -> VorbisError {
VorbisError(err)
}
}
impl fmt::Debug for VorbisError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.0, f)
}
}
impl fmt::Display for VorbisError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.0, f)
}
}
impl error::Error for VorbisError {
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
error::Error::source(&self.0)
}
}
impl From<vorbis::VorbisError> for AudioError {
fn from(err: vorbis::VorbisError) -> AudioError {
AudioError::VorbisError(VorbisError(err))
}
}

View file

@ -1,27 +1,23 @@
use std::fmt;
use cfg_if::cfg_if;
cfg_if! {
if #[cfg(any(feature = "with-tremor", feature = "with-vorbis"))] {
mod libvorbis_decoder;
pub use libvorbis_decoder::{VorbisDecoder, VorbisError};
} else {
mod lewton_decoder;
pub use lewton_decoder::{VorbisDecoder, VorbisError};
}
}
mod lewton_decoder;
pub use lewton_decoder::{VorbisDecoder, VorbisError};
mod passthrough_decoder;
pub use passthrough_decoder::{PassthroughDecoder, PassthroughError};
pub enum AudioPacket {
Samples(Vec<f32>),
Samples(Vec<f64>),
OggData(Vec<u8>),
}
impl AudioPacket {
pub fn samples(&self) -> &[f32] {
pub fn samples_from_f32(f32_samples: Vec<f32>) -> Self {
let f64_samples = f32_samples.iter().map(|sample| *sample as f64).collect();
AudioPacket::Samples(f64_samples)
}
pub fn samples(&self) -> &[f64] {
match self {
AudioPacket::Samples(s) => s,
AudioPacket::OggData(_) => panic!("can't return OggData on samples"),

View file

@ -1,8 +1,10 @@
// Passthrough decoder for librespot
use super::{AudioDecoder, AudioError, AudioPacket};
use crate::SAMPLE_RATE;
use ogg::{OggReadError, Packet, PacketReader, PacketWriteEndInfo, PacketWriter};
use std::fmt;
use std::io::{Read, Seek};
use std::time::Duration;
use std::time::{SystemTime, UNIX_EPOCH};
fn get_header<T>(code: u8, rdr: &mut PacketReader<T>) -> Result<Box<[u8]>, PassthroughError>
@ -12,7 +14,7 @@ where
let pck: Packet = rdr.read_packet_expected()?;
let pkt_type = pck.data[0];
debug!("Vorbis header type{}", &pkt_type);
debug!("Vorbis header type {}", &pkt_type);
if pkt_type != code {
return Err(PassthroughError(OggReadError::InvalidData));
@ -96,7 +98,10 @@ impl<R: Read + Seek> AudioDecoder for PassthroughDecoder<R> {
self.stream_serial += 1;
// hard-coded to 44.1 kHz
match self.rdr.seek_absgp(None, (ms * 44100 / 1000) as u64) {
match self.rdr.seek_absgp(
None,
Duration::from_millis(ms as u64 * SAMPLE_RATE as u64).as_secs(),
) {
Ok(_) => {
// need to set some offset for next_page()
let pck = self.rdr.read_packet().unwrap().unwrap();

150
playback/src/dither.rs Normal file
View file

@ -0,0 +1,150 @@
use rand::rngs::ThreadRng;
use rand_distr::{Distribution, Normal, Triangular, Uniform};
use std::fmt;
const NUM_CHANNELS: usize = 2;
// Dithering lowers digital-to-analog conversion ("requantization") error,
// linearizing output, lowering distortion and replacing it with a constant,
// fixed noise level, which is more pleasant to the ear than the distortion.
//
// Guidance:
//
// * On S24, S24_3 and S16, the default is to use triangular dithering.
// Depending on personal preference you may use Gaussian dithering instead;
// it's not as good objectively, but it may be preferred subjectively if
// you are looking for a more "analog" sound akin to tape hiss.
//
// * Advanced users who know that they have a DAC without noise shaping have
// a third option: high-passed dithering, which is like triangular dithering
// except that it moves dithering noise up in frequency where it is less
// audible. Note: 99% of DACs are of delta-sigma design with noise shaping,
// so unless you have a multibit / R2R DAC, or otherwise know what you are
// doing, this is not for you.
//
// * Don't dither or shape noise on S32 or F32. On F32 it's not supported
// anyway (there are no integer conversions and so no rounding errors) and
// on S32 the noise level is so far down that it is simply inaudible even
// after volume normalisation and control.
//
pub trait Ditherer {
fn new() -> Self
where
Self: Sized;
fn name(&self) -> &'static str;
fn noise(&mut self) -> f64;
}
impl fmt::Display for dyn Ditherer {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.name())
}
}
// Implementation note: we save the handle to ThreadRng so it doesn't require
// a lookup on each call (which is on each sample!). This is ~2.5x as fast.
// Downside is that it is not Send so we cannot move it around player threads.
//
pub struct TriangularDitherer {
cached_rng: ThreadRng,
distribution: Triangular<f64>,
}
impl Ditherer for TriangularDitherer {
fn new() -> Self {
Self {
cached_rng: rand::thread_rng(),
// 2 LSB peak-to-peak needed to linearize the response:
distribution: Triangular::new(-1.0, 1.0, 0.0).unwrap(),
}
}
fn name(&self) -> &'static str {
Self::NAME
}
fn noise(&mut self) -> f64 {
self.distribution.sample(&mut self.cached_rng)
}
}
impl TriangularDitherer {
pub const NAME: &'static str = "tpdf";
}
pub struct GaussianDitherer {
cached_rng: ThreadRng,
distribution: Normal<f64>,
}
impl Ditherer for GaussianDitherer {
fn new() -> Self {
Self {
cached_rng: rand::thread_rng(),
// 1/2 LSB RMS needed to linearize the response:
distribution: Normal::new(0.0, 0.5).unwrap(),
}
}
fn name(&self) -> &'static str {
Self::NAME
}
fn noise(&mut self) -> f64 {
self.distribution.sample(&mut self.cached_rng)
}
}
impl GaussianDitherer {
pub const NAME: &'static str = "gpdf";
}
pub struct HighPassDitherer {
active_channel: usize,
previous_noises: [f64; NUM_CHANNELS],
cached_rng: ThreadRng,
distribution: Uniform<f64>,
}
impl Ditherer for HighPassDitherer {
fn new() -> Self {
Self {
active_channel: 0,
previous_noises: [0.0; NUM_CHANNELS],
cached_rng: rand::thread_rng(),
distribution: Uniform::new_inclusive(-0.5, 0.5), // 1 LSB +/- 1 LSB (previous) = 2 LSB
}
}
fn name(&self) -> &'static str {
Self::NAME
}
fn noise(&mut self) -> f64 {
let new_noise = self.distribution.sample(&mut self.cached_rng);
let high_passed_noise = new_noise - self.previous_noises[self.active_channel];
self.previous_noises[self.active_channel] = new_noise;
self.active_channel ^= 1;
high_passed_noise
}
}
impl HighPassDitherer {
pub const NAME: &'static str = "tpdf_hp";
}
pub fn mk_ditherer<D: Ditherer + 'static>() -> Box<dyn Ditherer> {
Box::new(D::new())
}
pub type DithererBuilder = fn() -> Box<dyn Ditherer>;
pub fn find_ditherer(name: Option<String>) -> Option<DithererBuilder> {
match name.as_deref() {
Some(TriangularDitherer::NAME) => Some(mk_ditherer::<TriangularDitherer>),
Some(GaussianDitherer::NAME) => Some(mk_ditherer::<GaussianDitherer>),
Some(HighPassDitherer::NAME) => Some(mk_ditherer::<HighPassDitherer>),
_ => None,
}
}
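
How the pieces fit together: `find_ditherer` resolves the `--dither` name to a `DithererBuilder`, which is only instantiated (by `Converter::new`, or by hand as below) after the player thread has been spawned, because `ThreadRng` is not `Send`. A hedged sketch:

```rust
// The option parsing itself lives in the librespot binary, not in this file.
let builder: Option<DithererBuilder> = find_ditherer(Some("tpdf_hp".to_string()));
let mut ditherer = builder.map(|mk| mk()); // instantiated lazily, after thread spawn
if let Some(d) = ditherer.as_mut() {
    let noise = d.noise(); // one high-passed noise sample, in LSB units
    assert!(noise.abs() <= 1.0);
}
```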

View file

@ -9,5 +9,10 @@ pub mod audio_backend;
pub mod config;
mod convert;
mod decoder;
pub mod dither;
pub mod mixer;
pub mod player;
pub const SAMPLE_RATE: u32 = 44100;
pub const NUM_CHANNELS: u8 = 2;
pub const SAMPLES_PER_SECOND: u32 = SAMPLE_RATE as u32 * NUM_CHANNELS as u32;
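
A hedged illustration of the newly exported constants: one second of interleaved stereo audio at 44.1 kHz is 88,200 samples, or 705,600 bytes in the new f64 representation:

```rust
let bytes_per_second_f64 = SAMPLES_PER_SECOND as usize * std::mem::size_of::<f64>();
assert_eq!(SAMPLES_PER_SECOND, 88_200);
assert_eq!(bytes_per_second_f64, 705_600);
```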

View file

@ -1,218 +1,266 @@
use super::AudioFilter;
use super::{Mixer, MixerConfig};
use std::error::Error;
use crate::player::{db_to_ratio, ratio_to_db};
const SND_CTL_TLV_DB_GAIN_MUTE: i64 = -9999999;
use super::mappings::{LogMapping, MappedCtrl, VolumeMapping};
use super::{Mixer, MixerConfig, VolumeCtrl};
#[derive(Clone)]
struct AlsaMixerVolumeParams {
min: i64,
max: i64,
range: f64,
min_db: alsa::mixer::MilliBel,
max_db: alsa::mixer::MilliBel,
has_switch: bool,
}
use alsa::ctl::{ElemId, ElemIface};
use alsa::mixer::{MilliBel, SelemChannelId, SelemId};
use alsa::{Ctl, Round};
use std::ffi::CString;
#[derive(Clone)]
pub struct AlsaMixer {
config: MixerConfig,
params: AlsaMixerVolumeParams,
min: i64,
max: i64,
range: i64,
min_db: f64,
max_db: f64,
db_range: f64,
has_switch: bool,
is_softvol: bool,
use_linear_in_db: bool,
}
impl AlsaMixer {
fn pvol<T>(&self, vol: T, min: T, max: T) -> f64
where
T: std::ops::Sub + Copy,
f64: std::convert::From<<T as std::ops::Sub>::Output>,
{
f64::from(vol - min) / f64::from(max - min)
}
fn init_mixer(mut config: MixerConfig) -> Result<AlsaMixer, Box<dyn Error>> {
let mixer = alsa::mixer::Mixer::new(&config.card, false)?;
let sid = alsa::mixer::SelemId::new(&config.mixer, config.index);
let selem = mixer.find_selem(&sid).unwrap_or_else(|| {
panic!(
"Couldn't find simple mixer control for {},{}",
&config.mixer, &config.index,
)
});
let (min, max) = selem.get_playback_volume_range();
let (min_db, max_db) = selem.get_playback_db_range();
let hw_mix = selem
.get_playback_vol_db(alsa::mixer::SelemChannelId::mono())
.is_ok();
let has_switch = selem.has_playback_switch();
if min_db != alsa::mixer::MilliBel(SND_CTL_TLV_DB_GAIN_MUTE) {
warn!("Alsa min-db is not SND_CTL_TLV_DB_GAIN_MUTE!!");
}
info!(
"Alsa Mixer info min: {} ({:?}[dB]) -- max: {} ({:?}[dB]) HW: {:?}",
min, min_db, max, max_db, hw_mix
);
if config.mapped_volume && (max_db - min_db <= alsa::mixer::MilliBel(24)) {
warn!(
"Switching to linear volume mapping, control range: {:?}",
max_db - min_db
);
config.mapped_volume = false;
} else if !config.mapped_volume {
info!("Using Alsa linear volume");
}
if min_db != alsa::mixer::MilliBel(SND_CTL_TLV_DB_GAIN_MUTE) {
debug!("Alsa min-db is not SND_CTL_TLV_DB_GAIN_MUTE!!");
}
Ok(AlsaMixer {
config,
params: AlsaMixerVolumeParams {
min,
max,
range: (max - min) as f64,
min_db,
max_db,
has_switch,
},
})
}
fn map_volume(&self, set_volume: Option<u16>) -> Result<u16, Box<dyn Error>> {
let mixer = alsa::mixer::Mixer::new(&self.config.card, false)?;
let sid = alsa::mixer::SelemId::new(&*self.config.mixer, self.config.index);
let selem = mixer.find_selem(&sid).unwrap();
let cur_vol = selem
.get_playback_volume(alsa::mixer::SelemChannelId::mono())
.expect("Couldn't get current volume");
let cur_vol_db = selem
.get_playback_vol_db(alsa::mixer::SelemChannelId::mono())
.unwrap_or(alsa::mixer::MilliBel(-SND_CTL_TLV_DB_GAIN_MUTE));
let mut new_vol: u16 = 0;
trace!("Current alsa volume: {}{:?}", cur_vol, cur_vol_db);
match set_volume {
Some(vol) => {
if self.params.has_switch {
let is_muted = selem
.get_playback_switch(alsa::mixer::SelemChannelId::mono())
.map(|b| b == 0)
.unwrap_or(false);
if vol == 0 {
debug!("Toggling mute::True");
selem.set_playback_switch_all(0).expect("Can't switch mute");
return Ok(vol);
} else if is_muted {
debug!("Toggling mute::False");
selem.set_playback_switch_all(1).expect("Can't reset mute");
}
}
if self.config.mapped_volume {
// Cubic mapping ala alsamixer
// https://linux.die.net/man/1/alsamixer
// In alsamixer, the volume is mapped to a value that is more natural for a
// human ear. The mapping is designed so that the position in the interval is
// proportional to the volume as a human ear would perceive it, i.e. the
// position is the cubic root of the linear sample multiplication factor. For
// controls with a small range (24 dB or less), the mapping is linear in the dB
// values so that each step has the same size visually. TODO
// TODO: Check if min is not mute!
let vol_db = (self.pvol(vol, 0x0000, 0xFFFF).log10() * 6000.0).floor() as i64
+ self.params.max_db.0;
selem
.set_playback_db_all(alsa::mixer::MilliBel(vol_db), alsa::Round::Floor)
.expect("Couldn't set alsa dB volume");
debug!(
"Mapping volume [{:.3}%] {:?} [u16] ->> Alsa [{:.3}%] {:?} [dB] - {} [i64]",
self.pvol(vol, 0x0000, 0xFFFF) * 100.0,
vol,
self.pvol(
vol_db as f64,
self.params.min as f64,
self.params.max as f64
) * 100.0,
vol_db as f64 / 100.0,
vol_db
);
} else {
// Linear mapping
let alsa_volume =
((vol as f64 / 0xFFFF as f64) * self.params.range) as i64 + self.params.min;
selem
.set_playback_volume_all(alsa_volume)
.expect("Couldn't set alsa raw volume");
debug!(
"Mapping volume [{:.3}%] {:?} [u16] ->> Alsa [{:.3}%] {:?} [i64]",
self.pvol(vol, 0x0000, 0xFFFF) * 100.0,
vol,
self.pvol(
alsa_volume as f64,
self.params.min as f64,
self.params.max as f64
) * 100.0,
alsa_volume
);
};
}
None => {
new_vol = (((cur_vol - self.params.min) as f64 / self.params.range) * 0xFFFF as f64)
as u16;
debug!(
"Mapping volume [{:.3}%] {:?} [u16] <<- Alsa [{:.3}%] {:?} [i64]",
self.pvol(new_vol, 0x0000, 0xFFFF),
new_vol,
self.pvol(
cur_vol as f64,
self.params.min as f64,
self.params.max as f64
),
cur_vol
);
}
}
Ok(new_vol)
}
}
// min_db cannot be depended on to be mute. Also note that contrary to
// its name copied verbatim from Alsa, this is in millibel scale.
const SND_CTL_TLV_DB_GAIN_MUTE: MilliBel = MilliBel(-9999999);
const ZERO_DB: MilliBel = MilliBel(0);
impl Mixer for AlsaMixer {
fn open(config: Option<MixerConfig>) -> AlsaMixer {
let config = config.unwrap_or_default();
fn open(config: MixerConfig) -> Self {
info!(
"Setting up new mixer: card:{} mixer:{} index:{}",
config.card, config.mixer, config.index
"Mixing with alsa and volume control: {:?} for card: {} with mixer control: {},{}",
config.volume_ctrl, config.card, config.control, config.index,
);
AlsaMixer::init_mixer(config).expect("Error setting up mixer!")
let mut config = config; // clone
let mixer =
alsa::mixer::Mixer::new(&config.card, false).expect("Could not open Alsa mixer");
let simple_element = mixer
.find_selem(&SelemId::new(&config.control, config.index))
.expect("Could not find Alsa mixer control");
// Query capabilities
let has_switch = simple_element.has_playback_switch();
let is_softvol = simple_element
.get_playback_vol_db(SelemChannelId::mono())
.is_err();
// Query raw volume range
let (min, max) = simple_element.get_playback_volume_range();
let range = i64::abs(max - min);
// Query dB volume range -- note that Alsa exposes a different
// API for hardware and software mixers
let (min_millibel, max_millibel) = if is_softvol {
let control =
Ctl::new(&config.card, false).expect("Could not open Alsa softvol with that card");
let mut element_id = ElemId::new(ElemIface::Mixer);
element_id.set_name(
&CString::new(config.control.as_str())
.expect("Could not open Alsa softvol with that name"),
);
element_id.set_index(config.index);
let (min_millibel, mut max_millibel) = control
.get_db_range(&element_id)
.expect("Could not get Alsa softvol dB range");
// Alsa can report incorrect maximum volumes due to rounding
// errors. e.g. Alsa rounds [-60.0..0.0] in range [0..255] to
// step size 0.23. Then multiplying 0.23 by 255 incorrectly
// returns a dB range of 58.65 instead of 60 dB, from
// [-60.00..-1.35]. This workaround checks the default case
// where the maximum dB volume is expected to be 0, and cannot
// cover all cases.
if max_millibel != ZERO_DB {
warn!("Alsa mixer reported maximum dB != 0, which is suspect");
let reported_step_size = (max_millibel - min_millibel).0 / range;
let assumed_step_size = (ZERO_DB - min_millibel).0 / range;
if reported_step_size == assumed_step_size {
warn!("Alsa rounding error detected, setting maximum dB to {:.2} instead of {:.2}", ZERO_DB.to_db(), max_millibel.to_db());
max_millibel = ZERO_DB;
} else {
warn!("Please manually set with `--volume-ctrl` if this is incorrect");
}
}
(min_millibel, max_millibel)
} else {
let (mut min_millibel, max_millibel) = simple_element.get_playback_db_range();
// Some controls report that their minimum volume is mute, instead
// of their actual lowest dB setting before that.
if min_millibel == SND_CTL_TLV_DB_GAIN_MUTE && min < max {
debug!("Alsa mixer reported minimum dB as mute, trying workaround");
min_millibel = simple_element
.ask_playback_vol_db(min + 1)
.expect("Could not convert Alsa raw volume to dB volume");
}
(min_millibel, max_millibel)
};
let min_db = min_millibel.to_db() as f64;
let max_db = max_millibel.to_db() as f64;
let db_range = f64::abs(max_db - min_db);
// Synchronize the volume control dB range with the mixer control,
// unless it was already set with a command line option.
if !config.volume_ctrl.range_ok() {
config.volume_ctrl.set_db_range(db_range);
}
// For hardware controls with a small range (24 dB or less),
// force using the dB API with a linear mapping.
let mut use_linear_in_db = false;
if !is_softvol && db_range <= 24.0 {
use_linear_in_db = true;
config.volume_ctrl = VolumeCtrl::Linear;
}
debug!("Alsa mixer control is softvol: {}", is_softvol);
debug!("Alsa support for playback (mute) switch: {}", has_switch);
debug!("Alsa raw volume range: [{}..{}] ({})", min, max, range);
debug!(
"Alsa dB volume range: [{:.2}..{:.2}] ({:.2})",
min_db, max_db, db_range
);
debug!("Alsa forcing linear dB mapping: {}", use_linear_in_db);
Self {
config,
min,
max,
range,
min_db,
max_db,
db_range,
has_switch,
is_softvol,
use_linear_in_db,
}
}
fn start(&self) {}
fn stop(&self) {}
fn volume(&self) -> u16 {
match self.map_volume(None) {
Ok(vol) => vol,
Err(e) => {
error!("Error getting volume for <{}>, {:?}", self.config.card, e);
0
}
let mixer =
alsa::mixer::Mixer::new(&self.config.card, false).expect("Could not open Alsa mixer");
let simple_element = mixer
.find_selem(&SelemId::new(&self.config.control, self.config.index))
.expect("Could not find Alsa mixer control");
if self.switched_off() {
return 0;
}
let mut mapped_volume = if self.is_softvol {
let raw_volume = simple_element
.get_playback_volume(SelemChannelId::mono())
.expect("Could not get raw Alsa volume");
raw_volume as f64 / self.range as f64 - self.min as f64
} else {
let db_volume = simple_element
.get_playback_vol_db(SelemChannelId::mono())
.expect("Could not get Alsa dB volume")
.to_db() as f64;
if self.use_linear_in_db {
(db_volume - self.min_db) / self.db_range
} else if f64::abs(db_volume - SND_CTL_TLV_DB_GAIN_MUTE.to_db() as f64) <= f64::EPSILON
{
0.0
} else {
db_to_ratio(db_volume - self.max_db)
}
};
// see the comment in `set_volume` for why we are handling an antilog volume here
if mapped_volume > 0.0 && self.is_some_linear() {
mapped_volume = LogMapping::linear_to_mapped(mapped_volume, self.db_range);
}
self.config.volume_ctrl.from_mapped(mapped_volume)
}
fn set_volume(&self, volume: u16) {
match self.map_volume(Some(volume)) {
Ok(_) => (),
Err(e) => error!("Error setting volume for <{}>, {:?}", self.config.card, e),
}
}
let mixer =
alsa::mixer::Mixer::new(&self.config.card, false).expect("Could not open Alsa mixer");
let simple_element = mixer
.find_selem(&SelemId::new(&self.config.control, self.config.index))
.expect("Could not find Alsa mixer control");
fn get_audio_filter(&self) -> Option<Box<dyn AudioFilter + Send>> {
None
if self.has_switch {
if volume == 0 {
debug!("Disabling playback (setting mute) on Alsa");
simple_element
.set_playback_switch_all(0)
.expect("Could not disable playback (set mute) on Alsa");
} else if self.switched_off() {
debug!("Enabling playback (unsetting mute) on Alsa");
simple_element
.set_playback_switch_all(1)
.expect("Could not enable playback (unset mute) on Alsa");
}
}
let mut mapped_volume = self.config.volume_ctrl.to_mapped(volume);
// Alsa's linear algorithms map everything onto log. Alsa softvol does
// this internally. In the case of `use_linear_in_db` this happens
// automatically by virtue of the dB scale. This means that linear
// controls become log, log becomes log-on-log, and so on. To make
// the controls work as expected, perform an antilog calculation to
// counteract what Alsa will be doing to the set volume.
if mapped_volume > 0.0 && self.is_some_linear() {
mapped_volume = LogMapping::mapped_to_linear(mapped_volume, self.db_range);
}
if self.is_softvol {
let scaled_volume = (self.min as f64 + mapped_volume * self.range as f64) as i64;
debug!("Setting Alsa raw volume to {}", scaled_volume);
simple_element
.set_playback_volume_all(scaled_volume)
.expect("Could not set Alsa raw volume");
return;
}
let db_volume = if self.use_linear_in_db {
self.min_db + mapped_volume * self.db_range
} else if volume == 0 {
// prevent ratio_to_db(0.0) from returning -inf
SND_CTL_TLV_DB_GAIN_MUTE.to_db() as f64
} else {
ratio_to_db(mapped_volume) + self.max_db
};
debug!("Setting Alsa volume to {:.2} dB", db_volume);
simple_element
.set_playback_db_all(MilliBel::from_db(db_volume as f32), Round::Floor)
.expect("Could not set Alsa dB volume");
}
}
impl AlsaMixer {
pub const NAME: &'static str = "alsa";
fn switched_off(&self) -> bool {
if !self.has_switch {
return false;
}
let mixer =
alsa::mixer::Mixer::new(&self.config.card, false).expect("Could not open Alsa mixer");
let simple_element = mixer
.find_selem(&SelemId::new(&self.config.control, self.config.index))
.expect("Could not find Alsa mixer control");
simple_element
.get_playback_switch(SelemChannelId::mono())
.map(|playback| playback == 0)
.unwrap_or(false)
}
fn is_some_linear(&self) -> bool {
self.is_softvol || self.use_linear_in_db
}
}
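
A hedged recheck of the rounding-error workaround documented in `open` above, using the numbers from that comment (a softvol with raw range 0..255 over a nominal -60 dB):

```rust
// Alsa reports millibel steps of -6000 / 255 = -23, so the naive maximum comes
// out at -6000 + 255 * 23 = -135 (i.e. -1.35 dB) instead of 0 dB. Comparing
// the reported step size with the one assumed for a 0 dB maximum detects this.
let (min_mb, max_mb, range) = (-6000i64, -135i64, 255i64);
let reported_step_size = (max_mb - min_mb) / range; // 23
let assumed_step_size = (0 - min_mb) / range;       // 23
assert_eq!(reported_step_size, assumed_step_size);  // rounding error detected
```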

View file

@ -0,0 +1,163 @@
use super::VolumeCtrl;
use crate::player::db_to_ratio;
pub trait MappedCtrl {
fn to_mapped(&self, volume: u16) -> f64;
fn from_mapped(&self, mapped_volume: f64) -> u16;
fn db_range(&self) -> f64;
fn set_db_range(&mut self, new_db_range: f64);
fn range_ok(&self) -> bool;
}
impl MappedCtrl for VolumeCtrl {
fn to_mapped(&self, volume: u16) -> f64 {
// More than just an optimization, this ensures that zero volume is
// really mute (both the log and cubic equations would otherwise not
// reach zero).
if volume == 0 {
return 0.0;
} else if volume == Self::MAX_VOLUME {
// And limit in case of rounding errors (as is the case for log).
return 1.0;
}
let normalized_volume = volume as f64 / Self::MAX_VOLUME as f64;
let mapped_volume = if self.range_ok() {
match *self {
Self::Cubic(db_range) => {
CubicMapping::linear_to_mapped(normalized_volume, db_range)
}
Self::Log(db_range) => LogMapping::linear_to_mapped(normalized_volume, db_range),
_ => normalized_volume,
}
} else {
// Ensure not to return -inf or NaN due to division by zero.
error!(
"{:?} does not work with 0 dB range, using linear mapping instead",
self
);
normalized_volume
};
debug!(
"Input volume {} mapped to: {:.2}%",
volume,
mapped_volume * 100.0
);
mapped_volume
}
fn from_mapped(&self, mapped_volume: f64) -> u16 {
// More than just an optimization, this ensures that zero mapped volume
// is unmapped to non-negative real numbers (otherwise the log and cubic
// equations would respectively return -inf and -1/9.)
if f64::abs(mapped_volume - 0.0) <= f64::EPSILON {
return 0;
} else if f64::abs(mapped_volume - 1.0) <= f64::EPSILON {
return Self::MAX_VOLUME;
}
let unmapped_volume = if self.range_ok() {
match *self {
Self::Cubic(db_range) => CubicMapping::mapped_to_linear(mapped_volume, db_range),
Self::Log(db_range) => LogMapping::mapped_to_linear(mapped_volume, db_range),
_ => mapped_volume,
}
} else {
// Ensure not to return -inf or NaN due to division by zero.
error!(
"{:?} does not work with 0 dB range, using linear mapping instead",
self
);
mapped_volume
};
(unmapped_volume * Self::MAX_VOLUME as f64) as u16
}
fn db_range(&self) -> f64 {
match *self {
Self::Fixed => 0.0,
Self::Linear => Self::DEFAULT_DB_RANGE, // arbitrary, could be anything > 0
Self::Log(db_range) | Self::Cubic(db_range) => db_range,
}
}
fn set_db_range(&mut self, new_db_range: f64) {
match self {
Self::Cubic(ref mut db_range) | Self::Log(ref mut db_range) => *db_range = new_db_range,
_ => error!("Invalid to set dB range for volume control type {:?}", self),
}
debug!("Volume control is now {:?}", self)
}
fn range_ok(&self) -> bool {
self.db_range() > 0.0 || matches!(self, Self::Fixed | Self::Linear)
}
}
pub trait VolumeMapping {
fn linear_to_mapped(unmapped_volume: f64, db_range: f64) -> f64;
fn mapped_to_linear(mapped_volume: f64, db_range: f64) -> f64;
}
// Volume conversion taken from: https://www.dr-lex.be/info-stuff/volumecontrols.html#ideal2
//
// As the human auditory system has a logarithmic sensitivity curve, this
// mapping results in a near linear loudness experience with the listener.
pub struct LogMapping {}
impl VolumeMapping for LogMapping {
fn linear_to_mapped(normalized_volume: f64, db_range: f64) -> f64 {
let (db_ratio, ideal_factor) = Self::coefficients(db_range);
f64::exp(ideal_factor * normalized_volume) / db_ratio
}
fn mapped_to_linear(mapped_volume: f64, db_range: f64) -> f64 {
let (db_ratio, ideal_factor) = Self::coefficients(db_range);
f64::ln(db_ratio * mapped_volume) / ideal_factor
}
}
impl LogMapping {
fn coefficients(db_range: f64) -> (f64, f64) {
let db_ratio = db_to_ratio(db_range);
let ideal_factor = f64::ln(db_ratio);
(db_ratio, ideal_factor)
}
}
// Ported from: https://github.com/alsa-project/alsa-utils/blob/master/alsamixer/volume_mapping.c
// which in turn was inspired by: https://www.robotplanet.dk/audio/audio_gui_design/
//
// Though this mapping is computationally less expensive than the logarithmic
// mapping, it really does not matter as librespot memoizes the mapped value.
// Use this mapping if you have some reason to mimic Alsa's native mixer or
// prefer a more granular control in the upper volume range.
//
// Note: https://www.dr-lex.be/info-stuff/volumecontrols.html#ideal3 shows
// better approximations to the logarithmic curve but because we only intend
// to mimic Alsa here, we do not implement them. If your desire is to use a
// logarithmic mapping, then use that volume control.
pub struct CubicMapping {}
impl VolumeMapping for CubicMapping {
fn linear_to_mapped(normalized_volume: f64, db_range: f64) -> f64 {
let min_norm = Self::min_norm(db_range);
f64::powi(normalized_volume * (1.0 - min_norm) + min_norm, 3)
}
fn mapped_to_linear(mapped_volume: f64, db_range: f64) -> f64 {
let min_norm = Self::min_norm(db_range);
(mapped_volume.powf(1.0 / 3.0) - min_norm) / (1.0 - min_norm)
}
}
impl CubicMapping {
fn min_norm(db_range: f64) -> f64 {
// Note that this 60.0 is unrelated to DEFAULT_DB_RANGE.
// Instead, it's the cubic voltage to dB ratio.
f64::powf(10.0, -1.0 * db_range / 60.0)
}
}
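
A hedged round-trip check of the log mapping at its default 60 dB range: half volume on the wire maps to about -30 dB of attenuation (a ratio of roughly 0.0316), and mapping back recovers 0.5:

```rust
// Assumes `LogMapping`, the `VolumeMapping` trait and `VolumeCtrl` are in scope.
let mapped = LogMapping::linear_to_mapped(0.5, VolumeCtrl::DEFAULT_DB_RANGE);
// exp(ln(10^3) * 0.5) / 10^3 = 10^-1.5 ≈ 0.0316, i.e. about -30 dB
assert!((mapped - 0.0316).abs() < 0.001);
let back = LogMapping::mapped_to_linear(mapped, VolumeCtrl::DEFAULT_DB_RANGE);
assert!((back - 0.5).abs() < 1e-9);
```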

View file

@ -1,20 +1,28 @@
use crate::config::VolumeCtrl;
pub mod mappings;
use self::mappings::MappedCtrl;
pub trait Mixer: Send {
fn open(_: Option<MixerConfig>) -> Self
fn open(config: MixerConfig) -> Self
where
Self: Sized;
fn start(&self);
fn stop(&self);
fn set_volume(&self, volume: u16);
fn volume(&self) -> u16;
fn get_audio_filter(&self) -> Option<Box<dyn AudioFilter + Send>> {
None
}
}
pub trait AudioFilter {
fn modify_stream(&self, data: &mut [f32]);
fn modify_stream(&self, data: &mut [f64]);
}
pub mod softmixer;
use self::softmixer::SoftMixer;
#[cfg(feature = "alsa-backend")]
pub mod alsamixer;
#[cfg(feature = "alsa-backend")]
@ -23,36 +31,33 @@ use self::alsamixer::AlsaMixer;
#[derive(Debug, Clone)]
pub struct MixerConfig {
pub card: String,
pub mixer: String,
pub control: String,
pub index: u32,
pub mapped_volume: bool,
pub volume_ctrl: VolumeCtrl,
}
impl Default for MixerConfig {
fn default() -> MixerConfig {
MixerConfig {
card: String::from("default"),
mixer: String::from("PCM"),
control: String::from("PCM"),
index: 0,
mapped_volume: true,
volume_ctrl: VolumeCtrl::default(),
}
}
}
pub mod softmixer;
use self::softmixer::SoftMixer;
pub type MixerFn = fn(MixerConfig) -> Box<dyn Mixer>;
type MixerFn = fn(Option<MixerConfig>) -> Box<dyn Mixer>;
fn mk_sink<M: Mixer + 'static>(device: Option<MixerConfig>) -> Box<dyn Mixer> {
Box::new(M::open(device))
fn mk_sink<M: Mixer + 'static>(config: MixerConfig) -> Box<dyn Mixer> {
Box::new(M::open(config))
}
pub fn find<T: AsRef<str>>(name: Option<T>) -> Option<MixerFn> {
match name.as_ref().map(AsRef::as_ref) {
None | Some("softvol") => Some(mk_sink::<SoftMixer>),
pub fn find(name: Option<&str>) -> Option<MixerFn> {
match name {
None | Some(SoftMixer::NAME) => Some(mk_sink::<SoftMixer>),
#[cfg(feature = "alsa-backend")]
Some("alsa") => Some(mk_sink::<AlsaMixer>),
Some(AlsaMixer::NAME) => Some(mk_sink::<AlsaMixer>),
_ => None,
}
}

View file

@ -1,28 +1,40 @@
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
use super::AudioFilter;
use super::{MappedCtrl, VolumeCtrl};
use super::{Mixer, MixerConfig};
#[derive(Clone)]
pub struct SoftMixer {
volume: Arc<AtomicUsize>,
// There is no AtomicF64, so we store the f64 as bits in a u64 field.
// It's much faster than a Mutex<f64>.
volume: Arc<AtomicU64>,
volume_ctrl: VolumeCtrl,
}
impl Mixer for SoftMixer {
fn open(_: Option<MixerConfig>) -> SoftMixer {
SoftMixer {
volume: Arc::new(AtomicUsize::new(0xFFFF)),
fn open(config: MixerConfig) -> Self {
let volume_ctrl = config.volume_ctrl;
info!("Mixing with softvol and volume control: {:?}", volume_ctrl);
Self {
volume: Arc::new(AtomicU64::new(f64::to_bits(0.5))),
volume_ctrl,
}
}
fn start(&self) {}
fn stop(&self) {}
fn volume(&self) -> u16 {
self.volume.load(Ordering::Relaxed) as u16
let mapped_volume = f64::from_bits(self.volume.load(Ordering::Relaxed));
self.volume_ctrl.from_mapped(mapped_volume)
}
fn set_volume(&self, volume: u16) {
self.volume.store(volume as usize, Ordering::Relaxed);
let mapped_volume = self.volume_ctrl.to_mapped(volume);
self.volume
.store(mapped_volume.to_bits(), Ordering::Relaxed)
}
fn get_audio_filter(&self) -> Option<Box<dyn AudioFilter + Send>> {
Some(Box::new(SoftVolumeApplier {
volume: self.volume.clone(),
@ -30,17 +42,20 @@ impl Mixer for SoftMixer {
}
}
impl SoftMixer {
pub const NAME: &'static str = "softmixer";
}
struct SoftVolumeApplier {
volume: Arc<AtomicUsize>,
volume: Arc<AtomicU64>,
}
impl AudioFilter for SoftVolumeApplier {
fn modify_stream(&self, data: &mut [f32]) {
let volume = self.volume.load(Ordering::Relaxed) as u16;
if volume != 0xFFFF {
let volume_factor = volume as f64 / 0xFFFF as f64;
fn modify_stream(&self, data: &mut [f64]) {
let volume = f64::from_bits(self.volume.load(Ordering::Relaxed));
if volume < 1.0 {
for x in data.iter_mut() {
*x = (*x as f64 * volume_factor) as f32;
*x *= volume;
}
}
}

View file

@ -2,6 +2,7 @@ use std::cmp::max;
use std::future::Future;
use std::io::{self, Read, Seek, SeekFrom};
use std::pin::Pin;
use std::process::exit;
use std::task::{Context, Poll};
use std::time::{Duration, Instant};
use std::{mem, thread};
@ -13,11 +14,12 @@ use tokio::sync::{mpsc, oneshot};
use crate::audio::{AudioDecrypt, AudioFile, StreamLoaderController};
use crate::audio::{
READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS, READ_AHEAD_BEFORE_PLAYBACK_SECONDS,
READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS, READ_AHEAD_DURING_PLAYBACK_SECONDS,
READ_AHEAD_BEFORE_PLAYBACK, READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS, READ_AHEAD_DURING_PLAYBACK,
READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS,
};
use crate::audio_backend::Sink;
use crate::config::{Bitrate, NormalisationMethod, NormalisationType, PlayerConfig};
use crate::convert::Converter;
use crate::core::session::Session;
use crate::core::spotify_id::SpotifyId;
use crate::core::util::SeqGenerator;
@ -25,12 +27,10 @@ use crate::decoder::{AudioDecoder, AudioError, AudioPacket, PassthroughDecoder,
use crate::metadata::{AudioItem, FileFormat};
use crate::mixer::AudioFilter;
pub const SAMPLE_RATE: u32 = 44100;
pub const NUM_CHANNELS: u8 = 2;
pub const SAMPLES_PER_SECOND: u32 = SAMPLE_RATE as u32 * NUM_CHANNELS as u32;
use crate::{NUM_CHANNELS, SAMPLES_PER_SECOND};
const PRELOAD_NEXT_TRACK_BEFORE_END_DURATION_MS: u32 = 30000;
const DB_VOLTAGE_RATIO: f32 = 20.0;
pub const DB_VOLTAGE_RATIO: f64 = 20.0;
pub struct Player {
commands: Option<mpsc::UnboundedSender<PlayerCommand>>,
@ -59,13 +59,14 @@ struct PlayerInternal {
sink_event_callback: Option<SinkEventCallback>,
audio_filter: Option<Box<dyn AudioFilter + Send>>,
event_senders: Vec<mpsc::UnboundedSender<PlayerEvent>>,
converter: Converter,
limiter_active: bool,
limiter_attack_counter: u32,
limiter_release_counter: u32,
limiter_peak_sample: f32,
limiter_factor: f32,
limiter_strength: f32,
limiter_peak_sample: f64,
limiter_factor: f64,
limiter_strength: f64,
}
enum PlayerCommand {
@ -196,6 +197,14 @@ impl PlayerEvent {
pub type PlayerEventChannel = mpsc::UnboundedReceiver<PlayerEvent>;
pub fn db_to_ratio(db: f64) -> f64 {
f64::powf(10.0, db / DB_VOLTAGE_RATIO)
}
pub fn ratio_to_db(ratio: f64) -> f64 {
ratio.log10() * DB_VOLTAGE_RATIO
}
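
Hedged worked numbers for the two helpers above, using the usual 20 * log10 voltage relation:

```rust
// db_to_ratio(-6.0) = 10^(-6/20) ≈ 0.501, and ratio_to_db(2.0) ≈ +6.02 dB.
assert!((db_to_ratio(-6.0) - 0.501).abs() < 0.001);
assert!((ratio_to_db(2.0) - 6.0206).abs() < 0.001);
```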
#[derive(Clone, Copy, Debug)]
pub struct NormalisationData {
track_gain_db: f32,
@ -205,14 +214,6 @@ pub struct NormalisationData {
}
impl NormalisationData {
pub fn db_to_ratio(db: f32) -> f32 {
f32::powf(10.0, db / DB_VOLTAGE_RATIO)
}
pub fn ratio_to_db(ratio: f32) -> f32 {
ratio.log10() * DB_VOLTAGE_RATIO
}
fn parse_from_file<T: Read + Seek>(mut file: T) -> io::Result<NormalisationData> {
const SPOTIFY_NORMALIZATION_HEADER_START_OFFSET: u64 = 144;
file.seek(SeekFrom::Start(SPOTIFY_NORMALIZATION_HEADER_START_OFFSET))?;
@ -232,7 +233,7 @@ impl NormalisationData {
Ok(r)
}
fn get_factor(config: &PlayerConfig, data: NormalisationData) -> f32 {
fn get_factor(config: &PlayerConfig, data: NormalisationData) -> f64 {
if !config.normalisation {
return 1.0;
}
@ -242,12 +243,12 @@ impl NormalisationData {
NormalisationType::Track => [data.track_gain_db, data.track_peak],
};
let normalisation_power = gain_db + config.normalisation_pregain;
let mut normalisation_factor = Self::db_to_ratio(normalisation_power);
let normalisation_power = gain_db as f64 + config.normalisation_pregain;
let mut normalisation_factor = db_to_ratio(normalisation_power);
if normalisation_factor * gain_peak > config.normalisation_threshold {
let limited_normalisation_factor = config.normalisation_threshold / gain_peak;
let limited_normalisation_power = Self::ratio_to_db(limited_normalisation_factor);
if normalisation_factor * gain_peak as f64 > config.normalisation_threshold {
let limited_normalisation_factor = config.normalisation_threshold / gain_peak as f64;
let limited_normalisation_power = ratio_to_db(limited_normalisation_factor);
if config.normalisation_method == NormalisationMethod::Basic {
warn!("Limiting gain to {:.2} dB for the duration of this track to stay under normalisation threshold.", limited_normalisation_power);
@ -263,21 +264,9 @@ impl NormalisationData {
}
debug!("Normalisation Data: {:?}", data);
debug!("Normalisation Type: {:?}", config.normalisation_type);
debug!(
"Normalisation Threshold: {:.1}",
Self::ratio_to_db(config.normalisation_threshold)
);
debug!("Normalisation Method: {:?}", config.normalisation_method);
debug!("Normalisation Factor: {}", normalisation_factor);
debug!("Normalisation Factor: {:.2}%", normalisation_factor * 100.0);
if config.normalisation_method == NormalisationMethod::Dynamic {
debug!("Normalisation Attack: {:?}", config.normalisation_attack);
debug!("Normalisation Release: {:?}", config.normalisation_release);
debug!("Normalisation Knee: {:?}", config.normalisation_knee);
}
normalisation_factor
normalisation_factor as f64
}
}
@ -294,9 +283,30 @@ impl Player {
let (cmd_tx, cmd_rx) = mpsc::unbounded_channel();
let (event_sender, event_receiver) = mpsc::unbounded_channel();
if config.normalisation {
debug!("Normalisation Type: {:?}", config.normalisation_type);
debug!(
"Normalisation Pregain: {:.1} dB",
config.normalisation_pregain
);
debug!(
"Normalisation Threshold: {:.1} dBFS",
ratio_to_db(config.normalisation_threshold)
);
debug!("Normalisation Method: {:?}", config.normalisation_method);
if config.normalisation_method == NormalisationMethod::Dynamic {
debug!("Normalisation Attack: {:?}", config.normalisation_attack);
debug!("Normalisation Release: {:?}", config.normalisation_release);
debug!("Normalisation Knee: {:?}", config.normalisation_knee);
}
}
let handle = thread::spawn(move || {
debug!("new Player[{}]", session.session_id());
let converter = Converter::new(config.ditherer);
let internal = PlayerInternal {
session,
config,
@ -309,6 +319,7 @@ impl Player {
sink_event_callback: None,
audio_filter,
event_senders: [event_sender].to_vec(),
converter,
limiter_active: false,
limiter_attack_counter: 0,
@ -412,7 +423,7 @@ impl Drop for Player {
struct PlayerLoadedTrackData {
decoder: Decoder,
normalisation_factor: f32,
normalisation_factor: f64,
stream_loader_controller: StreamLoaderController,
bytes_per_second: usize,
duration_ms: u32,
@ -445,7 +456,7 @@ enum PlayerState {
track_id: SpotifyId,
play_request_id: u64,
decoder: Decoder,
normalisation_factor: f32,
normalisation_factor: f64,
stream_loader_controller: StreamLoaderController,
bytes_per_second: usize,
duration_ms: u32,
@ -456,7 +467,7 @@ enum PlayerState {
track_id: SpotifyId,
play_request_id: u64,
decoder: Decoder,
normalisation_factor: f32,
normalisation_factor: f64,
stream_loader_controller: StreamLoaderController,
bytes_per_second: usize,
duration_ms: u32,
@ -768,7 +779,7 @@ impl PlayerTrackLoader {
}
Err(_) => {
warn!("Unable to extract normalisation data, using default value.");
1.0_f32
1.0
}
};
@ -952,12 +963,12 @@ impl Future for PlayerInternal {
let notify_about_position = match *reported_nominal_start_time {
None => true,
Some(reported_nominal_start_time) => {
// only notify if we're behind. If we're ahead it's probably due to a buffer of the backend and we;re actually in time.
// only notify if we're behind. If we're ahead it's probably due to a buffer of the backend and we're actually in time.
let lag = (Instant::now() - reported_nominal_start_time)
.as_millis()
as i64
- stream_position_millis as i64;
lag > 1000
lag > Duration::from_secs(1).as_millis() as i64
}
};
if notify_about_position {
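The hunk above replaces the hard-coded `1000` with `Duration::from_secs(1)`; a rough standalone sketch of the same lag test (names are illustrative, not the player's actual fields):

```rust
use std::time::{Duration, Instant};

// Only report a position change when the wall clock has drifted more than
// one second ahead of the nominal stream position.
fn should_notify(reported_nominal_start_time: Instant, stream_position_millis: u64) -> bool {
    let lag = (Instant::now() - reported_nominal_start_time).as_millis() as i64
        - stream_position_millis as i64;
    lag > Duration::from_secs(1).as_millis() as i64
}

fn main() {
    // Playback nominally started 2.5 s ago, but only 1.0 s of audio has played:
    // we are 1.5 s behind, so a position update should be sent.
    let start = Instant::now() - Duration::from_millis(2500);
    assert!(should_notify(start, 1000));
}
```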
@ -1044,7 +1055,10 @@ impl PlayerInternal {
}
match self.sink.start() {
Ok(()) => self.sink_status = SinkStatus::Running,
Err(err) => error!("Could not start audio: {}", err),
Err(err) => {
error!("Fatal error, could not start audio sink: {}", err);
exit(1);
}
}
}
}
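The change above (and the matching ones for `stop` and `write` further down) makes sink errors fatal rather than merely logged; a toy sketch of that policy, with `FakeSink` as a stand-in type rather than a librespot API:

```rust
use std::process::exit;

struct FakeSink;

impl FakeSink {
    fn start(&self) -> Result<(), String> {
        // Pretend the audio device could not be opened.
        Err("device unavailable".to_string())
    }
}

fn main() {
    let sink = FakeSink;
    if let Err(err) = sink.start() {
        // Continuing with a dead sink would only desynchronise playback,
        // so treat the failure as fatal.
        eprintln!("Fatal error, could not start audio sink: {}", err);
        exit(1);
    }
}
```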
@ -1053,14 +1067,21 @@ impl PlayerInternal {
match self.sink_status {
SinkStatus::Running => {
trace!("== Stopping sink ==");
self.sink.stop().unwrap();
self.sink_status = if temporarily {
SinkStatus::TemporarilyClosed
} else {
SinkStatus::Closed
};
if let Some(callback) = &mut self.sink_event_callback {
callback(self.sink_status);
match self.sink.stop() {
Ok(()) => {
self.sink_status = if temporarily {
SinkStatus::TemporarilyClosed
} else {
SinkStatus::Closed
};
if let Some(callback) = &mut self.sink_event_callback {
callback(self.sink_status);
}
}
Err(err) => {
error!("Fatal error, could not stop audio sink: {}", err);
exit(1);
}
}
}
SinkStatus::TemporarilyClosed => {
@ -1157,7 +1178,7 @@ impl PlayerInternal {
}
}
fn handle_packet(&mut self, packet: Option<AudioPacket>, normalisation_factor: f32) {
fn handle_packet(&mut self, packet: Option<AudioPacket>, normalisation_factor: f64) {
match packet {
Some(mut packet) => {
if !packet.is_empty() {
@ -1167,7 +1188,7 @@ impl PlayerInternal {
}
if self.config.normalisation
&& !(f32::abs(normalisation_factor - 1.0) <= f32::EPSILON
&& !(f64::abs(normalisation_factor - 1.0) <= f64::EPSILON
&& self.config.normalisation_method == NormalisationMethod::Basic)
{
for sample in data.iter_mut() {
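The rewritten condition above skips the per-sample work when basic normalisation would be a no-op; a compact sketch of that fast-path check (the enum and function here are illustrative only):

```rust
#[derive(PartialEq)]
enum NormalisationMethod {
    Basic,
    Dynamic,
}

// Basic normalisation at (effectively) unity gain changes nothing, so the
// per-sample loop can be skipped. The dynamic limiter must still run.
fn needs_per_sample_processing(enabled: bool, factor: f64, method: NormalisationMethod) -> bool {
    enabled
        && !(f64::abs(factor - 1.0) <= f64::EPSILON && method == NormalisationMethod::Basic)
}

fn main() {
    assert!(!needs_per_sample_processing(true, 1.0, NormalisationMethod::Basic));
    assert!(needs_per_sample_processing(true, 0.8, NormalisationMethod::Basic));
    assert!(needs_per_sample_processing(true, 1.0, NormalisationMethod::Dynamic));
}
```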
@ -1187,10 +1208,10 @@ impl PlayerInternal {
{
shaped_limiter_strength = 1.0
/ (1.0
+ f32::powf(
+ f64::powf(
shaped_limiter_strength
/ (1.0 - shaped_limiter_strength),
-1.0 * self.config.normalisation_knee,
-self.config.normalisation_knee,
));
}
actual_normalisation_factor =
@ -1198,32 +1219,38 @@ impl PlayerInternal {
+ shaped_limiter_strength * self.limiter_factor;
};
// Cast the fields here for better readability
let normalisation_attack =
self.config.normalisation_attack.as_secs_f64();
let normalisation_release =
self.config.normalisation_release.as_secs_f64();
let limiter_release_counter =
self.limiter_release_counter as f64;
let limiter_attack_counter = self.limiter_attack_counter as f64;
let samples_per_second = SAMPLES_PER_SECOND as f64;
// Always check for peaks, even when the limiter is already active.
// There may be even higher peaks than we initially targeted.
// Check against the normalisation factor that would be applied normally.
let abs_sample =
((*sample as f64 * normalisation_factor as f64) as f32)
.abs();
let abs_sample = f64::abs(*sample * normalisation_factor);
if abs_sample > self.config.normalisation_threshold {
self.limiter_active = true;
if self.limiter_release_counter > 0 {
// A peak was encountered while releasing the limiter;
// synchronize with the current release limiter strength.
self.limiter_attack_counter = (((SAMPLES_PER_SECOND
as f32
* self.config.normalisation_release)
- self.limiter_release_counter as f32)
/ (self.config.normalisation_release
/ self.config.normalisation_attack))
self.limiter_attack_counter = (((samples_per_second
* normalisation_release)
- limiter_release_counter)
/ (normalisation_release / normalisation_attack))
as u32;
self.limiter_release_counter = 0;
}
self.limiter_attack_counter =
self.limiter_attack_counter.saturating_add(1);
self.limiter_strength = self.limiter_attack_counter as f32
/ (SAMPLES_PER_SECOND as f32
* self.config.normalisation_attack);
self.limiter_strength = limiter_attack_counter
/ (samples_per_second * normalisation_attack);
if abs_sample > self.limiter_peak_sample {
self.limiter_peak_sample = abs_sample;
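The counter hand-off above keeps the limiter strength continuous when a new peak arrives while the limiter is still releasing; a self-contained numeric check of that property under hypothetical settings (44.1 kHz, 5 ms attack, 100 ms release; the `as u32` cast is omitted to keep the comparison exact):

```rust
fn main() {
    let samples_per_second = 44_100.0_f64;
    let normalisation_attack = 0.005_f64; // seconds
    let normalisation_release = 0.100_f64; // seconds

    // Assume we are halfway through the release window when the new peak hits.
    let limiter_release_counter = 2_205.0_f64;
    let release_strength = ((samples_per_second * normalisation_release)
        - limiter_release_counter)
        / (samples_per_second * normalisation_release);

    // Rewind the attack counter to the point where the attack ramp has the
    // same strength, following the expression in the hunk above.
    let limiter_attack_counter = ((samples_per_second * normalisation_release)
        - limiter_release_counter)
        / (normalisation_release / normalisation_attack);
    let attack_strength =
        limiter_attack_counter / (samples_per_second * normalisation_attack);

    assert!((release_strength - attack_strength).abs() < 1e-9);
}
```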
@ -1237,12 +1264,10 @@ impl PlayerInternal {
// the limiter reached full strength. For that reason
// start the release by synchronizing with the current
// attack limiter strength.
self.limiter_release_counter = (((SAMPLES_PER_SECOND
as f32
* self.config.normalisation_attack)
- self.limiter_attack_counter as f32)
* (self.config.normalisation_release
/ self.config.normalisation_attack))
self.limiter_release_counter = (((samples_per_second
* normalisation_attack)
- limiter_attack_counter)
* (normalisation_release / normalisation_attack))
as u32;
self.limiter_attack_counter = 0;
}
@ -1251,23 +1276,19 @@ impl PlayerInternal {
self.limiter_release_counter.saturating_add(1);
if self.limiter_release_counter
> (SAMPLES_PER_SECOND as f32
* self.config.normalisation_release)
as u32
> (samples_per_second * normalisation_release) as u32
{
self.reset_limiter();
} else {
self.limiter_strength = ((SAMPLES_PER_SECOND as f32
* self.config.normalisation_release)
- self.limiter_release_counter as f32)
/ (SAMPLES_PER_SECOND as f32
* self.config.normalisation_release);
self.limiter_strength = ((samples_per_second
* normalisation_release)
- limiter_release_counter)
/ (samples_per_second * normalisation_release);
}
}
}
*sample =
(*sample as f64 * actual_normalisation_factor as f64) as f32;
*sample *= actual_normalisation_factor;
// Extremely sharp attacks, however unlikely, *may* still clip and provide
// undefined results, so strictly enforce output within [-1.0, 1.0].
@ -1280,9 +1301,9 @@ impl PlayerInternal {
}
}
if let Err(err) = self.sink.write(&packet) {
error!("Could not write audio: {}", err);
self.ensure_sink_stopped(false);
if let Err(err) = self.sink.write(&packet, &mut self.converter) {
error!("Fatal error, could not write audio to audio sink: {}", err);
exit(1);
}
}
}
@ -1788,18 +1809,18 @@ impl PlayerInternal {
// Request our read ahead range
let request_data_length = max(
(READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS
* (0.001 * stream_loader_controller.ping_time_ms() as f64)
* bytes_per_second as f64) as usize,
(READ_AHEAD_DURING_PLAYBACK_SECONDS * bytes_per_second as f64) as usize,
* stream_loader_controller.ping_time().as_secs_f32()
* bytes_per_second as f32) as usize,
(READ_AHEAD_DURING_PLAYBACK.as_secs_f32() * bytes_per_second as f32) as usize,
);
stream_loader_controller.fetch_next(request_data_length);
// Request the part we want to wait for blocking. This effectively means we wait for the previous request to partially complete.
let wait_for_data_length = max(
(READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS
* (0.001 * stream_loader_controller.ping_time_ms() as f64)
* bytes_per_second as f64) as usize,
(READ_AHEAD_BEFORE_PLAYBACK_SECONDS * bytes_per_second as f64) as usize,
* stream_loader_controller.ping_time().as_secs_f32()
* bytes_per_second as f32) as usize,
(READ_AHEAD_BEFORE_PLAYBACK.as_secs_f32() * bytes_per_second as f32) as usize,
);
stream_loader_controller.fetch_next_blocking(wait_for_data_length);
}
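For a feel of the sizing above, a standalone sketch with hypothetical constants (2 round trips of read-ahead, a 5-second floor, 320 kbps ≈ 40 kB/s); the real constant values are defined elsewhere in the crate:

```rust
use std::cmp::max;
use std::time::Duration;

fn main() {
    // Hypothetical values for illustration only.
    const READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS: f32 = 2.0;
    const READ_AHEAD_DURING_PLAYBACK: Duration = Duration::from_secs(5);

    let ping_time = Duration::from_millis(150); // stream_loader_controller.ping_time()
    let bytes_per_second = 40_000_usize; // ~320 kbps Ogg Vorbis

    let request_data_length = max(
        (READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS
            * ping_time.as_secs_f32()
            * bytes_per_second as f32) as usize,
        (READ_AHEAD_DURING_PLAYBACK.as_secs_f32() * bytes_per_second as f32) as usize,
    );

    // Two round trips only cover 12 kB here, so the 5-second floor (200 kB) wins.
    assert_eq!(request_data_length, 200_000);
}
```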

View file

@ -3,6 +3,7 @@
pub use librespot_audio as audio;
pub use librespot_connect as connect;
pub use librespot_core as core;
pub use librespot_discovery as discovery;
pub use librespot_metadata as metadata;
pub use librespot_playback as playback;
pub use librespot_protocol as protocol;

View file

@ -9,30 +9,31 @@ use url::Url;
use librespot::connect::spirc::Spirc;
use librespot::core::authentication::Credentials;
use librespot::core::cache::Cache;
use librespot::core::config::{ConnectConfig, DeviceType, SessionConfig, VolumeCtrl};
use librespot::core::config::{ConnectConfig, DeviceType, SessionConfig};
use librespot::core::session::Session;
use librespot::core::version;
use librespot::playback::audio_backend::{self, Sink, BACKENDS};
use librespot::playback::audio_backend::{self, SinkBuilder, BACKENDS};
use librespot::playback::config::{
AudioFormat, Bitrate, NormalisationMethod, NormalisationType, PlayerConfig,
AudioFormat, Bitrate, NormalisationMethod, NormalisationType, PlayerConfig, VolumeCtrl,
};
use librespot::playback::mixer::{self, Mixer, MixerConfig};
use librespot::playback::player::{NormalisationData, Player};
use librespot::playback::dither;
#[cfg(feature = "alsa-backend")]
use librespot::playback::mixer::alsamixer::AlsaMixer;
use librespot::playback::mixer::mappings::MappedCtrl;
use librespot::playback::mixer::{self, MixerConfig, MixerFn};
use librespot::playback::player::{db_to_ratio, Player};
mod player_event_handler;
use player_event_handler::{emit_sink_event, run_program_on_events};
use std::convert::TryFrom;
use std::env;
use std::io::{stderr, Write};
use std::path::Path;
use std::pin::Pin;
use std::process::exit;
use std::str::FromStr;
use std::{env, time::Instant};
use std::{
io::{stderr, Write},
pin::Pin,
};
const MILLIS: f32 = 1000.0;
use std::time::Duration;
use std::time::Instant;
fn device_id(name: &str) -> String {
hex::encode(Sha1::digest(name.as_bytes()))
@ -66,7 +67,7 @@ fn setup_logging(verbose: bool) {
}
fn list_backends() {
println!("Available Backends : ");
println!("Available backends : ");
for (&(name, _), idx) in BACKENDS.iter().zip(0..) {
if idx == 0 {
println!("- {} (default)", name);
@ -169,14 +170,11 @@ fn print_version() {
);
}
#[derive(Clone)]
struct Setup {
format: AudioFormat,
backend: fn(Option<String>, AudioFormat) -> Box<dyn Sink + 'static>,
backend: SinkBuilder,
device: Option<String>,
mixer: fn(Option<MixerConfig>) -> Box<dyn Mixer>,
mixer: MixerFn,
cache: Option<Cache>,
player_config: PlayerConfig,
session_config: SessionConfig,
@ -190,182 +188,242 @@ struct Setup {
}
fn get_setup(args: &[String]) -> Setup {
const AP_PORT: &str = "ap-port";
const AUTOPLAY: &str = "autoplay";
const BACKEND: &str = "backend";
const BITRATE: &str = "b";
const CACHE: &str = "c";
const CACHE_SIZE_LIMIT: &str = "cache-size-limit";
const DEVICE: &str = "device";
const DEVICE_TYPE: &str = "device-type";
const DISABLE_AUDIO_CACHE: &str = "disable-audio-cache";
const DISABLE_DISCOVERY: &str = "disable-discovery";
const DISABLE_GAPLESS: &str = "disable-gapless";
const DITHER: &str = "dither";
const EMIT_SINK_EVENTS: &str = "emit-sink-events";
const ENABLE_VOLUME_NORMALISATION: &str = "enable-volume-normalisation";
const FORMAT: &str = "format";
const HELP: &str = "h";
const INITIAL_VOLUME: &str = "initial-volume";
const MIXER_CARD: &str = "mixer-card";
const MIXER_INDEX: &str = "mixer-index";
const MIXER_NAME: &str = "mixer-name";
const NAME: &str = "name";
const NORMALISATION_ATTACK: &str = "normalisation-attack";
const NORMALISATION_GAIN_TYPE: &str = "normalisation-gain-type";
const NORMALISATION_KNEE: &str = "normalisation-knee";
const NORMALISATION_METHOD: &str = "normalisation-method";
const NORMALISATION_PREGAIN: &str = "normalisation-pregain";
const NORMALISATION_RELEASE: &str = "normalisation-release";
const NORMALISATION_THRESHOLD: &str = "normalisation-threshold";
const ONEVENT: &str = "onevent";
const PASSTHROUGH: &str = "passthrough";
const PASSWORD: &str = "password";
const PROXY: &str = "proxy";
const SYSTEM_CACHE: &str = "system-cache";
const USERNAME: &str = "username";
const VERBOSE: &str = "verbose";
const VERSION: &str = "version";
const VOLUME_CTRL: &str = "volume-ctrl";
const VOLUME_RANGE: &str = "volume-range";
const ZEROCONF_PORT: &str = "zeroconf-port";
let mut opts = getopts::Options::new();
opts.optopt(
"c",
opts.optflag(
HELP,
"help",
"Print this help menu.",
).optopt(
CACHE,
"cache",
"Path to a directory where files will be cached.",
"CACHE",
"PATH",
).optopt(
"",
"system-cache",
"Path to a directory where system files (credentials, volume) will be cached. Can be different from cache option value",
"SYTEMCACHE",
SYSTEM_CACHE,
"Path to a directory where system files (credentials, volume) will be cached. Can be different from cache option value.",
"PATH",
).optopt(
"",
"cache-size-limit",
CACHE_SIZE_LIMIT,
"Limits the size of the cache for audio files.",
"CACHE_SIZE_LIMIT"
).optflag("", "disable-audio-cache", "Disable caching of the audio data.")
.optopt("n", "name", "Device name", "NAME")
.optopt("", "device-type", "Displayed device type", "DEVICE_TYPE")
.optopt(
"b",
"bitrate",
"Bitrate (96, 160 or 320). Defaults to 160",
"BITRATE",
)
.optopt(
"",
"onevent",
"Run PROGRAM when playback is about to begin.",
"PROGRAM",
)
.optflag("", "emit-sink-events", "Run program set by --onevent before sink is opened and after it is closed.")
.optflag("v", "verbose", "Enable verbose output")
.optflag("V", "version", "Display librespot version string")
.optopt("u", "username", "Username to sign in with", "USERNAME")
.optopt("p", "password", "Password", "PASSWORD")
.optopt("", "proxy", "HTTP proxy to use when connecting", "PROXY")
.optopt("", "ap-port", "Connect to AP with specified port. If no AP with that port are present fallback AP will be used. Available ports are usually 80, 443 and 4070", "AP_PORT")
.optflag("", "disable-discovery", "Disable discovery mode")
.optopt(
"",
"backend",
"Audio backend to use. Use '?' to list options",
"BACKEND",
)
.optopt(
"",
"device",
"Audio device to use. Use '?' to list options if using portaudio or alsa",
"DEVICE",
)
.optopt(
"",
"format",
"Output format (F32, S32, S24, S24_3 or S16). Defaults to S16",
"FORMAT",
)
.optopt("", "mixer", "Mixer to use (alsa or softvol)", "MIXER")
.optopt(
"m",
"mixer-name",
"Alsa mixer name, e.g \"PCM\" or \"Master\". Defaults to 'PCM'",
"MIXER_NAME",
)
.optopt(
"",
"mixer-card",
"Alsa mixer card, e.g \"hw:0\" or similar from `aplay -l`. Defaults to 'default' ",
"MIXER_CARD",
)
.optopt(
"",
"mixer-index",
"Alsa mixer index, Index of the cards mixer. Defaults to 0",
"MIXER_INDEX",
)
.optflag(
"",
"mixer-linear-volume",
"Disable alsa's mapped volume scale (cubic). Default false",
)
.optopt(
"",
"initial-volume",
"Initial volume in %, once connected (must be from 0 to 100)",
"VOLUME",
)
.optopt(
"",
"zeroconf-port",
"The port the internal server advertised over zeroconf uses.",
"ZEROCONF_PORT",
)
.optflag(
"",
"enable-volume-normalisation",
"Play all tracks at the same volume",
)
.optopt(
"",
"normalisation-method",
"Specify the normalisation method to use - [basic, dynamic]. Default is dynamic.",
"NORMALISATION_METHOD",
)
.optopt(
"",
"normalisation-gain-type",
"Specify the normalisation gain type to use - [track, album]. Default is album.",
"GAIN_TYPE",
)
.optopt(
"",
"normalisation-pregain",
"Pregain (dB) applied by volume normalisation",
"PREGAIN",
)
.optopt(
"",
"normalisation-threshold",
"Threshold (dBFS) to prevent clipping. Default is -1.0.",
"THRESHOLD",
)
.optopt(
"",
"normalisation-attack",
"Attack time (ms) in which the dynamic limiter is reducing gain. Default is 5.",
"ATTACK",
)
.optopt(
"",
"normalisation-release",
"Release or decay time (ms) in which the dynamic limiter is restoring gain. Default is 100.",
"RELEASE",
)
.optopt(
"",
"normalisation-knee",
"Knee steepness of the dynamic limiter. Default is 1.0.",
"KNEE",
)
.optopt(
"",
"volume-ctrl",
"Volume control type - [linear, log, fixed]. Default is logarithmic",
"VOLUME_CTRL"
)
.optflag(
"",
"autoplay",
"autoplay similar songs when your music ends.",
)
.optflag(
"",
"disable-gapless",
"disable gapless playback.",
)
.optflag(
"",
"passthrough",
"Pass raw stream to output, only works for \"pipe\"."
);
"SIZE"
).optflag("", DISABLE_AUDIO_CACHE, "Disable caching of the audio data.")
.optopt("n", NAME, "Device name.", "NAME")
.optopt("", DEVICE_TYPE, "Displayed device type.", "TYPE")
.optopt(
BITRATE,
"bitrate",
"Bitrate (kbps) {96|160|320}. Defaults to 160.",
"BITRATE",
)
.optopt(
"",
ONEVENT,
"Run PROGRAM when a playback event occurs.",
"PROGRAM",
)
.optflag("", EMIT_SINK_EVENTS, "Run program set by --onevent before sink is opened and after it is closed.")
.optflag("v", VERBOSE, "Enable verbose output.")
.optflag("V", VERSION, "Display librespot version string.")
.optopt("u", USERNAME, "Username to sign in with.", "USERNAME")
.optopt("p", PASSWORD, "Password", "PASSWORD")
.optopt("", PROXY, "HTTP proxy to use when connecting.", "URL")
.optopt("", AP_PORT, "Connect to AP with specified port. If no AP with that port are present fallback AP will be used. Available ports are usually 80, 443 and 4070.", "PORT")
.optflag("", DISABLE_DISCOVERY, "Disable discovery mode.")
.optopt(
"",
BACKEND,
"Audio backend to use. Use '?' to list options.",
"NAME",
)
.optopt(
"",
DEVICE,
"Audio device to use. Use '?' to list options if using alsa, portaudio or rodio.",
"NAME",
)
.optopt(
"",
FORMAT,
"Output format {F64|F32|S32|S24|S24_3|S16}. Defaults to S16.",
"FORMAT",
)
.optopt(
"",
DITHER,
"Specify the dither algorithm to use - [none, gpdf, tpdf, tpdf_hp]. Defaults to 'tpdf' for formats S16, S24, S24_3 and 'none' for other formats.",
"DITHER",
)
.optopt("", "mixer", "Mixer to use {alsa|softvol}.", "MIXER")
.optopt(
"m",
MIXER_NAME,
"Alsa mixer control, e.g. 'PCM' or 'Master'. Defaults to 'PCM'.",
"NAME",
)
.optopt(
"",
MIXER_CARD,
"Alsa mixer card, e.g 'hw:0' or similar from `aplay -l`. Defaults to DEVICE if specified, 'default' otherwise.",
"MIXER_CARD",
)
.optopt(
"",
MIXER_INDEX,
"Alsa index of the cards mixer. Defaults to 0.",
"INDEX",
)
.optopt(
"",
INITIAL_VOLUME,
"Initial volume in % from 0-100. Default for softvol: '50'. For the Alsa mixer: the current volume.",
"VOLUME",
)
.optopt(
"",
ZEROCONF_PORT,
"The port the internal server advertised over zeroconf uses.",
"PORT",
)
.optflag(
"",
ENABLE_VOLUME_NORMALISATION,
"Play all tracks at the same volume.",
)
.optopt(
"",
NORMALISATION_METHOD,
"Specify the normalisation method to use {basic|dynamic}. Defaults to dynamic.",
"METHOD",
)
.optopt(
"",
NORMALISATION_GAIN_TYPE,
"Specify the normalisation gain type to use {track|album}. Defaults to album.",
"TYPE",
)
.optopt(
"",
NORMALISATION_PREGAIN,
"Pregain (dB) applied by volume normalisation. Defaults to 0.",
"PREGAIN",
)
.optopt(
"",
NORMALISATION_THRESHOLD,
"Threshold (dBFS) to prevent clipping. Defaults to -1.0.",
"THRESHOLD",
)
.optopt(
"",
NORMALISATION_ATTACK,
"Attack time (ms) in which the dynamic limiter is reducing gain. Defaults to 5.",
"TIME",
)
.optopt(
"",
NORMALISATION_RELEASE,
"Release or decay time (ms) in which the dynamic limiter is restoring gain. Defaults to 100.",
"TIME",
)
.optopt(
"",
NORMALISATION_KNEE,
"Knee steepness of the dynamic limiter. Defaults to 1.0.",
"KNEE",
)
.optopt(
"",
VOLUME_CTRL,
"Volume control type {cubic|fixed|linear|log}. Defaults to log.",
"VOLUME_CTRL"
)
.optopt(
"",
VOLUME_RANGE,
"Range of the volume control (dB). Default for softvol: 60. For the Alsa mixer: what the control supports.",
"RANGE",
)
.optflag(
"",
AUTOPLAY,
"Automatically play similar songs when your music ends.",
)
.optflag(
"",
DISABLE_GAPLESS,
"Disable gapless playback.",
)
.optflag(
"",
PASSTHROUGH,
"Pass raw stream to output, only works for pipe and subprocess.",
);
let matches = match opts.parse(&args[1..]) {
Ok(m) => m,
Err(f) => {
eprintln!("error: {}\n{}", f.to_string(), usage(&args[0], &opts));
eprintln!(
"Error parsing command line options: {}\n{}",
f,
usage(&args[0], &opts)
);
exit(1);
}
};
if matches.opt_present("version") {
if matches.opt_present(HELP) {
println!("{}", usage(&args[0], &opts));
exit(0);
}
if matches.opt_present(VERSION) {
print_version();
exit(0);
}
let verbose = matches.opt_present("verbose");
let verbose = matches.opt_present(VERBOSE);
setup_logging(verbose);
info!(
@ -376,7 +434,7 @@ fn get_setup(args: &[String]) -> Setup {
build_id = version::BUILD_ID
);
let backend_name = matches.opt_str("backend");
let backend_name = matches.opt_str(BACKEND);
if backend_name == Some("?".into()) {
list_backends();
exit(0);
@ -385,57 +443,95 @@ fn get_setup(args: &[String]) -> Setup {
let backend = audio_backend::find(backend_name).expect("Invalid backend");
let format = matches
.opt_str("format")
.as_ref()
.map(|format| AudioFormat::try_from(format).expect("Invalid output format"))
.opt_str(FORMAT)
.as_deref()
.map(|format| AudioFormat::from_str(format).expect("Invalid output format"))
.unwrap_or_default();
let device = matches.opt_str("device");
let device = matches.opt_str(DEVICE);
if device == Some("?".into()) {
backend(device, format);
exit(0);
}
let mixer_name = matches.opt_str("mixer");
let mixer = mixer::find(mixer_name.as_ref()).expect("Invalid mixer");
let mixer_name = matches.opt_str(MIXER_NAME);
let mixer = mixer::find(mixer_name.as_deref()).expect("Invalid mixer");
let mixer_config = MixerConfig {
card: matches
.opt_str("mixer-card")
.unwrap_or_else(|| String::from("default")),
mixer: matches
.opt_str("mixer-name")
.unwrap_or_else(|| String::from("PCM")),
index: matches
.opt_str("mixer-index")
let mixer_config = {
let card = matches.opt_str(MIXER_CARD).unwrap_or_else(|| {
if let Some(ref device_name) = device {
device_name.to_string()
} else {
MixerConfig::default().card
}
});
let index = matches
.opt_str(MIXER_INDEX)
.map(|index| index.parse::<u32>().unwrap())
.unwrap_or(0),
mapped_volume: !matches.opt_present("mixer-linear-volume"),
.unwrap_or(0);
let control = matches
.opt_str(MIXER_NAME)
.unwrap_or_else(|| MixerConfig::default().control);
let mut volume_range = matches
.opt_str(VOLUME_RANGE)
.map(|range| range.parse::<f64>().unwrap())
.unwrap_or_else(|| match mixer_name.as_deref() {
#[cfg(feature = "alsa-backend")]
Some(AlsaMixer::NAME) => 0.0, // let Alsa query the control
_ => VolumeCtrl::DEFAULT_DB_RANGE,
});
if volume_range < 0.0 {
// User might have specified range as minimum dB volume.
volume_range = -volume_range;
warn!(
"Please enter positive volume ranges only, assuming {:.2} dB",
volume_range
);
}
let volume_ctrl = matches
.opt_str(VOLUME_CTRL)
.as_deref()
.map(|volume_ctrl| {
VolumeCtrl::from_str_with_range(volume_ctrl, volume_range)
.expect("Invalid volume control type")
})
.unwrap_or_else(|| {
let mut volume_ctrl = VolumeCtrl::default();
volume_ctrl.set_db_range(volume_range);
volume_ctrl
});
MixerConfig {
card,
control,
index,
volume_ctrl,
}
};
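The `--volume-range` handling above is new; a small standalone sketch of the parse-and-sanitise step (the 60 dB default is an assumption standing in for `VolumeCtrl::DEFAULT_DB_RANGE`):

```rust
fn parse_volume_range(arg: Option<&str>) -> f64 {
    const DEFAULT_DB_RANGE: f64 = 60.0; // assumed softvol default for illustration

    let mut volume_range = arg
        .map(|range| range.parse::<f64>().expect("Invalid volume range"))
        .unwrap_or(DEFAULT_DB_RANGE);

    if volume_range < 0.0 {
        // The user probably passed the minimum volume in dB; use its magnitude.
        volume_range = -volume_range;
        eprintln!(
            "Please enter positive volume ranges only, assuming {:.2} dB",
            volume_range
        );
    }
    volume_range
}

fn main() {
    assert_eq!(parse_volume_range(None), 60.0);
    assert_eq!(parse_volume_range(Some("-50")), 50.0);
    assert_eq!(parse_volume_range(Some("40")), 40.0);
}
```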
let cache = {
let audio_dir;
let system_dir;
if matches.opt_present("disable-audio-cache") {
if matches.opt_present(DISABLE_AUDIO_CACHE) {
audio_dir = None;
system_dir = matches
.opt_str("system-cache")
.or_else(|| matches.opt_str("c"))
.opt_str(SYSTEM_CACHE)
.or_else(|| matches.opt_str(CACHE))
.map(|p| p.into());
} else {
let cache_dir = matches.opt_str("c");
let cache_dir = matches.opt_str(CACHE);
audio_dir = cache_dir
.as_ref()
.map(|p| AsRef::<Path>::as_ref(p).join("files"));
system_dir = matches
.opt_str("system-cache")
.opt_str(SYSTEM_CACHE)
.or(cache_dir)
.map(|p| p.into());
}
let limit = if audio_dir.is_some() {
matches
.opt_str("cache-size-limit")
.opt_str(CACHE_SIZE_LIMIT)
.as_deref()
.map(parse_file_size)
.map(|e| {
@ -458,24 +554,28 @@ fn get_setup(args: &[String]) -> Setup {
};
let initial_volume = matches
.opt_str("initial-volume")
.map(|volume| {
let volume = volume.parse::<u16>().unwrap();
.opt_str(INITIAL_VOLUME)
.map(|initial_volume| {
let volume = initial_volume.parse::<u16>().unwrap();
if volume > 100 {
panic!("Initial volume must be in the range 0-100");
error!("Initial volume must be in the range 0-100.");
// the cast will saturate, not necessary to take further action
}
(volume as i32 * 0xFFFF / 100) as u16
(volume as f32 / 100.0 * VolumeCtrl::MAX_VOLUME as f32) as u16
})
.or_else(|| cache.as_ref().and_then(Cache::volume))
.unwrap_or(0x8000);
.or_else(|| match mixer_name.as_deref() {
#[cfg(feature = "alsa-backend")]
Some(AlsaMixer::NAME) => None,
_ => cache.as_ref().and_then(Cache::volume),
});
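The `--initial-volume` mapping above now scales onto `VolumeCtrl::MAX_VOLUME` instead of the old hard-coded `0xFFFF`; a quick sketch of the conversion, assuming `MAX_VOLUME` equals `u16::MAX`:

```rust
fn to_internal_volume(percent: u16) -> u16 {
    const MAX_VOLUME: u16 = u16::MAX; // assumed value of VolumeCtrl::MAX_VOLUME

    if percent > 100 {
        eprintln!("Initial volume must be in the range 0-100.");
        // The cast below saturates, so no further action is needed.
    }
    (percent as f32 / 100.0 * MAX_VOLUME as f32) as u16
}

fn main() {
    assert_eq!(to_internal_volume(50), 32767);
    assert_eq!(to_internal_volume(100), u16::MAX);
    assert_eq!(to_internal_volume(150), u16::MAX); // saturating float-to-int cast
}
```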
let zeroconf_port = matches
.opt_str("zeroconf-port")
.opt_str(ZEROCONF_PORT)
.map(|port| port.parse::<u16>().unwrap())
.unwrap_or(0);
let name = matches
.opt_str("name")
.opt_str(NAME)
.unwrap_or_else(|| "Librespot".to_string());
let credentials = {
@ -488,8 +588,8 @@ fn get_setup(args: &[String]) -> Setup {
};
get_credentials(
matches.opt_str("username"),
matches.opt_str("password"),
matches.opt_str(USERNAME),
matches.opt_str(PASSWORD),
cached_credentials,
password,
)
@ -501,12 +601,12 @@ fn get_setup(args: &[String]) -> Setup {
SessionConfig {
user_agent: version::VERSION_STRING.to_string(),
device_id,
proxy: matches.opt_str("proxy").or_else(|| std::env::var("http_proxy").ok()).map(
proxy: matches.opt_str(PROXY).or_else(|| std::env::var("http_proxy").ok()).map(
|s| {
match Url::parse(&s) {
Ok(url) => {
if url.host().is_none() || url.port_or_known_default().is_none() {
panic!("Invalid proxy url, only urls on the format \"http://host:port\" are allowed");
panic!("Invalid proxy url, only URLs on the format \"http://host:port\" are allowed");
}
if url.scheme() != "http" {
@ -514,123 +614,154 @@ fn get_setup(args: &[String]) -> Setup {
}
url
},
Err(err) => panic!("Invalid proxy url: {}, only urls on the format \"http://host:port\" are allowed", err)
Err(err) => panic!("Invalid proxy URL: {}, only URLs in the format \"http://host:port\" are allowed", err)
}
},
),
ap_port: matches
.opt_str("ap-port")
.opt_str(AP_PORT)
.map(|port| port.parse::<u16>().expect("Invalid port")),
}
};
let passthrough = matches.opt_present("passthrough");
let player_config = {
let bitrate = matches
.opt_str("b")
.as_ref()
.opt_str(BITRATE)
.as_deref()
.map(|bitrate| Bitrate::from_str(bitrate).expect("Invalid bitrate"))
.unwrap_or_default();
let gain_type = matches
.opt_str("normalisation-gain-type")
.as_ref()
let gapless = !matches.opt_present(DISABLE_GAPLESS);
let normalisation = matches.opt_present(ENABLE_VOLUME_NORMALISATION);
let normalisation_method = matches
.opt_str(NORMALISATION_METHOD)
.as_deref()
.map(|method| {
NormalisationMethod::from_str(method).expect("Invalid normalisation method")
})
.unwrap_or_default();
let normalisation_type = matches
.opt_str(NORMALISATION_GAIN_TYPE)
.as_deref()
.map(|gain_type| {
NormalisationType::from_str(gain_type).expect("Invalid normalisation type")
})
.unwrap_or_default();
let normalisation_method = matches
.opt_str("normalisation-method")
.as_ref()
.map(|gain_type| {
NormalisationMethod::from_str(gain_type).expect("Invalid normalisation method")
let normalisation_pregain = matches
.opt_str(NORMALISATION_PREGAIN)
.map(|pregain| pregain.parse::<f64>().expect("Invalid pregain float value"))
.unwrap_or(PlayerConfig::default().normalisation_pregain);
let normalisation_threshold = matches
.opt_str(NORMALISATION_THRESHOLD)
.map(|threshold| {
db_to_ratio(
threshold
.parse::<f64>()
.expect("Invalid threshold float value"),
)
})
.unwrap_or_default();
.unwrap_or(PlayerConfig::default().normalisation_threshold);
let normalisation_attack = matches
.opt_str(NORMALISATION_ATTACK)
.map(|attack| {
Duration::from_millis(attack.parse::<u64>().expect("Invalid attack value"))
})
.unwrap_or(PlayerConfig::default().normalisation_attack);
let normalisation_release = matches
.opt_str(NORMALISATION_RELEASE)
.map(|release| {
Duration::from_millis(release.parse::<u64>().expect("Invalid release value"))
})
.unwrap_or(PlayerConfig::default().normalisation_release);
let normalisation_knee = matches
.opt_str(NORMALISATION_KNEE)
.map(|knee| knee.parse::<f64>().expect("Invalid knee float value"))
.unwrap_or(PlayerConfig::default().normalisation_knee);
let ditherer_name = matches.opt_str(DITHER);
let ditherer = match ditherer_name.as_deref() {
// explicitly disabled on command line
Some("none") => None,
// explicitly set on command line
Some(_) => {
if format == AudioFormat::F64 || format == AudioFormat::F32 {
unimplemented!("Dithering is not available on format {:?}", format);
}
Some(dither::find_ditherer(ditherer_name).expect("Invalid ditherer"))
}
// nothing set on command line => use default
None => match format {
AudioFormat::S16 | AudioFormat::S24 | AudioFormat::S24_3 => {
PlayerConfig::default().ditherer
}
_ => None,
},
};
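The dither selection above follows a simple policy: an explicit `none` disables it, an explicit name is rejected for float output, and the requantized integer formats default to TPDF. A standalone sketch of that decision tree (the enum and string names are illustrative stand-ins):

```rust
#[allow(dead_code, non_camel_case_types)]
#[derive(Debug, Clone, Copy, PartialEq)]
enum AudioFormat {
    F64,
    F32,
    S32,
    S24,
    S24_3,
    S16,
}

fn select_ditherer(format: AudioFormat, requested: Option<&str>) -> Option<String> {
    match requested {
        // Explicitly disabled on the command line.
        Some("none") => None,
        // Explicitly requested: only meaningful when samples are requantized.
        Some(name) => {
            if format == AudioFormat::F64 || format == AudioFormat::F32 {
                unimplemented!("Dithering is not available on format {:?}", format);
            }
            Some(name.to_string())
        }
        // Nothing requested: integer formats get TPDF by default.
        None => match format {
            AudioFormat::S16 | AudioFormat::S24 | AudioFormat::S24_3 => Some("tpdf".to_string()),
            _ => None,
        },
    }
}

fn main() {
    assert_eq!(select_ditherer(AudioFormat::S16, None), Some("tpdf".to_string()));
    assert_eq!(select_ditherer(AudioFormat::F32, None), None);
    assert_eq!(select_ditherer(AudioFormat::S24, Some("none")), None);
}
```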
let passthrough = matches.opt_present(PASSTHROUGH);
PlayerConfig {
bitrate,
gapless: !matches.opt_present("disable-gapless"),
normalisation: matches.opt_present("enable-volume-normalisation"),
normalisation_method,
normalisation_type: gain_type,
normalisation_pregain: matches
.opt_str("normalisation-pregain")
.map(|pregain| pregain.parse::<f32>().expect("Invalid pregain float value"))
.unwrap_or(PlayerConfig::default().normalisation_pregain),
normalisation_threshold: matches
.opt_str("normalisation-threshold")
.map(|threshold| {
NormalisationData::db_to_ratio(
threshold
.parse::<f32>()
.expect("Invalid threshold float value"),
)
})
.unwrap_or(PlayerConfig::default().normalisation_threshold),
normalisation_attack: matches
.opt_str("normalisation-attack")
.map(|attack| attack.parse::<f32>().expect("Invalid attack float value") / MILLIS)
.unwrap_or(PlayerConfig::default().normalisation_attack),
normalisation_release: matches
.opt_str("normalisation-release")
.map(|release| {
release.parse::<f32>().expect("Invalid release float value") / MILLIS
})
.unwrap_or(PlayerConfig::default().normalisation_release),
normalisation_knee: matches
.opt_str("normalisation-knee")
.map(|knee| knee.parse::<f32>().expect("Invalid knee float value"))
.unwrap_or(PlayerConfig::default().normalisation_knee),
gapless,
passthrough,
normalisation,
normalisation_type,
normalisation_method,
normalisation_pregain,
normalisation_threshold,
normalisation_attack,
normalisation_release,
normalisation_knee,
ditherer,
}
};
let connect_config = {
let device_type = matches
.opt_str("device-type")
.as_ref()
.opt_str(DEVICE_TYPE)
.as_deref()
.map(|device_type| DeviceType::from_str(device_type).expect("Invalid device type"))
.unwrap_or_default();
let volume_ctrl = matches
.opt_str("volume-ctrl")
.as_ref()
.map(|volume_ctrl| VolumeCtrl::from_str(volume_ctrl).expect("Invalid volume ctrl type"))
.unwrap_or_default();
let has_volume_ctrl = !matches!(mixer_config.volume_ctrl, VolumeCtrl::Fixed);
let autoplay = matches.opt_present(AUTOPLAY);
ConnectConfig {
name,
device_type,
volume: initial_volume,
volume_ctrl,
autoplay: matches.opt_present("autoplay"),
initial_volume,
has_volume_ctrl,
autoplay,
}
};
let enable_discovery = !matches.opt_present("disable-discovery");
let enable_discovery = !matches.opt_present(DISABLE_DISCOVERY);
let player_event_program = matches.opt_str(ONEVENT);
let emit_sink_events = matches.opt_present(EMIT_SINK_EVENTS);
Setup {
format,
backend,
cache,
session_config,
player_config,
connect_config,
credentials,
device,
mixer,
cache,
player_config,
session_config,
connect_config,
mixer_config,
credentials,
enable_discovery,
zeroconf_port,
mixer,
mixer_config,
player_event_program: matches.opt_str("onevent"),
emit_sink_events: matches.opt_present("emit-sink-events"),
player_event_program,
emit_sink_events,
}
}
#[tokio::main(flavor = "current_thread")]
async fn main() {
if env::var("RUST_BACKTRACE").is_err() {
env::set_var("RUST_BACKTRACE", "full")
const RUST_BACKTRACE: &str = "RUST_BACKTRACE";
if env::var(RUST_BACKTRACE).is_err() {
env::set_var(RUST_BACKTRACE, "full")
}
let args: Vec<String> = std::env::args().collect();
@ -645,11 +776,14 @@ async fn main() {
let mut connecting: Pin<Box<dyn future::FusedFuture<Output = _>>> = Box::pin(future::pending());
if setup.enable_discovery {
let config = setup.connect_config.clone();
let device_id = setup.session_config.device_id.clone();
discovery = Some(
librespot_connect::discovery::discovery(config, device_id, setup.zeroconf_port)
librespot::discovery::Discovery::builder(device_id)
.name(setup.connect_config.name.clone())
.device_type(setup.connect_config.device_type)
.port(setup.zeroconf_port)
.launch()
.unwrap(),
);
}
@ -697,7 +831,7 @@ async fn main() {
session = &mut connecting, if !connecting.is_terminated() => match session {
Ok(session) => {
let mixer_config = setup.mixer_config.clone();
let mixer = (setup.mixer)(Some(mixer_config));
let mixer = (setup.mixer)(mixer_config);
let player_config = setup.player_config.clone();
let connect_config = setup.connect_config.clone();
@ -717,14 +851,14 @@ async fn main() {
Ok(e) if e.success() => (),
Ok(e) => {
if let Some(code) = e.code() {
warn!("Sink event prog returned exit code {}", code);
warn!("Sink event program returned exit code {}", code);
} else {
warn!("Sink event prog returned failure");
warn!("Sink event program returned failure");
}
}
},
Err(e) => {
warn!("Emitting sink event failed: {}", e);
}
},
}
})));
}
@ -774,13 +908,21 @@ async fn main() {
tokio::spawn(async move {
match child.wait().await {
Ok(status) if !status.success() => error!("child exited with status {:?}", status.code()),
Err(e) => error!("failed to wait on child process: {}", e),
_ => {}
Ok(e) if e.success() => (),
Ok(e) => {
if let Some(code) = e.code() {
warn!("On event program returned exit code {}", code);
} else {
warn!("On event program returned failure");
}
},
Err(e) => {
warn!("On event program failed: {}", e);
},
}
});
} else {
error!("program failed to start");
warn!("On event program failed to start");
}
}
}