Merge branch 'new-api' into dev

This commit is contained in:
Roderick van Domburg 2022-07-29 23:39:06 +02:00
commit 37794c5b78
No known key found for this signature in database
GPG key ID: 87F5FDE8A56219F4
573 changed files with 22230 additions and 4172 deletions

View file

@ -3,7 +3,7 @@ name: test
on: on:
push: push:
branches: [master, dev] branches: [dev, master, new-api]
paths: paths:
[ [
"**.rs", "**.rs",
@ -37,26 +37,17 @@ on:
env: env:
RUST_BACKTRACE: 1 RUST_BACKTRACE: 1
RUSTFLAGS: -D warnings
# The layering here is as follows, checking in priority from highest to lowest:
# 1. absence of errors and warnings on Linux/x86
# 2. cross compilation on Windows and Linux/ARM
# 3. absence of lints
# 4. code formatting
jobs: jobs:
fmt:
name: rustfmt
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Install toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
components: rustfmt
- run: cargo fmt --all -- --check
test-linux: test-linux:
needs: fmt name: cargo +${{ matrix.toolchain }} check (${{ matrix.os }})
name: cargo +${{ matrix.toolchain }} build (${{ matrix.os }})
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
continue-on-error: ${{ matrix.experimental }} continue-on-error: ${{ matrix.experimental }}
strategy: strategy:
@ -64,14 +55,13 @@ jobs:
matrix: matrix:
os: [ubuntu-latest] os: [ubuntu-latest]
toolchain: toolchain:
- 1.57 # MSRV (Minimum supported rust version) - "1.61" # MSRV (Minimum supported rust version)
- stable - stable
- beta
experimental: [false] experimental: [false]
# Ignore failures in nightly # Ignore failures in beta
include: include:
- os: ubuntu-latest - os: ubuntu-latest
toolchain: nightly toolchain: beta
experimental: true experimental: true
steps: steps:
- name: Checkout code - name: Checkout code
@ -106,22 +96,25 @@ jobs:
- run: cargo test --workspace - run: cargo test --workspace
- run: cargo install cargo-hack - run: cargo install cargo-hack
- run: cargo hack --workspace --remove-dev-deps - run: cargo hack --workspace --remove-dev-deps
- run: cargo build -p librespot-core --no-default-features - run: cargo check -p librespot-core --no-default-features
- run: cargo build -p librespot-core - run: cargo check -p librespot-core
- run: cargo hack build --each-feature -p librespot-discovery - run: cargo hack check --each-feature -p librespot-discovery
- run: cargo hack build --each-feature -p librespot-playback - run: cargo hack check --each-feature -p librespot-playback
- run: cargo hack build --each-feature - run: cargo hack check --each-feature
test-windows: test-windows:
needs: fmt needs: test-linux
name: cargo build (${{ matrix.os }}) name: cargo +${{ matrix.toolchain }} check (${{ matrix.os }})
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
continue-on-error: false
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
os: [windows-latest] os: [windows-latest]
toolchain: [stable] toolchain:
- "1.61" # MSRV (Minimum supported rust version)
- stable
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@v2 uses: actions/checkout@v2
@ -153,20 +146,22 @@ jobs:
- run: cargo install cargo-hack - run: cargo install cargo-hack
- run: cargo hack --workspace --remove-dev-deps - run: cargo hack --workspace --remove-dev-deps
- run: cargo build --no-default-features - run: cargo check --no-default-features
- run: cargo build - run: cargo check
test-cross-arm: test-cross-arm:
needs: fmt name: cross +${{ matrix.toolchain }} build ${{ matrix.target }}
needs: test-linux
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
continue-on-error: false continue-on-error: false
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
include: os: [ubuntu-latest]
- os: ubuntu-latest target: [armv7-unknown-linux-gnueabihf]
target: armv7-unknown-linux-gnueabihf toolchain:
toolchain: stable - "1.61" # MSRV (Minimum supported rust version)
- stable
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@v2 uses: actions/checkout@v2
@ -197,3 +192,67 @@ jobs:
run: cargo install cross || true run: cargo install cross || true
- name: Build - name: Build
run: cross build --target ${{ matrix.target }} --no-default-features run: cross build --target ${{ matrix.target }} --no-default-features
clippy:
needs: [test-cross-arm, test-windows]
name: cargo +${{ matrix.toolchain }} clippy (${{ matrix.os }})
runs-on: ${{ matrix.os }}
continue-on-error: false
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
toolchain: [stable]
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Install toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.toolchain }}
override: true
components: clippy
- name: Get Rustc version
id: get-rustc-version
run: echo "::set-output name=version::$(rustc -V)"
shell: bash
- name: Cache Rust dependencies
uses: actions/cache@v2
with:
path: |
~/.cargo/registry/index
~/.cargo/registry/cache
~/.cargo/git
target
key: ${{ runner.os }}-${{ steps.get-rustc-version.outputs.version }}-${{ hashFiles('Cargo.lock') }}
- name: Install developer package dependencies
run: sudo apt-get update && sudo apt-get install libpulse-dev portaudio19-dev libasound2-dev libsdl2-dev gstreamer1.0-dev libgstreamer-plugins-base1.0-dev libavahi-compat-libdnssd-dev
- run: cargo install cargo-hack
- run: cargo hack --workspace --remove-dev-deps
- run: cargo clippy -p librespot-core --no-default-features
- run: cargo clippy -p librespot-core
- run: cargo hack clippy --each-feature -p librespot-discovery
- run: cargo hack clippy --each-feature -p librespot-playback
- run: cargo hack clippy --each-feature
fmt:
needs: clippy
name: cargo +${{ matrix.toolchain }} fmt
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Install toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
components: rustfmt
- run: cargo fmt --all -- --check

3
.gitignore vendored
View file

@ -5,5 +5,4 @@ spotify_appkey.key
.project .project
.history .history
*.save *.save
*.*~

View file

@ -7,7 +7,7 @@ In order to compile librespot, you will first need to set up a suitable Rust bui
### Install Rust ### Install Rust
The easiest, and recommended way to get Rust is to use [rustup](https://rustup.rs). Once thats installed, Rust's standard tools should be set up and ready to use. The easiest, and recommended way to get Rust is to use [rustup](https://rustup.rs). Once thats installed, Rust's standard tools should be set up and ready to use.
*Note: The current minimum required Rust version at the time of writing is 1.57, you can find the current minimum version specified in the `.github/workflow/test.yml` file.* *Note: The current minimum supported Rust version at the time of writing is 1.61.*
#### Additional Rust tools - `rustfmt` #### Additional Rust tools - `rustfmt`
To ensure a consistent codebase, we utilise [`rustfmt`](https://github.com/rust-lang/rustfmt) and [`clippy`](https://github.com/rust-lang/rust-clippy), which are installed by default with `rustup` these days, else they can be installed manually with: To ensure a consistent codebase, we utilise [`rustfmt`](https://github.com/rust-lang/rustfmt) and [`clippy`](https://github.com/rust-lang/rust-clippy), which are installed by default with `rustup` these days, else they can be installed manually with:
@ -15,11 +15,11 @@ To ensure a consistent codebase, we utilise [`rustfmt`](https://github.com/rust-
rustup component add rustfmt rustup component add rustfmt
rustup component add clippy rustup component add clippy
``` ```
Using `rustfmt` is not optional, as our CI checks against this repo's rules. Using `cargo fmt` and `cargo clippy` is not optional, as our CI checks against this repo's rules.
### General dependencies ### General dependencies
Along with Rust, you will also require a C compiler. Along with Rust, you will also require a C compiler.
On Debian/Ubuntu, install with: On Debian/Ubuntu, install with:
```shell ```shell
sudo apt-get install build-essential sudo apt-get install build-essential
@ -27,10 +27,10 @@ sudo apt-get install build-essential
``` ```
On Fedora systems, install with: On Fedora systems, install with:
```shell ```shell
sudo dnf install gcc sudo dnf install gcc
``` ```
### Audio library dependencies ### Audio library dependencies
Depending on the chosen backend, specific development libraries are required. Depending on the chosen backend, specific development libraries are required.
*_Note this is an non-exhaustive list, open a PR to add to it!_* *_Note this is an non-exhaustive list, open a PR to add to it!_*
@ -63,7 +63,7 @@ sudo dnf install alsa-lib-devel
The recommended method is to first fork the repo, so that you have a copy that you have read/write access to. After that, its a simple case of cloning your fork. The recommended method is to first fork the repo, so that you have a copy that you have read/write access to. After that, its a simple case of cloning your fork.
```bash ```bash
git clone git@github.com:YOURUSERNAME/librespot.git git clone git@github.com:YOUR_USERNAME/librespot.git
``` ```
## Compiling & Running ## Compiling & Running

908
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -49,18 +49,16 @@ path = "protocol"
version = "0.4.2" version = "0.4.2"
[dependencies] [dependencies]
base64 = "0.13" env_logger = { version = "0.9", default-features = false, features = ["termcolor", "humantime", "atty"] }
env_logger = {version = "0.9", default-features = false, features = ["termcolor","humantime","atty"]}
futures-util = { version = "0.3", default_features = false } futures-util = { version = "0.3", default_features = false }
getopts = "0.2.21" getopts = "0.2"
hex = "0.4" hex = "0.4"
hyper = "0.14"
log = "0.4" log = "0.4"
rpassword = "6.0" rpassword = "7.0"
sha1 = "0.10"
thiserror = "1.0" thiserror = "1.0"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros", "signal", "sync", "process"] } tokio = { version = "1", features = ["rt", "macros", "signal", "sync", "parking_lot", "process"] }
url = "2.2" url = "2.2"
sha-1 = "0.9"
[features] [features]
alsa-backend = ["librespot-playback/alsa-backend"] alsa-backend = ["librespot-playback/alsa-backend"]
@ -72,7 +70,9 @@ rodiojack-backend = ["librespot-playback/rodiojack-backend"]
sdl-backend = ["librespot-playback/sdl-backend"] sdl-backend = ["librespot-playback/sdl-backend"]
gstreamer-backend = ["librespot-playback/gstreamer-backend"] gstreamer-backend = ["librespot-playback/gstreamer-backend"]
with-dns-sd = ["librespot-discovery/with-dns-sd"] with-dns-sd = ["librespot-core/with-dns-sd", "librespot-discovery/with-dns-sd"]
passthrough-decoder = ["librespot-playback/passthrough-decoder"]
default = ["rodio-backend"] default = ["rodio-backend"]

View file

@ -12,10 +12,15 @@ path = "../core"
version = "0.4.2" version = "0.4.2"
[dependencies] [dependencies]
aes-ctr = "0.6" aes = "0.8"
byteorder = "1.4" byteorder = "1.4"
bytes = "1.0" bytes = "1"
ctr = "0.9"
futures-core = "0.3"
futures-util = "0.3"
hyper = { version = "0.14", features = ["client"] }
log = "0.4" log = "0.4"
futures-util = { version = "0.3", default_features = false } parking_lot = { version = "0.12", features = ["deadlock_detection"] }
tempfile = "3.1" tempfile = "3"
tokio = { version = "1", features = ["sync", "macros"] } thiserror = "1.0"
tokio = { version = "1", features = ["macros", "parking_lot", "sync"] }

View file

@ -1,8 +1,8 @@
use std::io; use std::io;
use aes_ctr::cipher::generic_array::GenericArray; use aes::cipher::{KeyIvInit, StreamCipher, StreamCipherSeek};
use aes_ctr::cipher::{NewStreamCipher, SyncStreamCipher, SyncStreamCipherSeek};
use aes_ctr::Aes128Ctr; type Aes128Ctr = ctr::Ctr128BE<aes::Aes128>;
use librespot_core::audio_key::AudioKey; use librespot_core::audio_key::AudioKey;
@ -11,16 +11,20 @@ const AUDIO_AESIV: [u8; 16] = [
]; ];
pub struct AudioDecrypt<T: io::Read> { pub struct AudioDecrypt<T: io::Read> {
cipher: Aes128Ctr, // a `None` cipher is a convenience to make `AudioDecrypt` pass files unaltered
cipher: Option<Aes128Ctr>,
reader: T, reader: T,
} }
impl<T: io::Read> AudioDecrypt<T> { impl<T: io::Read> AudioDecrypt<T> {
pub fn new(key: AudioKey, reader: T) -> AudioDecrypt<T> { pub fn new(key: Option<AudioKey>, reader: T) -> AudioDecrypt<T> {
let cipher = Aes128Ctr::new( let cipher = if let Some(key) = key {
GenericArray::from_slice(&key.0), Aes128Ctr::new_from_slices(&key.0, &AUDIO_AESIV).ok()
GenericArray::from_slice(&AUDIO_AESIV), } else {
); // some files are unencrypted
None
};
AudioDecrypt { cipher, reader } AudioDecrypt { cipher, reader }
} }
} }
@ -29,7 +33,9 @@ impl<T: io::Read> io::Read for AudioDecrypt<T> {
fn read(&mut self, output: &mut [u8]) -> io::Result<usize> { fn read(&mut self, output: &mut [u8]) -> io::Result<usize> {
let len = self.reader.read(output)?; let len = self.reader.read(output)?;
self.cipher.apply_keystream(&mut output[..len]); if let Some(ref mut cipher) = self.cipher {
cipher.apply_keystream(&mut output[..len]);
}
Ok(len) Ok(len)
} }
@ -39,7 +45,9 @@ impl<T: io::Read + io::Seek> io::Seek for AudioDecrypt<T> {
fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> { fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> {
let newpos = self.reader.seek(pos)?; let newpos = self.reader.seek(pos)?;
self.cipher.seek(newpos); if let Some(ref mut cipher) = self.cipher {
cipher.seek(newpos);
}
Ok(newpos) Ok(newpos)
} }

View file

@ -1,40 +1,82 @@
mod receive; mod receive;
use std::cmp::{max, min}; use std::{
use std::fs; cmp::{max, min},
use std::io::{self, Read, Seek, SeekFrom}; fs,
use std::sync::atomic::{self, AtomicUsize}; io::{self, Read, Seek, SeekFrom},
use std::sync::{Arc, Condvar, Mutex}; sync::{
use std::time::{Duration, Instant}; atomic::{AtomicBool, AtomicUsize, Ordering},
Arc,
},
time::{Duration, Instant},
};
use byteorder::{BigEndian, ByteOrder}; use futures_util::{future::IntoStream, StreamExt, TryFutureExt};
use futures_util::{future, StreamExt, TryFutureExt, TryStreamExt}; use hyper::{client::ResponseFuture, header::CONTENT_RANGE, Body, Response, StatusCode};
use librespot_core::channel::{ChannelData, ChannelError, ChannelHeaders}; use parking_lot::{Condvar, Mutex};
use librespot_core::session::Session;
use librespot_core::spotify_id::FileId;
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
use thiserror::Error;
use tokio::sync::{mpsc, oneshot}; use tokio::sync::{mpsc, oneshot};
use self::receive::{audio_file_fetch, request_range}; use librespot_core::{cdn_url::CdnUrl, Error, FileId, Session};
use self::receive::audio_file_fetch;
use crate::range_set::{Range, RangeSet}; use crate::range_set::{Range, RangeSet};
pub type AudioFileResult = Result<(), librespot_core::Error>;
#[derive(Error, Debug)]
pub enum AudioFileError {
#[error("other end of channel disconnected")]
Channel,
#[error("required header not found")]
Header,
#[error("streamer received no data")]
NoData,
#[error("no output available")]
Output,
#[error("invalid status code {0}")]
StatusCode(StatusCode),
#[error("wait timeout exceeded")]
WaitTimeout,
}
impl From<AudioFileError> for Error {
fn from(err: AudioFileError) -> Self {
match err {
AudioFileError::Channel => Error::aborted(err),
AudioFileError::Header => Error::unavailable(err),
AudioFileError::NoData => Error::unavailable(err),
AudioFileError::Output => Error::aborted(err),
AudioFileError::StatusCode(_) => Error::failed_precondition(err),
AudioFileError::WaitTimeout => Error::deadline_exceeded(err),
}
}
}
/// The minimum size of a block that is requested from the Spotify servers in one request. /// The minimum size of a block that is requested from the Spotify servers in one request.
/// This is the block size that is typically requested while doing a `seek()` on a file. /// This is the block size that is typically requested while doing a `seek()` on a file.
/// The Symphonia decoder requires this to be a power of 2 and > 32 kB.
/// Note: smaller requests can happen if part of the block is downloaded already. /// Note: smaller requests can happen if part of the block is downloaded already.
const MINIMUM_DOWNLOAD_SIZE: usize = 1024 * 16; pub const MINIMUM_DOWNLOAD_SIZE: usize = 1024 * 128;
/// The minimum network throughput that we expect. Together with the minimum download size,
/// this will determine the time we will wait for a response.
pub const MINIMUM_THROUGHPUT: usize = 8192;
/// The amount of data that is requested when initially opening a file. /// The amount of data that is requested when initially opening a file.
/// Note: if the file is opened to play from the beginning, the amount of data to /// Note: if the file is opened to play from the beginning, the amount of data to
/// read ahead is requested in addition to this amount. If the file is opened to seek to /// read ahead is requested in addition to this amount. If the file is opened to seek to
/// another position, then only this amount is requested on the first request. /// another position, then only this amount is requested on the first request.
const INITIAL_DOWNLOAD_SIZE: usize = 1024 * 16; pub const INITIAL_DOWNLOAD_SIZE: usize = 1024 * 8;
/// The ping time that is used for calculations before a ping time was actually measured. /// The ping time that is used for calculations before a ping time was actually measured.
const INITIAL_PING_TIME_ESTIMATE: Duration = Duration::from_millis(500); pub const INITIAL_PING_TIME_ESTIMATE: Duration = Duration::from_millis(500);
/// If the measured ping time to the Spotify server is larger than this value, it is capped /// If the measured ping time to the Spotify server is larger than this value, it is capped
/// to avoid run-away block sizes and pre-fetching. /// to avoid run-away block sizes and pre-fetching.
const MAXIMUM_ASSUMED_PING_TIME: Duration = Duration::from_millis(1500); pub const MAXIMUM_ASSUMED_PING_TIME: Duration = Duration::from_millis(1500);
/// Before playback starts, this many seconds of data must be present. /// Before playback starts, this many seconds of data must be present.
/// Note: the calculations are done using the nominal bitrate of the file. The actual amount /// Note: the calculations are done using the nominal bitrate of the file. The actual amount
@ -63,7 +105,7 @@ pub const READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS: f32 = 10.0;
/// If the amount of data that is pending (requested but not received) is less than a certain amount, /// If the amount of data that is pending (requested but not received) is less than a certain amount,
/// data is pre-fetched in addition to the read ahead settings above. The threshold for requesting more /// data is pre-fetched in addition to the read ahead settings above. The threshold for requesting more
/// data is calculated as `<pending bytes> < PREFETCH_THRESHOLD_FACTOR * <ping time> * <nominal data rate>` /// data is calculated as `<pending bytes> < PREFETCH_THRESHOLD_FACTOR * <ping time> * <nominal data rate>`
const PREFETCH_THRESHOLD_FACTOR: f32 = 4.0; pub const PREFETCH_THRESHOLD_FACTOR: f32 = 4.0;
/// Similar to `PREFETCH_THRESHOLD_FACTOR`, but it also takes the current download rate into account. /// Similar to `PREFETCH_THRESHOLD_FACTOR`, but it also takes the current download rate into account.
/// The formula used is `<pending bytes> < FAST_PREFETCH_THRESHOLD_FACTOR * <ping time> * <measured download rate>` /// The formula used is `<pending bytes> < FAST_PREFETCH_THRESHOLD_FACTOR * <ping time> * <measured download rate>`
@ -72,16 +114,17 @@ const PREFETCH_THRESHOLD_FACTOR: f32 = 4.0;
/// the download rate ramps up. However, this comes at the cost that it might hurt ping time if a seek is /// the download rate ramps up. However, this comes at the cost that it might hurt ping time if a seek is
/// performed while downloading. Values smaller than `1.0` cause the download rate to collapse and effectively /// performed while downloading. Values smaller than `1.0` cause the download rate to collapse and effectively
/// only `PREFETCH_THRESHOLD_FACTOR` is in effect. Thus, set to `0.0` if bandwidth saturation is not wanted. /// only `PREFETCH_THRESHOLD_FACTOR` is in effect. Thus, set to `0.0` if bandwidth saturation is not wanted.
const FAST_PREFETCH_THRESHOLD_FACTOR: f32 = 1.5; pub const FAST_PREFETCH_THRESHOLD_FACTOR: f32 = 1.5;
/// Limit the number of requests that are pending simultaneously before pre-fetching data. Pending /// Limit the number of requests that are pending simultaneously before pre-fetching data. Pending
/// requests share bandwidth. Thus, havint too many requests can lead to the one that is needed next /// requests share bandwidth. Thus, having too many requests can lead to the one that is needed next
/// for playback to be delayed leading to a buffer underrun. This limit has the effect that a new /// for playback to be delayed leading to a buffer underrun. This limit has the effect that a new
/// pre-fetch request is only sent if less than `MAX_PREFETCH_REQUESTS` are pending. /// pre-fetch request is only sent if less than `MAX_PREFETCH_REQUESTS` are pending.
const MAX_PREFETCH_REQUESTS: usize = 4; pub const MAX_PREFETCH_REQUESTS: usize = 4;
/// The time we will wait to obtain status updates on downloading. /// The time we will wait to obtain status updates on downloading.
const DOWNLOAD_TIMEOUT: Duration = Duration::from_secs(1); pub const DOWNLOAD_TIMEOUT: Duration =
Duration::from_secs((MINIMUM_DOWNLOAD_SIZE / MINIMUM_THROUGHPUT) as u64);
pub enum AudioFile { pub enum AudioFile {
Cached(fs::File), Cached(fs::File),
@ -89,11 +132,20 @@ pub enum AudioFile {
} }
#[derive(Debug)] #[derive(Debug)]
enum StreamLoaderCommand { pub struct StreamingRequest {
Fetch(Range), // signal the stream loader to fetch a range of the file streamer: IntoStream<ResponseFuture>,
RandomAccessMode(), // optimise download strategy for random access initial_response: Option<Response<Body>>,
StreamMode(), // optimise download strategy for streaming offset: usize,
Close(), // terminate and don't load any more data length: usize,
request_time: Instant,
}
#[derive(Debug)]
pub enum StreamLoaderCommand {
Fetch(Range), // signal the stream loader to fetch a range of the file
RandomAccessMode, // optimise download strategy for random access
StreamMode, // optimise download strategy for streaming
Close, // terminate and don't load any more data
} }
#[derive(Clone)] #[derive(Clone)]
@ -113,33 +165,40 @@ impl StreamLoaderController {
} }
pub fn range_available(&self, range: Range) -> bool { pub fn range_available(&self, range: Range) -> bool {
if let Some(ref shared) = self.stream_shared { let available = if let Some(ref shared) = self.stream_shared {
let download_status = shared.download_status.lock().unwrap(); let download_status = shared.download_status.lock();
range.length range.length
<= download_status <= download_status
.downloaded .downloaded
.contained_length_from_value(range.start) .contained_length_from_value(range.start)
} else { } else {
range.length <= self.len() - range.start range.length <= self.len() - range.start
} };
available
} }
pub fn range_to_end_available(&self) -> bool { pub fn range_to_end_available(&self) -> bool {
self.stream_shared.as_ref().map_or(true, |shared| { match self.stream_shared {
let read_position = shared.read_position.load(atomic::Ordering::Relaxed); Some(ref shared) => {
self.range_available(Range::new(read_position, self.len() - read_position)) let read_position = shared.read_position.load(Ordering::Acquire);
}) self.range_available(Range::new(read_position, self.len() - read_position))
}
None => true,
}
} }
pub fn ping_time(&self) -> Duration { pub fn ping_time(&self) -> Duration {
Duration::from_millis(self.stream_shared.as_ref().map_or(0, |shared| { Duration::from_millis(self.stream_shared.as_ref().map_or(0, |shared| {
shared.ping_time_ms.load(atomic::Ordering::Relaxed) as u64 shared.ping_time_ms.load(Ordering::Relaxed) as u64
})) }))
} }
fn send_stream_loader_command(&self, command: StreamLoaderCommand) { fn send_stream_loader_command(&self, command: StreamLoaderCommand) {
if let Some(ref channel) = self.channel_tx { if let Some(ref channel) = self.channel_tx {
// ignore the error in case the channel has been closed already. // Ignore the error in case the channel has been closed already.
// This means that the file was completely downloaded.
let _ = channel.send(command); let _ = channel.send(command);
} }
} }
@ -149,7 +208,7 @@ impl StreamLoaderController {
self.send_stream_loader_command(StreamLoaderCommand::Fetch(range)); self.send_stream_loader_command(StreamLoaderCommand::Fetch(range));
} }
pub fn fetch_blocking(&self, mut range: Range) { pub fn fetch_blocking(&self, mut range: Range) -> AudioFileResult {
// signal the stream loader to tech a range of the file and block until it is loaded. // signal the stream loader to tech a range of the file and block until it is loaded.
// ensure the range is within the file's bounds. // ensure the range is within the file's bounds.
@ -162,17 +221,21 @@ impl StreamLoaderController {
self.fetch(range); self.fetch(range);
if let Some(ref shared) = self.stream_shared { if let Some(ref shared) = self.stream_shared {
let mut download_status = shared.download_status.lock().unwrap(); let mut download_status = shared.download_status.lock();
while range.length while range.length
> download_status > download_status
.downloaded .downloaded
.contained_length_from_value(range.start) .contained_length_from_value(range.start)
{ {
download_status = shared if shared
.cond .cond
.wait_timeout(download_status, DOWNLOAD_TIMEOUT) .wait_for(&mut download_status, DOWNLOAD_TIMEOUT)
.unwrap() .timed_out()
.0; {
return Err(AudioFileError::WaitTimeout.into());
}
if range.length if range.length
> (download_status > (download_status
.downloaded .downloaded
@ -185,41 +248,73 @@ impl StreamLoaderController {
} }
} }
} }
Ok(())
} }
#[allow(dead_code)]
pub fn fetch_next(&self, length: usize) { pub fn fetch_next(&self, length: usize) {
if let Some(ref shared) = self.stream_shared { if let Some(ref shared) = self.stream_shared {
let range = Range { let range = Range {
start: shared.read_position.load(atomic::Ordering::Relaxed), start: shared.read_position.load(Ordering::Acquire),
length, length,
}; };
self.fetch(range) self.fetch(range);
} }
} }
pub fn fetch_next_blocking(&self, length: usize) { #[allow(dead_code)]
if let Some(ref shared) = self.stream_shared { pub fn fetch_next_blocking(&self, length: usize) -> AudioFileResult {
let range = Range { match self.stream_shared {
start: shared.read_position.load(atomic::Ordering::Relaxed), Some(ref shared) => {
length, let range = Range {
}; start: shared.read_position.load(Ordering::Acquire),
self.fetch_blocking(range); length,
};
self.fetch_blocking(range)
}
None => Ok(()),
}
}
pub fn fetch_next_and_wait(
&self,
request_length: usize,
wait_length: usize,
) -> AudioFileResult {
match self.stream_shared {
Some(ref shared) => {
let start = shared.read_position.load(Ordering::Acquire);
let request_range = Range {
start,
length: request_length,
};
self.fetch(request_range);
let wait_range = Range {
start,
length: wait_length,
};
self.fetch_blocking(wait_range)
}
None => Ok(()),
} }
} }
pub fn set_random_access_mode(&self) { pub fn set_random_access_mode(&self) {
// optimise download strategy for random access // optimise download strategy for random access
self.send_stream_loader_command(StreamLoaderCommand::RandomAccessMode()); self.send_stream_loader_command(StreamLoaderCommand::RandomAccessMode);
} }
pub fn set_stream_mode(&self) { pub fn set_stream_mode(&self) {
// optimise download strategy for streaming // optimise download strategy for streaming
self.send_stream_loader_command(StreamLoaderCommand::StreamMode()); self.send_stream_loader_command(StreamLoaderCommand::StreamMode);
} }
pub fn close(&self) { pub fn close(&self) {
// terminate stream loading and don't load any more data for this file. // terminate stream loading and don't load any more data for this file.
self.send_stream_loader_command(StreamLoaderCommand::Close()); self.send_stream_loader_command(StreamLoaderCommand::Close);
} }
} }
@ -235,19 +330,13 @@ struct AudioFileDownloadStatus {
downloaded: RangeSet, downloaded: RangeSet,
} }
#[derive(Copy, Clone, PartialEq, Eq)]
enum DownloadStrategy {
RandomAccess(),
Streaming(),
}
struct AudioFileShared { struct AudioFileShared {
file_id: FileId, cdn_url: CdnUrl,
file_size: usize, file_size: usize,
stream_data_rate: usize, bytes_per_second: usize,
cond: Condvar, cond: Condvar,
download_status: Mutex<AudioFileDownloadStatus>, download_status: Mutex<AudioFileDownloadStatus>,
download_strategy: Mutex<DownloadStrategy>, download_streaming: AtomicBool,
number_of_open_requests: AtomicUsize, number_of_open_requests: AtomicUsize,
ping_time_ms: AtomicUsize, ping_time_ms: AtomicUsize,
read_position: AtomicUsize, read_position: AtomicUsize,
@ -258,8 +347,7 @@ impl AudioFile {
session: &Session, session: &Session,
file_id: FileId, file_id: FileId,
bytes_per_second: usize, bytes_per_second: usize,
play_from_beginning: bool, ) -> Result<AudioFile, Error> {
) -> Result<AudioFile, ChannelError> {
if let Some(file) = session.cache().and_then(|cache| cache.file(file_id)) { if let Some(file) = session.cache().and_then(|cache| cache.file(file_id)) {
debug!("File {} already in cache", file_id); debug!("File {} already in cache", file_id);
return Ok(AudioFile::Cached(file)); return Ok(AudioFile::Cached(file));
@ -268,48 +356,30 @@ impl AudioFile {
debug!("Downloading file {}", file_id); debug!("Downloading file {}", file_id);
let (complete_tx, complete_rx) = oneshot::channel(); let (complete_tx, complete_rx) = oneshot::channel();
let mut initial_data_length = if play_from_beginning {
INITIAL_DOWNLOAD_SIZE
+ max(
(READ_AHEAD_DURING_PLAYBACK.as_secs_f32() * bytes_per_second as f32) as usize,
(INITIAL_PING_TIME_ESTIMATE.as_secs_f32()
* READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS
* bytes_per_second as f32) as usize,
)
} else {
INITIAL_DOWNLOAD_SIZE
};
if initial_data_length % 4 != 0 {
initial_data_length += 4 - (initial_data_length % 4);
}
let (headers, data) = request_range(session, file_id, 0, initial_data_length).split();
let streaming = AudioFileStreaming::open( let streaming =
session.clone(), AudioFileStreaming::open(session.clone(), file_id, complete_tx, bytes_per_second);
data,
initial_data_length,
Instant::now(),
headers,
file_id,
complete_tx,
bytes_per_second,
);
let session_ = session.clone(); let session_ = session.clone();
session.spawn(complete_rx.map_ok(move |mut file| { session.spawn(complete_rx.map_ok(move |mut file| {
debug!("Downloading file {} complete", file_id);
if let Some(cache) = session_.cache() { if let Some(cache) = session_.cache() {
debug!("File {} complete, saving to cache", file_id); if let Some(cache_id) = cache.file_path(file_id) {
cache.save_file(file_id, &mut file); if let Err(e) = cache.save_file(file_id, &mut file) {
} else { error!("Error caching file {} to {:?}: {}", file_id, cache_id, e);
debug!("File {} complete", file_id); } else {
debug!("File {} cached to {:?}", file_id, cache_id);
}
}
} }
})); }));
Ok(AudioFile::Streaming(streaming.await?)) Ok(AudioFile::Streaming(streaming.await?))
} }
pub fn get_stream_loader_controller(&self) -> StreamLoaderController { pub fn get_stream_loader_controller(&self) -> Result<StreamLoaderController, Error> {
match self { let controller = match self {
AudioFile::Streaming(ref stream) => StreamLoaderController { AudioFile::Streaming(ref stream) => StreamLoaderController {
channel_tx: Some(stream.stream_loader_command_tx.clone()), channel_tx: Some(stream.stream_loader_command_tx.clone()),
stream_shared: Some(stream.shared.clone()), stream_shared: Some(stream.shared.clone()),
@ -318,9 +388,11 @@ impl AudioFile {
AudioFile::Cached(ref file) => StreamLoaderController { AudioFile::Cached(ref file) => StreamLoaderController {
channel_tx: None, channel_tx: None,
stream_shared: None, stream_shared: None,
file_size: file.metadata().unwrap().len() as usize, file_size: file.metadata()?.len() as usize,
}, },
} };
Ok(controller)
} }
pub fn is_cached(&self) -> bool { pub fn is_cached(&self) -> bool {
@ -331,53 +403,85 @@ impl AudioFile {
impl AudioFileStreaming { impl AudioFileStreaming {
pub async fn open( pub async fn open(
session: Session, session: Session,
initial_data_rx: ChannelData,
initial_data_length: usize,
initial_request_sent_time: Instant,
headers: ChannelHeaders,
file_id: FileId, file_id: FileId,
complete_tx: oneshot::Sender<NamedTempFile>, complete_tx: oneshot::Sender<NamedTempFile>,
streaming_data_rate: usize, bytes_per_second: usize,
) -> Result<AudioFileStreaming, ChannelError> { ) -> Result<AudioFileStreaming, Error> {
let (_, data) = headers let cdn_url = CdnUrl::new(file_id).resolve_audio(&session).await?;
.try_filter(|(id, _)| future::ready(*id == 0x3))
.next()
.await
.unwrap()?;
let size = BigEndian::read_u32(&data) as usize * 4; if let Ok(url) = cdn_url.try_get_url() {
trace!("Streaming from {}", url);
}
// When the audio file is really small, this `download_size` may turn out to be
// larger than the audio file we're going to stream later on. This is OK; requesting
// `Content-Range` > `Content-Length` will return the complete file with status code
// 206 Partial Content.
let mut streamer =
session
.spclient()
.stream_from_cdn(&cdn_url, 0, INITIAL_DOWNLOAD_SIZE)?;
// Get the first chunk with the headers to get the file size.
// The remainder of that chunk with possibly also a response body is then
// further processed in `audio_file_fetch`.
let request_time = Instant::now();
let response = streamer.next().await.ok_or(AudioFileError::NoData)??;
let code = response.status();
if code != StatusCode::PARTIAL_CONTENT {
debug!(
"Opening audio file expected partial content but got: {}",
code
);
return Err(AudioFileError::StatusCode(code).into());
}
let header_value = response
.headers()
.get(CONTENT_RANGE)
.ok_or(AudioFileError::Header)?;
let str_value = header_value.to_str()?;
let hyphen_index = str_value.find('-').unwrap_or_default();
let slash_index = str_value.find('/').unwrap_or_default();
let upper_bound: usize = str_value[hyphen_index + 1..slash_index].parse()?;
let file_size = str_value[slash_index + 1..].parse()?;
let initial_request = StreamingRequest {
streamer,
initial_response: Some(response),
offset: 0,
length: upper_bound + 1,
request_time,
};
let shared = Arc::new(AudioFileShared { let shared = Arc::new(AudioFileShared {
file_id, cdn_url,
file_size: size, file_size,
stream_data_rate: streaming_data_rate, bytes_per_second,
cond: Condvar::new(), cond: Condvar::new(),
download_status: Mutex::new(AudioFileDownloadStatus { download_status: Mutex::new(AudioFileDownloadStatus {
requested: RangeSet::new(), requested: RangeSet::new(),
downloaded: RangeSet::new(), downloaded: RangeSet::new(),
}), }),
download_strategy: Mutex::new(DownloadStrategy::RandomAccess()), // start with random access mode until someone tells us otherwise download_streaming: AtomicBool::new(true),
number_of_open_requests: AtomicUsize::new(0), number_of_open_requests: AtomicUsize::new(0),
ping_time_ms: AtomicUsize::new(0), ping_time_ms: AtomicUsize::new(INITIAL_PING_TIME_ESTIMATE.as_millis() as usize),
read_position: AtomicUsize::new(0), read_position: AtomicUsize::new(0),
}); });
let mut write_file = NamedTempFile::new().unwrap(); let write_file = NamedTempFile::new_in(session.config().tmp_dir.clone())?;
write_file.as_file().set_len(size as u64).unwrap(); write_file.as_file().set_len(file_size as u64)?;
write_file.seek(SeekFrom::Start(0)).unwrap();
let read_file = write_file.reopen().unwrap(); let read_file = write_file.reopen()?;
// let (seek_tx, seek_rx) = mpsc::unbounded();
let (stream_loader_command_tx, stream_loader_command_rx) = let (stream_loader_command_tx, stream_loader_command_rx) =
mpsc::unbounded_channel::<StreamLoaderCommand>(); mpsc::unbounded_channel::<StreamLoaderCommand>();
session.spawn(audio_file_fetch( session.spawn(audio_file_fetch(
session.clone(), session.clone(),
shared.clone(), shared.clone(),
initial_data_rx, initial_request,
initial_request_sent_time,
initial_data_length,
write_file, write_file,
stream_loader_command_rx, stream_loader_command_rx,
complete_tx, complete_tx,
@ -401,83 +505,70 @@ impl Read for AudioFileStreaming {
} }
let length = min(output.len(), self.shared.file_size - offset); let length = min(output.len(), self.shared.file_size - offset);
if length == 0 {
return Ok(0);
}
let length_to_request = match *(self.shared.download_strategy.lock().unwrap()) { let length_to_request = if self.shared.download_streaming.load(Ordering::Acquire) {
DownloadStrategy::RandomAccess() => length, // Due to the read-ahead stuff, we potentially request more than the actual request demanded.
DownloadStrategy::Streaming() => { let ping_time_seconds =
// Due to the read-ahead stuff, we potentially request more than the actual request demanded. Duration::from_millis(self.shared.ping_time_ms.load(Ordering::Relaxed) as u64)
let ping_time_seconds = Duration::from_millis( .as_secs_f32();
self.shared.ping_time_ms.load(atomic::Ordering::Relaxed) as u64,
)
.as_secs_f32();
let length_to_request = length let length_to_request = length
+ max( + max(
(READ_AHEAD_DURING_PLAYBACK.as_secs_f32() (READ_AHEAD_DURING_PLAYBACK.as_secs_f32() * self.shared.bytes_per_second as f32)
* self.shared.stream_data_rate as f32) as usize, as usize,
(READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS (READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS
* ping_time_seconds * ping_time_seconds
* self.shared.stream_data_rate as f32) as usize, * self.shared.bytes_per_second as f32) as usize,
); );
min(length_to_request, self.shared.file_size - offset) min(length_to_request, self.shared.file_size - offset)
} } else {
length
}; };
let mut ranges_to_request = RangeSet::new(); let mut ranges_to_request = RangeSet::new();
ranges_to_request.add_range(&Range::new(offset, length_to_request)); ranges_to_request.add_range(&Range::new(offset, length_to_request));
let mut download_status = self.shared.download_status.lock().unwrap(); let mut download_status = self.shared.download_status.lock();
ranges_to_request.subtract_range_set(&download_status.downloaded); ranges_to_request.subtract_range_set(&download_status.downloaded);
ranges_to_request.subtract_range_set(&download_status.requested); ranges_to_request.subtract_range_set(&download_status.requested);
for &range in ranges_to_request.iter() { for &range in ranges_to_request.iter() {
self.stream_loader_command_tx self.stream_loader_command_tx
.send(StreamLoaderCommand::Fetch(range)) .send(StreamLoaderCommand::Fetch(range))
.unwrap(); .map_err(|err| io::Error::new(io::ErrorKind::BrokenPipe, err))?;
} }
if length == 0 {
return Ok(0);
}
let mut download_message_printed = false;
while !download_status.downloaded.contains(offset) { while !download_status.downloaded.contains(offset) {
if let DownloadStrategy::Streaming() = *self.shared.download_strategy.lock().unwrap() { if self
if !download_message_printed {
debug!("Stream waiting for download of file position {}. Downloaded ranges: {}. Pending ranges: {}", offset, download_status.downloaded, download_status.requested.minus(&download_status.downloaded));
download_message_printed = true;
}
}
download_status = self
.shared .shared
.cond .cond
.wait_timeout(download_status, DOWNLOAD_TIMEOUT) .wait_for(&mut download_status, DOWNLOAD_TIMEOUT)
.unwrap() .timed_out()
.0; {
return Err(io::Error::new(
io::ErrorKind::TimedOut,
Error::deadline_exceeded(AudioFileError::WaitTimeout),
));
}
} }
let available_length = download_status let available_length = download_status
.downloaded .downloaded
.contained_length_from_value(offset); .contained_length_from_value(offset);
assert!(available_length > 0);
drop(download_status); drop(download_status);
self.position = self.read_file.seek(SeekFrom::Start(offset as u64)).unwrap(); self.position = self.read_file.seek(SeekFrom::Start(offset as u64))?;
let read_len = min(length, available_length); let read_len = min(length, available_length);
let read_len = self.read_file.read(&mut output[..read_len])?; let read_len = self.read_file.read(&mut output[..read_len])?;
if download_message_printed {
debug!(
"Read at postion {} completed. {} bytes returned, {} bytes were requested.",
offset,
read_len,
output.len()
);
}
self.position += read_len as u64; self.position += read_len as u64;
self.shared self.shared
.read_position .read_position
.store(self.position as usize, atomic::Ordering::Relaxed); .store(self.position as usize, Ordering::Release);
Ok(read_len) Ok(read_len)
} }
@ -485,11 +576,50 @@ impl Read for AudioFileStreaming {
impl Seek for AudioFileStreaming { impl Seek for AudioFileStreaming {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> { fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
// If we are already at this position, we don't need to switch download mode.
// These checks and locks are less expensive than interrupting streaming.
let current_position = self.position as i64;
let requested_pos = match pos {
SeekFrom::Start(pos) => pos as i64,
SeekFrom::End(pos) => self.shared.file_size as i64 - pos - 1,
SeekFrom::Current(pos) => current_position + pos,
};
if requested_pos == current_position {
return Ok(current_position as u64);
}
// Again if we have already downloaded this part.
let available = self
.shared
.download_status
.lock()
.downloaded
.contains(requested_pos as usize);
let mut was_streaming = false;
if !available {
// Ensure random access mode if we need to download this part.
// Checking whether we are streaming now is a micro-optimization
// to save an atomic load.
was_streaming = self.shared.download_streaming.load(Ordering::Acquire);
if was_streaming {
self.shared
.download_streaming
.store(false, Ordering::Release);
}
}
self.position = self.read_file.seek(pos)?; self.position = self.read_file.seek(pos)?;
// Do not seek past EOF
self.shared self.shared
.read_position .read_position
.store(self.position as usize, atomic::Ordering::Relaxed); .store(self.position as usize, Ordering::Release);
if !available && was_streaming {
self.shared
.download_streaming
.store(true, Ordering::Release);
}
Ok(self.position) Ok(self.position)
} }
} }

View file

@ -1,57 +1,26 @@
use std::cmp::{max, min}; use std::{
use std::io::{Seek, SeekFrom, Write}; cmp::{max, min},
use std::sync::{atomic, Arc}; io::{Seek, SeekFrom, Write},
use std::time::{Duration, Instant}; sync::{atomic::Ordering, Arc},
time::{Duration, Instant},
};
use atomic::Ordering;
use byteorder::{BigEndian, WriteBytesExt};
use bytes::Bytes; use bytes::Bytes;
use futures_util::StreamExt; use futures_util::StreamExt;
use librespot_core::channel::{Channel, ChannelData}; use hyper::StatusCode;
use librespot_core::session::Session;
use librespot_core::spotify_id::FileId;
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
use tokio::sync::{mpsc, oneshot}; use tokio::sync::{mpsc, oneshot};
use librespot_core::{session::Session, Error};
use crate::range_set::{Range, RangeSet}; use crate::range_set::{Range, RangeSet};
use super::{AudioFileShared, DownloadStrategy, StreamLoaderCommand};
use super::{ use super::{
AudioFileError, AudioFileResult, AudioFileShared, StreamLoaderCommand, StreamingRequest,
FAST_PREFETCH_THRESHOLD_FACTOR, MAXIMUM_ASSUMED_PING_TIME, MAX_PREFETCH_REQUESTS, FAST_PREFETCH_THRESHOLD_FACTOR, MAXIMUM_ASSUMED_PING_TIME, MAX_PREFETCH_REQUESTS,
MINIMUM_DOWNLOAD_SIZE, PREFETCH_THRESHOLD_FACTOR, MINIMUM_DOWNLOAD_SIZE, PREFETCH_THRESHOLD_FACTOR,
}; };
pub fn request_range(session: &Session, file: FileId, offset: usize, length: usize) -> Channel {
assert!(
offset % 4 == 0,
"Range request start positions must be aligned by 4 bytes."
);
assert!(
length % 4 == 0,
"Range request range lengths must be aligned by 4 bytes."
);
let start = offset / 4;
let end = (offset + length) / 4;
let (id, channel) = session.channel().allocate();
let mut data: Vec<u8> = Vec::new();
data.write_u16::<BigEndian>(id).unwrap();
data.write_u8(0).unwrap();
data.write_u8(1).unwrap();
data.write_u16::<BigEndian>(0x0000).unwrap();
data.write_u32::<BigEndian>(0x00000000).unwrap();
data.write_u32::<BigEndian>(0x00009C40).unwrap();
data.write_u32::<BigEndian>(0x00020000).unwrap();
data.write(&file.0).unwrap();
data.write_u32::<BigEndian>(start as u32).unwrap();
data.write_u32::<BigEndian>(end as u32).unwrap();
session.send_packet(0x8, data);
channel
}
struct PartialFileData { struct PartialFileData {
offset: usize, offset: usize,
data: Bytes, data: Bytes,
@ -65,13 +34,10 @@ enum ReceivedData {
async fn receive_data( async fn receive_data(
shared: Arc<AudioFileShared>, shared: Arc<AudioFileShared>,
file_data_tx: mpsc::UnboundedSender<ReceivedData>, file_data_tx: mpsc::UnboundedSender<ReceivedData>,
mut data_rx: ChannelData, mut request: StreamingRequest,
initial_data_offset: usize, ) -> AudioFileResult {
initial_request_length: usize, let mut offset = request.offset;
request_sent_time: Instant, let mut actual_length = 0;
) {
let mut data_offset = initial_data_offset;
let mut request_length = initial_request_length;
let old_number_of_request = shared let old_number_of_request = shared
.number_of_open_requests .number_of_open_requests
@ -79,65 +45,80 @@ async fn receive_data(
let mut measure_ping_time = old_number_of_request == 0; let mut measure_ping_time = old_number_of_request == 0;
let result = loop { let result: Result<_, Error> = loop {
let data = match data_rx.next().await { let response = match request.initial_response.take() {
Some(Ok(data)) => data, Some(data) => data,
Some(Err(e)) => break Err(e), None => match request.streamer.next().await {
None => break Ok(()), Some(Ok(response)) => response,
Some(Err(e)) => break Err(e.into()),
None => {
if actual_length != request.length {
let msg = format!("did not expect body to contain {} bytes", actual_length);
break Err(Error::data_loss(msg));
}
break Ok(());
}
},
}; };
if measure_ping_time { let code = response.status();
let mut duration = Instant::now() - request_sent_time; if code != StatusCode::PARTIAL_CONTENT {
if duration > MAXIMUM_ASSUMED_PING_TIME { break Err(AudioFileError::StatusCode(code).into());
duration = MAXIMUM_ASSUMED_PING_TIME;
}
let _ = file_data_tx.send(ReceivedData::ResponseTime(duration));
measure_ping_time = false;
}
let data_size = data.len();
let _ = file_data_tx.send(ReceivedData::Data(PartialFileData {
offset: data_offset,
data,
}));
data_offset += data_size;
if request_length < data_size {
warn!(
"Data receiver for range {} (+{}) received more data from server than requested.",
initial_data_offset, initial_request_length
);
request_length = 0;
} else {
request_length -= data_size;
} }
if request_length == 0 { let body = response.into_body();
break Ok(()); let data = match hyper::body::to_bytes(body).await {
Ok(bytes) => bytes,
Err(e) => break Err(e.into()),
};
let data_size = data.len();
file_data_tx.send(ReceivedData::Data(PartialFileData { offset, data }))?;
actual_length += data_size;
offset += data_size;
if measure_ping_time {
let mut duration = Instant::now() - request.request_time;
if duration > MAXIMUM_ASSUMED_PING_TIME {
warn!(
"Ping time {} ms exceeds maximum {}, setting to maximum",
duration.as_millis(),
MAXIMUM_ASSUMED_PING_TIME.as_millis()
);
duration = MAXIMUM_ASSUMED_PING_TIME;
}
file_data_tx.send(ReceivedData::ResponseTime(duration))?;
measure_ping_time = false;
} }
}; };
if request_length > 0 { drop(request.streamer);
let missing_range = Range::new(data_offset, request_length);
let mut download_status = shared.download_status.lock().unwrap(); let bytes_remaining = request.length - actual_length;
download_status.requested.subtract_range(&missing_range); if bytes_remaining > 0 {
shared.cond.notify_all(); {
let missing_range = Range::new(offset, bytes_remaining);
let mut download_status = shared.download_status.lock();
download_status.requested.subtract_range(&missing_range);
shared.cond.notify_all();
}
} }
shared shared
.number_of_open_requests .number_of_open_requests
.fetch_sub(1, Ordering::SeqCst); .fetch_sub(1, Ordering::SeqCst);
if result.is_err() { if let Err(e) = result {
warn!( error!(
"Error from channel for data receiver for range {} (+{}).", "Streamer error requesting range {} +{}: {:?}",
initial_data_offset, initial_request_length request.offset, request.length, e
);
} else if request_length > 0 {
warn!(
"Data receiver for range {} (+{}) received less data from server than requested.",
initial_data_offset, initial_request_length
); );
return Err(e);
} }
Ok(())
} }
struct AudioFileFetch { struct AudioFileFetch {
@ -158,68 +139,64 @@ enum ControlFlow {
} }
impl AudioFileFetch { impl AudioFileFetch {
fn get_download_strategy(&mut self) -> DownloadStrategy { fn is_download_streaming(&self) -> bool {
*(self.shared.download_strategy.lock().unwrap()) self.shared.download_streaming.load(Ordering::Acquire)
} }
fn download_range(&mut self, mut offset: usize, mut length: usize) { fn download_range(&mut self, offset: usize, mut length: usize) -> AudioFileResult {
if length < MINIMUM_DOWNLOAD_SIZE { if length < MINIMUM_DOWNLOAD_SIZE {
length = MINIMUM_DOWNLOAD_SIZE; length = MINIMUM_DOWNLOAD_SIZE;
} }
// ensure the values are within the bounds and align them by 4 for the spotify protocol.
if offset >= self.shared.file_size {
return;
}
if length == 0 {
return;
}
if offset + length > self.shared.file_size { if offset + length > self.shared.file_size {
length = self.shared.file_size - offset; length = self.shared.file_size - offset;
} }
if offset % 4 != 0 {
length += offset % 4;
offset -= offset % 4;
}
if length % 4 != 0 {
length += 4 - (length % 4);
}
let mut ranges_to_request = RangeSet::new(); let mut ranges_to_request = RangeSet::new();
ranges_to_request.add_range(&Range::new(offset, length)); ranges_to_request.add_range(&Range::new(offset, length));
let mut download_status = self.shared.download_status.lock().unwrap(); // The iteration that follows spawns streamers fast, without awaiting them,
// so holding the lock for the entire scope of this function should be faster
// then locking and unlocking multiple times.
let mut download_status = self.shared.download_status.lock();
ranges_to_request.subtract_range_set(&download_status.downloaded); ranges_to_request.subtract_range_set(&download_status.downloaded);
ranges_to_request.subtract_range_set(&download_status.requested); ranges_to_request.subtract_range_set(&download_status.requested);
// TODO : refresh cdn_url when the token expired
for range in ranges_to_request.iter() { for range in ranges_to_request.iter() {
let (_headers, data) = request_range( let streamer = self.session.spclient().stream_from_cdn(
&self.session, &self.shared.cdn_url,
self.shared.file_id,
range.start, range.start,
range.length, range.length,
) )?;
.split();
download_status.requested.add_range(range); download_status.requested.add_range(range);
let streaming_request = StreamingRequest {
streamer,
initial_response: None,
offset: range.start,
length: range.length,
request_time: Instant::now(),
};
self.session.spawn(receive_data( self.session.spawn(receive_data(
self.shared.clone(), self.shared.clone(),
self.file_data_tx.clone(), self.file_data_tx.clone(),
data, streaming_request,
range.start,
range.length,
Instant::now(),
)); ));
} }
Ok(())
} }
fn pre_fetch_more_data(&mut self, bytes: usize, max_requests_to_send: usize) { fn pre_fetch_more_data(
&mut self,
bytes: usize,
max_requests_to_send: usize,
) -> AudioFileResult {
let mut bytes_to_go = bytes; let mut bytes_to_go = bytes;
let mut requests_to_go = max_requests_to_send; let mut requests_to_go = max_requests_to_send;
@ -228,14 +205,14 @@ impl AudioFileFetch {
let mut missing_data = RangeSet::new(); let mut missing_data = RangeSet::new();
missing_data.add_range(&Range::new(0, self.shared.file_size)); missing_data.add_range(&Range::new(0, self.shared.file_size));
{ {
let download_status = self.shared.download_status.lock().unwrap(); let download_status = self.shared.download_status.lock();
missing_data.subtract_range_set(&download_status.downloaded); missing_data.subtract_range_set(&download_status.downloaded);
missing_data.subtract_range_set(&download_status.requested); missing_data.subtract_range_set(&download_status.requested);
} }
// download data from after the current read position first // download data from after the current read position first
let mut tail_end = RangeSet::new(); let mut tail_end = RangeSet::new();
let read_position = self.shared.read_position.load(Ordering::Relaxed); let read_position = self.shared.read_position.load(Ordering::Acquire);
tail_end.add_range(&Range::new( tail_end.add_range(&Range::new(
read_position, read_position,
self.shared.file_size - read_position, self.shared.file_size - read_position,
@ -246,7 +223,7 @@ impl AudioFileFetch {
let range = tail_end.get_range(0); let range = tail_end.get_range(0);
let offset = range.start; let offset = range.start;
let length = min(range.length, bytes_to_go); let length = min(range.length, bytes_to_go);
self.download_range(offset, length); self.download_range(offset, length)?;
requests_to_go -= 1; requests_to_go -= 1;
bytes_to_go -= length; bytes_to_go -= length;
} else if !missing_data.is_empty() { } else if !missing_data.is_empty() {
@ -254,20 +231,21 @@ impl AudioFileFetch {
let range = missing_data.get_range(0); let range = missing_data.get_range(0);
let offset = range.start; let offset = range.start;
let length = min(range.length, bytes_to_go); let length = min(range.length, bytes_to_go);
self.download_range(offset, length); self.download_range(offset, length)?;
requests_to_go -= 1; requests_to_go -= 1;
bytes_to_go -= length; bytes_to_go -= length;
} else { } else {
return; break;
} }
} }
Ok(())
} }
fn handle_file_data(&mut self, data: ReceivedData) -> ControlFlow { fn handle_file_data(&mut self, data: ReceivedData) -> Result<ControlFlow, Error> {
match data { match data {
ReceivedData::ResponseTime(response_time) => { ReceivedData::ResponseTime(response_time) => {
// chatty let old_ping_time_ms = self.shared.ping_time_ms.load(Ordering::Relaxed);
// trace!("Ping time estimated as: {}ms", response_time.as_millis());
// prune old response times. Keep at most two so we can push a third. // prune old response times. Keep at most two so we can push a third.
while self.network_response_times.len() >= 3 { while self.network_response_times.len() >= 3 {
@ -278,109 +256,131 @@ impl AudioFileFetch {
self.network_response_times.push(response_time); self.network_response_times.push(response_time);
// stats::median is experimental. So we calculate the median of up to three ourselves. // stats::median is experimental. So we calculate the median of up to three ourselves.
let ping_time = match self.network_response_times.len() { let ping_time_ms = {
1 => self.network_response_times[0], let response_time = match self.network_response_times.len() {
2 => (self.network_response_times[0] + self.network_response_times[1]) / 2, 1 => self.network_response_times[0],
3 => { 2 => (self.network_response_times[0] + self.network_response_times[1]) / 2,
let mut times = self.network_response_times.clone(); 3 => {
times.sort_unstable(); let mut times = self.network_response_times.clone();
times[1] times.sort_unstable();
} times[1]
_ => unreachable!(), }
_ => unreachable!(),
};
response_time.as_millis() as usize
}; };
// print when the new estimate deviates by more than 10% from the last
if f32::abs(
(ping_time_ms as f32 - old_ping_time_ms as f32) / old_ping_time_ms as f32,
) > 0.1
{
debug!("Ping time now estimated as: {} ms", ping_time_ms);
}
// store our new estimate for everyone to see // store our new estimate for everyone to see
self.shared self.shared
.ping_time_ms .ping_time_ms
.store(ping_time.as_millis() as usize, Ordering::Relaxed); .store(ping_time_ms, Ordering::Relaxed);
} }
ReceivedData::Data(data) => { ReceivedData::Data(data) => {
self.output match self.output.as_mut() {
.as_mut() Some(output) => {
.unwrap() output.seek(SeekFrom::Start(data.offset as u64))?;
.seek(SeekFrom::Start(data.offset as u64)) output.write_all(data.data.as_ref())?;
.unwrap(); }
self.output None => return Err(AudioFileError::Output.into()),
.as_mut() }
.unwrap()
.write_all(data.data.as_ref())
.unwrap();
let mut download_status = self.shared.download_status.lock().unwrap();
let received_range = Range::new(data.offset, data.data.len()); let received_range = Range::new(data.offset, data.data.len());
download_status.downloaded.add_range(&received_range);
self.shared.cond.notify_all();
let full = download_status.downloaded.contained_length_from_value(0) let full = {
>= self.shared.file_size; let mut download_status = self.shared.download_status.lock();
download_status.downloaded.add_range(&received_range);
self.shared.cond.notify_all();
drop(download_status); download_status.downloaded.contained_length_from_value(0)
>= self.shared.file_size
};
if full { if full {
self.finish(); self.finish()?;
return ControlFlow::Break; return Ok(ControlFlow::Break);
} }
} }
} }
ControlFlow::Continue
Ok(ControlFlow::Continue)
} }
fn handle_stream_loader_command(&mut self, cmd: StreamLoaderCommand) -> ControlFlow { fn handle_stream_loader_command(
&mut self,
cmd: StreamLoaderCommand,
) -> Result<ControlFlow, Error> {
match cmd { match cmd {
StreamLoaderCommand::Fetch(request) => { StreamLoaderCommand::Fetch(request) => {
self.download_range(request.start, request.length); self.download_range(request.start, request.length)?
} }
StreamLoaderCommand::RandomAccessMode() => { StreamLoaderCommand::RandomAccessMode => self
*(self.shared.download_strategy.lock().unwrap()) = DownloadStrategy::RandomAccess(); .shared
} .download_streaming
StreamLoaderCommand::StreamMode() => { .store(false, Ordering::Release),
*(self.shared.download_strategy.lock().unwrap()) = DownloadStrategy::Streaming(); StreamLoaderCommand::StreamMode => self
} .shared
StreamLoaderCommand::Close() => return ControlFlow::Break, .download_streaming
.store(true, Ordering::Release),
StreamLoaderCommand::Close => return Ok(ControlFlow::Break),
} }
ControlFlow::Continue
Ok(ControlFlow::Continue)
} }
fn finish(&mut self) { fn finish(&mut self) -> AudioFileResult {
let mut output = self.output.take().unwrap(); let output = self.output.take();
let complete_tx = self.complete_tx.take().unwrap();
output.seek(SeekFrom::Start(0)).unwrap(); let complete_tx = self.complete_tx.take();
let _ = complete_tx.send(output);
if let Some(mut output) = output {
output.seek(SeekFrom::Start(0))?;
if let Some(complete_tx) = complete_tx {
complete_tx
.send(output)
.map_err(|_| AudioFileError::Channel)?;
}
}
Ok(())
} }
} }
pub(super) async fn audio_file_fetch( pub(super) async fn audio_file_fetch(
session: Session, session: Session,
shared: Arc<AudioFileShared>, shared: Arc<AudioFileShared>,
initial_data_rx: ChannelData, initial_request: StreamingRequest,
initial_request_sent_time: Instant,
initial_data_length: usize,
output: NamedTempFile, output: NamedTempFile,
mut stream_loader_command_rx: mpsc::UnboundedReceiver<StreamLoaderCommand>, mut stream_loader_command_rx: mpsc::UnboundedReceiver<StreamLoaderCommand>,
complete_tx: oneshot::Sender<NamedTempFile>, complete_tx: oneshot::Sender<NamedTempFile>,
) { ) -> AudioFileResult {
let (file_data_tx, mut file_data_rx) = mpsc::unbounded_channel(); let (file_data_tx, mut file_data_rx) = mpsc::unbounded_channel();
{ {
let requested_range = Range::new(0, initial_data_length); let requested_range = Range::new(
let mut download_status = shared.download_status.lock().unwrap(); initial_request.offset,
initial_request.offset + initial_request.length,
);
let mut download_status = shared.download_status.lock();
download_status.requested.add_range(&requested_range); download_status.requested.add_range(&requested_range);
} }
session.spawn(receive_data( session.spawn(receive_data(
shared.clone(), shared.clone(),
file_data_tx.clone(), file_data_tx.clone(),
initial_data_rx, initial_request,
0,
initial_data_length,
initial_request_sent_time,
)); ));
let mut fetch = AudioFileFetch { let mut fetch = AudioFileFetch {
session, session: session.clone(),
shared, shared,
output: Some(output), output: Some(output),
@ -392,25 +392,37 @@ pub(super) async fn audio_file_fetch(
loop { loop {
tokio::select! { tokio::select! {
cmd = stream_loader_command_rx.recv() => { cmd = stream_loader_command_rx.recv() => {
if cmd.map_or(true, |cmd| fetch.handle_stream_loader_command(cmd) == ControlFlow::Break) { match cmd {
break; Some(cmd) => {
if fetch.handle_stream_loader_command(cmd)? == ControlFlow::Break {
break;
}
}
None => break,
}
}
data = file_data_rx.recv() => {
match data {
Some(data) => {
if fetch.handle_file_data(data)? == ControlFlow::Break {
break;
}
}
None => break,
} }
}, },
data = file_data_rx.recv() => { else => (),
if data.map_or(true, |data| fetch.handle_file_data(data) == ControlFlow::Break) {
break;
}
}
} }
if fetch.get_download_strategy() == DownloadStrategy::Streaming() { if fetch.is_download_streaming() {
let number_of_open_requests = let number_of_open_requests =
fetch.shared.number_of_open_requests.load(Ordering::SeqCst); fetch.shared.number_of_open_requests.load(Ordering::SeqCst);
if number_of_open_requests < MAX_PREFETCH_REQUESTS { if number_of_open_requests < MAX_PREFETCH_REQUESTS {
let max_requests_to_send = MAX_PREFETCH_REQUESTS - number_of_open_requests; let max_requests_to_send = MAX_PREFETCH_REQUESTS - number_of_open_requests;
let bytes_pending: usize = { let bytes_pending: usize = {
let download_status = fetch.shared.download_status.lock().unwrap(); let download_status = fetch.shared.download_status.lock();
download_status download_status
.requested .requested
.minus(&download_status.downloaded) .minus(&download_status.downloaded)
@ -425,7 +437,7 @@ pub(super) async fn audio_file_fetch(
let desired_pending_bytes = max( let desired_pending_bytes = max(
(PREFETCH_THRESHOLD_FACTOR (PREFETCH_THRESHOLD_FACTOR
* ping_time_seconds * ping_time_seconds
* fetch.shared.stream_data_rate as f32) as usize, * fetch.shared.bytes_per_second as f32) as usize,
(FAST_PREFETCH_THRESHOLD_FACTOR * ping_time_seconds * download_rate as f32) (FAST_PREFETCH_THRESHOLD_FACTOR * ping_time_seconds * download_rate as f32)
as usize, as usize,
); );
@ -434,9 +446,11 @@ pub(super) async fn audio_file_fetch(
fetch.pre_fetch_more_data( fetch.pre_fetch_more_data(
desired_pending_bytes - bytes_pending, desired_pending_bytes - bytes_pending,
max_requests_to_send, max_requests_to_send,
); )?;
} }
} }
} }
} }
Ok(())
} }

View file

@ -1,5 +1,3 @@
#![allow(clippy::unused_io_amount, clippy::too_many_arguments)]
#[macro_use] #[macro_use]
extern crate log; extern crate log;
@ -9,8 +7,8 @@ mod fetch;
mod range_set; mod range_set;
pub use decrypt::AudioDecrypt; pub use decrypt::AudioDecrypt;
pub use fetch::{AudioFile, StreamLoaderController}; pub use fetch::{AudioFile, AudioFileError, StreamLoaderController};
pub use fetch::{ pub use fetch::{
READ_AHEAD_BEFORE_PLAYBACK, READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS, READ_AHEAD_DURING_PLAYBACK, MINIMUM_DOWNLOAD_SIZE, READ_AHEAD_BEFORE_PLAYBACK, READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS,
READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS, READ_AHEAD_DURING_PLAYBACK, READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS,
}; };

View file

@ -1,6 +1,8 @@
use std::cmp::{max, min}; use std::{
use std::fmt; cmp::{max, min},
use std::slice::Iter; fmt,
slice::Iter,
};
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug)]
pub struct Range { pub struct Range {
@ -10,7 +12,7 @@ pub struct Range {
impl fmt::Display for Range { impl fmt::Display for Range {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
return write!(f, "[{}, {}]", self.start, self.start + self.length - 1); write!(f, "[{}, {}]", self.start, self.start + self.length - 1)
} }
} }
@ -24,16 +26,16 @@ impl Range {
} }
} }
#[derive(Clone)] #[derive(Debug, Clone)]
pub struct RangeSet { pub struct RangeSet {
ranges: Vec<Range>, ranges: Vec<Range>,
} }
impl fmt::Display for RangeSet { impl fmt::Display for RangeSet {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "(").unwrap(); write!(f, "(")?;
for range in self.ranges.iter() { for range in self.ranges.iter() {
write!(f, "{}", range).unwrap(); write!(f, "{}", range)?;
} }
write!(f, ")") write!(f, ")")
} }
@ -227,7 +229,6 @@ impl RangeSet {
self.ranges[self_index].end(), self.ranges[self_index].end(),
other.ranges[other_index].end(), other.ranges[other_index].end(),
); );
assert!(new_start <= new_end);
result.add_range(&Range::new(new_start, new_end - new_start)); result.add_range(&Range::new(new_start, new_end - new_start));
if self.ranges[self_index].end() <= other.ranges[other_index].end() { if self.ranges[self_index].end() <= other.ranges[other_index].end() {
self_index += 1; self_index += 1;

View file

@ -9,14 +9,15 @@ edition = "2018"
[dependencies] [dependencies]
form_urlencoded = "1.0" form_urlencoded = "1.0"
futures-util = { version = "0.3.5", default_features = false } futures-util = "0.3"
log = "0.4" log = "0.4"
protobuf = "2.14.0" protobuf = "2"
rand = "0.8" rand = "0.8"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
tokio = { version = "1.0", features = ["macros", "sync"] } thiserror = "1.0"
tokio-stream = "0.1.1" tokio = { version = "1", features = ["macros", "parking_lot", "sync"] }
tokio-stream = "0.1"
[dependencies.librespot-core] [dependencies.librespot-core]
path = "../core" path = "../core"
@ -29,10 +30,3 @@ version = "0.4.2"
[dependencies.librespot-protocol] [dependencies.librespot-protocol]
path = "../protocol" path = "../protocol"
version = "0.4.2" version = "0.4.2"
[dependencies.librespot-discovery]
path = "../discovery"
version = "0.4.2"
[features]
with-dns-sd = ["librespot-discovery/with-dns-sd"]

20
connect/src/config.rs Normal file
View file

@ -0,0 +1,20 @@
use crate::core::config::DeviceType;
#[derive(Clone, Debug)]
pub struct ConnectConfig {
pub name: String,
pub device_type: DeviceType,
pub initial_volume: Option<u16>,
pub has_volume_ctrl: bool,
}
impl Default for ConnectConfig {
fn default() -> ConnectConfig {
ConnectConfig {
name: "Librespot".to_string(),
device_type: DeviceType::default(),
initial_volume: Some(50),
has_volume_ctrl: true,
}
}
}

View file

@ -1,7 +1,12 @@
// TODO : move to metadata
use crate::core::spotify_id::SpotifyId; use crate::core::spotify_id::SpotifyId;
use crate::protocol::spirc::TrackRef; use crate::protocol::spirc::TrackRef;
use serde::Deserialize; use serde::{
de::{Error, Unexpected},
Deserialize,
};
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
pub struct StationContext { pub struct StationContext {
@ -72,17 +77,23 @@ where
D: serde::Deserializer<'d>, D: serde::Deserializer<'d>,
{ {
let v: Vec<TrackContext> = serde::Deserialize::deserialize(de)?; let v: Vec<TrackContext> = serde::Deserialize::deserialize(de)?;
let track_vec = v v.iter()
.iter()
.map(|v| { .map(|v| {
let mut t = TrackRef::new(); let mut t = TrackRef::new();
// This has got to be the most round about way of doing this. // This has got to be the most round about way of doing this.
t.set_gid(SpotifyId::from_base62(&v.gid).unwrap().to_raw().to_vec()); t.set_gid(
SpotifyId::from_base62(&v.gid)
.map_err(|_| {
D::Error::invalid_value(
Unexpected::Str(&v.gid),
&"a Base-62 encoded Spotify ID",
)
})?
.to_raw()
.to_vec(),
);
t.set_uri(v.uri.to_owned()); t.set_uri(v.uri.to_owned());
Ok(t)
t
}) })
.collect::<Vec<TrackRef>>(); .collect::<Result<Vec<TrackRef>, D::Error>>()
Ok(track_vec)
} }

View file

@ -1,31 +0,0 @@
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
use futures_util::Stream;
use librespot_core::authentication::Credentials;
use librespot_core::config::ConnectConfig;
pub struct DiscoveryStream(librespot_discovery::Discovery);
impl Stream for DiscoveryStream {
type Item = Credentials;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
Pin::new(&mut self.0).poll_next(cx)
}
}
pub fn discovery(
config: ConnectConfig,
device_id: String,
port: u16,
) -> io::Result<DiscoveryStream> {
librespot_discovery::Discovery::builder(device_id)
.device_type(config.device_type)
.port(port)
.name(config.name)
.launch()
.map(DiscoveryStream)
.map_err(|e| io::Error::new(io::ErrorKind::Other, e))
}

View file

@ -5,10 +5,6 @@ use librespot_core as core;
use librespot_playback as playback; use librespot_playback as playback;
use librespot_protocol as protocol; use librespot_protocol as protocol;
pub mod config;
pub mod context; pub mod context;
#[deprecated(
since = "0.2.1",
note = "Please use the crate `librespot_discovery` instead."
)]
pub mod discovery;
pub mod spirc; pub mod spirc;

File diff suppressed because it is too large Load diff

View file

@ -13,42 +13,54 @@ path = "../protocol"
version = "0.4.2" version = "0.4.2"
[dependencies] [dependencies]
aes = "0.6" aes = "0.8"
base64 = "0.13" base64 = "0.13"
byteorder = "1.4" byteorder = "1.4"
bytes = "1.0" bytes = "1"
dns-sd = { version = "0.1", optional = true }
form_urlencoded = "1.0" form_urlencoded = "1.0"
futures-core = { version = "0.3", default-features = false } futures-core = "0.3"
futures-util = { version = "0.3", default-features = false, features = ["alloc", "bilock", "unstable", "sink"] } futures-util = { version = "0.3", features = ["alloc", "bilock", "sink", "unstable"] }
hmac = "0.11" hmac = "0.12"
httparse = "1.3" httparse = "1.7"
http = "0.2" http = "0.2"
hyper = { version = "0.14", features = ["client", "tcp", "http1"] } hyper = { version = "0.14", features = ["client", "http1", "http2", "tcp"] }
hyper-proxy = { version = "0.9.1", default-features = false } hyper-proxy = { version = "0.9", default-features = false, features = ["rustls"] }
hyper-rustls = { version = "0.23", features = ["http2"] }
log = "0.4" log = "0.4"
num = "0.4"
num-bigint = { version = "0.4", features = ["rand"] } num-bigint = { version = "0.4", features = ["rand"] }
num-derive = "0.3"
num-integer = "0.1" num-integer = "0.1"
num-traits = "0.2" num-traits = "0.2"
once_cell = "1.5.2" once_cell = "1"
pbkdf2 = { version = "0.8", default-features = false, features = ["hmac"] } parking_lot = { version = "0.12", features = ["deadlock_detection"] }
pbkdf2 = { version = "0.11", default-features = false, features = ["hmac"] }
priority-queue = "1.2" priority-queue = "1.2"
protobuf = "2.14.0" protobuf = "2"
quick-xml = { version = "0.23", features = ["serialize"] }
rand = "0.8" rand = "0.8"
rsa = "0.6"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0" serde_json = "1.0"
sha-1 = "0.9" sha1 = "0.10"
shannon = "0.2.0" shannon = "0.2"
thiserror = "1.0.7" thiserror = "1.0"
tokio = { version = "1.0", features = ["io-util", "net", "rt", "sync"] } time = "0.3"
tokio-stream = "0.1.1" tokio = { version = "1", features = ["io-util", "macros", "net", "parking_lot", "rt", "sync", "time"] }
tokio-stream = "0.1"
tokio-tungstenite = { version = "*", default-features = false, features = ["rustls-tls-native-roots"] }
tokio-util = { version = "0.7", features = ["codec"] } tokio-util = { version = "0.7", features = ["codec"] }
url = "2.1" url = "2"
uuid = { version = "1.0", default-features = false, features = ["v4"] } uuid = { version = "1", default-features = false, features = ["v4"] }
[build-dependencies] [build-dependencies]
rand = "0.8" rand = "0.8"
vergen = "3.0.4" vergen = { version = "7", default-features = false, features = ["build", "git"] }
[dev-dependencies] [dev-dependencies]
env_logger = "0.9" env_logger = "0.9"
tokio = {version = "1.0", features = ["macros"] } tokio = { version = "1", features = ["macros", "parking_lot"] }
[features]
with-dns-sd = ["dns-sd"]

View file

@ -1,14 +1,19 @@
use rand::distributions::Alphanumeric; use rand::{distributions::Alphanumeric, Rng};
use rand::Rng; use vergen::{vergen, Config, ShaKind, TimestampKind};
use std::env;
use vergen::{generate_cargo_keys, ConstantsFlags};
fn main() { fn main() {
let mut flags = ConstantsFlags::all(); let mut config = Config::default();
flags.toggle(ConstantsFlags::REBUILD_ON_HEAD_CHANGE); *config.build_mut().kind_mut() = TimestampKind::DateOnly;
generate_cargo_keys(ConstantsFlags::all()).expect("Unable to generate the cargo keys!"); *config.git_mut().enabled_mut() = true;
*config.git_mut().commit_timestamp_mut() = true;
*config.git_mut().commit_timestamp_kind_mut() = TimestampKind::DateOnly;
*config.git_mut().sha_mut() = true;
*config.git_mut().sha_kind_mut() = ShaKind::Short;
*config.git_mut().rerun_on_head_change_mut() = true;
let build_id = match env::var("SOURCE_DATE_EPOCH") { vergen(config).expect("Unable to generate the cargo keys!");
let build_id = match std::env::var("SOURCE_DATE_EPOCH") {
Ok(val) => val, Ok(val) => val,
Err(_) => rand::thread_rng() Err(_) => rand::thread_rng()
.sample_iter(Alphanumeric) .sample_iter(Alphanumeric)

View file

@ -1,108 +1,131 @@
use std::error::Error; use hyper::{Body, Method, Request};
use hyper::client::HttpConnector;
use hyper::{Body, Client, Method, Request, Uri};
use hyper_proxy::{Intercept, Proxy, ProxyConnector};
use serde::Deserialize; use serde::Deserialize;
use url::Url;
const APRESOLVE_ENDPOINT: &str = "http://apresolve.spotify.com:80"; use crate::Error;
const AP_FALLBACK: &str = "ap.spotify.com:443";
const AP_BLACKLIST: [&str; 2] = ["ap-gew4.spotify.com", "ap-gue1.spotify.com"];
#[derive(Clone, Debug, Deserialize)] pub type SocketAddress = (String, u16);
struct ApResolveData {
ap_list: Vec<String>, #[derive(Default)]
pub struct AccessPoints {
accesspoint: Vec<SocketAddress>,
dealer: Vec<SocketAddress>,
spclient: Vec<SocketAddress>,
} }
async fn try_apresolve( #[derive(Deserialize)]
proxy: Option<&Url>, pub struct ApResolveData {
ap_port: Option<u16>, accesspoint: Vec<String>,
) -> Result<String, Box<dyn Error>> { dealer: Vec<String>,
let port = ap_port.unwrap_or(443); spclient: Vec<String>,
}
let mut req = Request::new(Body::empty()); // These addresses probably do some geo-location based traffic management or at least DNS-based
*req.method_mut() = Method::GET; // load balancing. They are known to fail when the normal resolvers are up, so that's why they
// panic safety: APRESOLVE_ENDPOINT above is valid url. // should only be used as fallback.
*req.uri_mut() = APRESOLVE_ENDPOINT.parse().expect("invalid AP resolve URL"); impl Default for ApResolveData {
fn default() -> Self {
Self {
accesspoint: vec![String::from("ap.spotify.com:443")],
dealer: vec![String::from("dealer.spotify.com:443")],
spclient: vec![String::from("spclient.wg.spotify.com:443")],
}
}
}
let response = if let Some(url) = proxy { component! {
// Panic safety: all URLs are valid URIs ApResolver : ApResolverInner {
let uri = url.to_string().parse().unwrap(); data: AccessPoints = AccessPoints::default(),
let proxy = Proxy::new(Intercept::All, uri); }
let connector = HttpConnector::new(); }
let proxy_connector = ProxyConnector::from_proxy_unsecured(connector, proxy);
Client::builder()
.build(proxy_connector)
.request(req)
.await?
} else {
Client::new().request(req).await?
};
let body = hyper::body::to_bytes(response.into_body()).await?; impl ApResolver {
let data: ApResolveData = serde_json::from_slice(body.as_ref())?; // return a port if a proxy URL and/or a proxy port was specified. This is useful even when
// there is no proxy, but firewalls only allow certain ports (e.g. 443 and not 4070).
pub fn port_config(&self) -> Option<u16> {
if self.session().config().proxy.is_some() || self.session().config().ap_port.is_some() {
Some(self.session().config().ap_port.unwrap_or(443))
} else {
None
}
}
// filter APs that are known to cause channel errors fn process_data(&self, data: Vec<String>) -> Vec<SocketAddress> {
let aps: Vec<String> = data data.into_iter()
.ap_list .filter_map(|ap| {
.into_iter() let mut split = ap.rsplitn(2, ':');
.filter_map(|ap| { let port = split.next()?;
let host = ap.parse::<Uri>().ok()?.host()?.to_owned(); let host = split.next()?.to_owned();
if !AP_BLACKLIST.iter().any(|&blacklisted| host == blacklisted) { let port: u16 = port.parse().ok()?;
Some(ap) if let Some(p) = self.port_config() {
} else { if p != port {
warn!("Ignoring blacklisted access point {}", ap); return None;
None }
} }
Some((host, port))
})
.collect()
}
pub async fn try_apresolve(&self) -> Result<ApResolveData, Error> {
let req = Request::builder()
.method(Method::GET)
.uri("http://apresolve.spotify.com/?type=accesspoint&type=dealer&type=spclient")
.body(Body::empty())?;
let body = self.session().http_client().request_body(req).await?;
let data: ApResolveData = serde_json::from_slice(body.as_ref())?;
Ok(data)
}
async fn apresolve(&self) {
let result = self.try_apresolve().await;
self.lock(|inner| {
let data = match result {
Ok(data) => data,
Err(e) => {
warn!("Failed to resolve access points, using fallbacks: {}", e);
ApResolveData::default()
}
};
inner.data.accesspoint = self.process_data(data.accesspoint);
inner.data.dealer = self.process_data(data.dealer);
inner.data.spclient = self.process_data(data.spclient);
}) })
.collect(); }
let ap = if ap_port.is_some() || proxy.is_some() { fn is_empty(&self) -> bool {
// filter on ports if specified on the command line... self.lock(|inner| {
aps.into_iter().find_map(|ap| { inner.data.accesspoint.is_empty()
if ap.parse::<Uri>().ok()?.port()? == port { || inner.data.dealer.is_empty()
Some(ap) || inner.data.spclient.is_empty()
} else {
None
}
}) })
} else {
// ...or pick the first on the list
aps.into_iter().next()
}
.ok_or("Unable to resolve any viable access points.")?;
Ok(ap)
}
pub async fn apresolve(proxy: Option<&Url>, ap_port: Option<u16>) -> String {
try_apresolve(proxy, ap_port).await.unwrap_or_else(|e| {
warn!("Failed to resolve Access Point: {}", e);
warn!("Using fallback \"{}\"", AP_FALLBACK);
AP_FALLBACK.into()
})
}
#[cfg(test)]
mod test {
use std::net::ToSocketAddrs;
use super::try_apresolve;
#[tokio::test]
async fn test_apresolve() {
let ap = try_apresolve(None, None).await.unwrap();
// Assert that the result contains a valid host and port
ap.to_socket_addrs().unwrap().next().unwrap();
} }
#[tokio::test] pub async fn resolve(&self, endpoint: &str) -> Result<SocketAddress, Error> {
async fn test_apresolve_port_443() { if self.is_empty() {
let ap = try_apresolve(None, Some(443)).await.unwrap(); self.apresolve().await;
}
let port = ap.to_socket_addrs().unwrap().next().unwrap().port(); self.lock(|inner| {
assert_eq!(port, 443); let access_point = match endpoint {
// take the first position instead of the last with `pop`, because Spotify returns
// access points with ports 4070, 443 and 80 in order of preference from highest
// to lowest.
"accesspoint" => inner.data.accesspoint.remove(0),
"dealer" => inner.data.dealer.remove(0),
"spclient" => inner.data.spclient.remove(0),
_ => {
return Err(Error::unimplemented(format!(
"No implementation to resolve access point {}",
endpoint
)))
}
};
Ok(access_point)
})
} }
} }

View file

@ -1,52 +1,85 @@
use std::{collections::HashMap, io::Write};
use byteorder::{BigEndian, ByteOrder, WriteBytesExt}; use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use bytes::Bytes; use bytes::Bytes;
use std::collections::HashMap; use thiserror::Error;
use std::io::Write;
use tokio::sync::oneshot; use tokio::sync::oneshot;
use crate::spotify_id::{FileId, SpotifyId}; use crate::{packet::PacketType, util::SeqGenerator, Error, FileId, SpotifyId};
use crate::util::SeqGenerator;
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)] #[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]
pub struct AudioKey(pub [u8; 16]); pub struct AudioKey(pub [u8; 16]);
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)] #[derive(Debug, Error)]
pub struct AudioKeyError; pub enum AudioKeyError {
#[error("audio key error")]
AesKey,
#[error("other end of channel disconnected")]
Channel,
#[error("unexpected packet type {0}")]
Packet(u8),
#[error("sequence {0} not pending")]
Sequence(u32),
}
impl From<AudioKeyError> for Error {
fn from(err: AudioKeyError) -> Self {
match err {
AudioKeyError::AesKey => Error::unavailable(err),
AudioKeyError::Channel => Error::aborted(err),
AudioKeyError::Sequence(_) => Error::aborted(err),
AudioKeyError::Packet(_) => Error::unimplemented(err),
}
}
}
component! { component! {
AudioKeyManager : AudioKeyManagerInner { AudioKeyManager : AudioKeyManagerInner {
sequence: SeqGenerator<u32> = SeqGenerator::new(0), sequence: SeqGenerator<u32> = SeqGenerator::new(0),
pending: HashMap<u32, oneshot::Sender<Result<AudioKey, AudioKeyError>>> = HashMap::new(), pending: HashMap<u32, oneshot::Sender<Result<AudioKey, Error>>> = HashMap::new(),
} }
} }
impl AudioKeyManager { impl AudioKeyManager {
pub(crate) fn dispatch(&self, cmd: u8, mut data: Bytes) { pub(crate) fn dispatch(&self, cmd: PacketType, mut data: Bytes) -> Result<(), Error> {
let seq = BigEndian::read_u32(data.split_to(4).as_ref()); let seq = BigEndian::read_u32(data.split_to(4).as_ref());
let sender = self.lock(|inner| inner.pending.remove(&seq)); let sender = self
.lock(|inner| inner.pending.remove(&seq))
.ok_or(AudioKeyError::Sequence(seq))?;
if let Some(sender) = sender { match cmd {
match cmd { PacketType::AesKey => {
0xd => { let mut key = [0u8; 16];
let mut key = [0u8; 16]; key.copy_from_slice(data.as_ref());
key.copy_from_slice(data.as_ref()); sender
let _ = sender.send(Ok(AudioKey(key))); .send(Ok(AudioKey(key)))
} .map_err(|_| AudioKeyError::Channel)?
0xe => { }
warn!( PacketType::AesKeyError => {
"error audio key {:x} {:x}", error!(
data.as_ref()[0], "error audio key {:x} {:x}",
data.as_ref()[1] data.as_ref()[0],
); data.as_ref()[1]
let _ = sender.send(Err(AudioKeyError)); );
} sender
_ => (), .send(Err(AudioKeyError::AesKey.into()))
.map_err(|_| AudioKeyError::Channel)?
}
_ => {
trace!(
"Did not expect {:?} AES key packet with data {:#?}",
cmd,
data
);
return Err(AudioKeyError::Packet(cmd as u8).into());
} }
} }
Ok(())
} }
pub async fn request(&self, track: SpotifyId, file: FileId) -> Result<AudioKey, AudioKeyError> { pub async fn request(&self, track: SpotifyId, file: FileId) -> Result<AudioKey, Error> {
let (tx, rx) = oneshot::channel(); let (tx, rx) = oneshot::channel();
let seq = self.lock(move |inner| { let seq = self.lock(move |inner| {
@ -55,17 +88,17 @@ impl AudioKeyManager {
seq seq
}); });
self.send_key_request(seq, track, file); self.send_key_request(seq, track, file)?;
rx.await.map_err(|_| AudioKeyError)? rx.await?
} }
fn send_key_request(&self, seq: u32, track: SpotifyId, file: FileId) { fn send_key_request(&self, seq: u32, track: SpotifyId, file: FileId) -> Result<(), Error> {
let mut data: Vec<u8> = Vec::new(); let mut data: Vec<u8> = Vec::new();
data.write(&file.0).unwrap(); data.write_all(&file.0)?;
data.write(&track.to_raw()).unwrap(); data.write_all(&track.to_raw())?;
data.write_u32::<BigEndian>(seq).unwrap(); data.write_u32::<BigEndian>(seq)?;
data.write_u16::<BigEndian>(0x0000).unwrap(); data.write_u16::<BigEndian>(0x0000)?;
self.session().send_packet(0xc, data) self.session().send_packet(PacketType::RequestKey, data)
} }
} }

View file

@ -7,8 +7,23 @@ use pbkdf2::pbkdf2;
use protobuf::ProtobufEnum; use protobuf::ProtobufEnum;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sha1::{Digest, Sha1}; use sha1::{Digest, Sha1};
use thiserror::Error;
use crate::protocol::authentication::AuthenticationType; use crate::{protocol::authentication::AuthenticationType, Error};
#[derive(Debug, Error)]
pub enum AuthenticationError {
#[error("unknown authentication type {0}")]
AuthType(u32),
#[error("invalid key")]
Key,
}
impl From<AuthenticationError> for Error {
fn from(err: AuthenticationError) -> Self {
Error::invalid_argument(err)
}
}
/// The credentials are used to log into the Spotify API. /// The credentials are used to log into the Spotify API.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -46,7 +61,7 @@ impl Credentials {
username: impl Into<String>, username: impl Into<String>,
encrypted_blob: impl AsRef<[u8]>, encrypted_blob: impl AsRef<[u8]>,
device_id: impl AsRef<[u8]>, device_id: impl AsRef<[u8]>,
) -> Credentials { ) -> Result<Credentials, Error> {
fn read_u8<R: Read>(stream: &mut R) -> io::Result<u8> { fn read_u8<R: Read>(stream: &mut R) -> io::Result<u8> {
let mut data = [0u8]; let mut data = [0u8];
stream.read_exact(&mut data)?; stream.read_exact(&mut data)?;
@ -77,6 +92,10 @@ impl Credentials {
let key = { let key = {
let mut key = [0u8; 24]; let mut key = [0u8; 24];
if key.len() < 20 {
return Err(AuthenticationError::Key.into());
}
pbkdf2::<Hmac<Sha1>>(&secret, username.as_bytes(), 0x100, &mut key[0..20]); pbkdf2::<Hmac<Sha1>>(&secret, username.as_bytes(), 0x100, &mut key[0..20]);
let hash = &Sha1::digest(&key[..20]); let hash = &Sha1::digest(&key[..20]);
@ -87,15 +106,13 @@ impl Credentials {
// decrypt data using ECB mode without padding // decrypt data using ECB mode without padding
let blob = { let blob = {
use aes::cipher::generic_array::typenum::Unsigned;
use aes::cipher::generic_array::GenericArray; use aes::cipher::generic_array::GenericArray;
use aes::cipher::{BlockCipher, NewBlockCipher}; use aes::cipher::{BlockDecrypt, BlockSizeUser, KeyInit};
let mut data = base64::decode(encrypted_blob).unwrap(); let mut data = base64::decode(encrypted_blob)?;
let cipher = Aes192::new(GenericArray::from_slice(&key)); let cipher = Aes192::new(GenericArray::from_slice(&key));
let block_size = <Aes192 as BlockCipher>::BlockSize::to_usize(); let block_size = Aes192::block_size();
assert_eq!(data.len() % block_size, 0);
for chunk in data.chunks_exact_mut(block_size) { for chunk in data.chunks_exact_mut(block_size) {
cipher.decrypt_block(GenericArray::from_mut_slice(chunk)); cipher.decrypt_block(GenericArray::from_mut_slice(chunk));
} }
@ -109,19 +126,20 @@ impl Credentials {
}; };
let mut cursor = io::Cursor::new(blob.as_slice()); let mut cursor = io::Cursor::new(blob.as_slice());
read_u8(&mut cursor).unwrap(); read_u8(&mut cursor)?;
read_bytes(&mut cursor).unwrap(); read_bytes(&mut cursor)?;
read_u8(&mut cursor).unwrap(); read_u8(&mut cursor)?;
let auth_type = read_int(&mut cursor).unwrap(); let auth_type = read_int(&mut cursor)?;
let auth_type = AuthenticationType::from_i32(auth_type as i32).unwrap(); let auth_type = AuthenticationType::from_i32(auth_type as i32)
read_u8(&mut cursor).unwrap(); .ok_or(AuthenticationError::AuthType(auth_type))?;
let auth_data = read_bytes(&mut cursor).unwrap(); read_u8(&mut cursor)?;
let auth_data = read_bytes(&mut cursor)?;
Credentials { Ok(Credentials {
username, username,
auth_type, auth_type,
auth_data, auth_data,
} })
} }
} }

View file

@ -1,15 +1,30 @@
use std::cmp::Reverse; use std::{
use std::collections::HashMap; cmp::Reverse,
use std::fs::{self, File}; collections::HashMap,
use std::io::{self, Error, ErrorKind, Read, Write}; fs::{self, File},
use std::path::{Path, PathBuf}; io::{self, Read, Write},
use std::sync::{Arc, Mutex}; path::{Path, PathBuf},
use std::time::SystemTime; sync::Arc,
time::SystemTime,
};
use parking_lot::Mutex;
use priority_queue::PriorityQueue; use priority_queue::PriorityQueue;
use thiserror::Error;
use crate::authentication::Credentials; use crate::{authentication::Credentials, error::ErrorKind, Error, FileId};
use crate::spotify_id::FileId;
#[derive(Debug, Error)]
pub enum CacheError {
#[error("audio cache location is not configured")]
Path,
}
impl From<CacheError> for Error {
fn from(err: CacheError) -> Self {
Error::failed_precondition(err)
}
}
/// Some kind of data structure that holds some paths, the size of these files and a timestamp. /// Some kind of data structure that holds some paths, the size of these files and a timestamp.
/// It keeps track of the file sizes and is able to pop the path with the oldest timestamp if /// It keeps track of the file sizes and is able to pop the path with the oldest timestamp if
@ -57,16 +72,17 @@ impl SizeLimiter {
/// to delete the file in the file system. /// to delete the file in the file system.
fn pop(&mut self) -> Option<PathBuf> { fn pop(&mut self) -> Option<PathBuf> {
if self.exceeds_limit() { if self.exceeds_limit() {
let (next, _) = self if let Some((next, _)) = self.queue.pop() {
.queue if let Some(size) = self.sizes.remove(&next) {
.pop() self.in_use -= size;
.expect("in_use was > 0, so the queue should have contained an item."); } else {
let size = self error!("`queue` and `sizes` should have the same keys.");
.sizes }
.remove(&next) Some(next)
.expect("`queue` and `sizes` should have the same keys."); } else {
self.in_use -= size; error!("in_use was > 0, so the queue should have contained an item.");
Some(next) None
}
} else { } else {
None None
} }
@ -85,11 +101,11 @@ impl SizeLimiter {
return false; return false;
} }
let size = self if let Some(size) = self.sizes.remove(file) {
.sizes self.in_use -= size;
.remove(file) } else {
.expect("`queue` and `sizes` should have the same keys."); error!("`queue` and `sizes` should have the same keys.");
self.in_use -= size; }
true true
} }
@ -173,23 +189,21 @@ impl FsSizeLimiter {
} }
fn add(&self, file: &Path, size: u64) { fn add(&self, file: &Path, size: u64) {
self.limiter self.limiter.lock().add(file, size, SystemTime::now())
.lock()
.unwrap()
.add(file, size, SystemTime::now());
} }
fn touch(&self, file: &Path) -> bool { fn touch(&self, file: &Path) -> bool {
self.limiter.lock().unwrap().update(file, SystemTime::now()) self.limiter.lock().update(file, SystemTime::now())
} }
fn remove(&self, file: &Path) { fn remove(&self, file: &Path) -> bool {
self.limiter.lock().unwrap().remove(file); self.limiter.lock().remove(file)
} }
fn prune_internal<F: FnMut() -> Option<PathBuf>>(mut pop: F) { fn prune_internal<F: FnMut() -> Option<PathBuf>>(mut pop: F) -> Result<(), Error> {
let mut first = true; let mut first = true;
let mut count = 0; let mut count = 0;
let mut last_error = None;
while let Some(file) = pop() { while let Some(file) = pop() {
if first { if first {
@ -197,8 +211,10 @@ impl FsSizeLimiter {
first = false; first = false;
} }
if let Err(e) = fs::remove_file(&file) { let res = fs::remove_file(&file);
if let Err(e) = res {
warn!("Could not remove file {:?} from cache dir: {}", file, e); warn!("Could not remove file {:?} from cache dir: {}", file, e);
last_error = Some(e);
} else { } else {
count += 1; count += 1;
} }
@ -207,21 +223,27 @@ impl FsSizeLimiter {
if count > 0 { if count > 0 {
info!("Removed {} cache files.", count); info!("Removed {} cache files.", count);
} }
if let Some(err) = last_error {
Err(err.into())
} else {
Ok(())
}
} }
fn prune(&self) { fn prune(&self) -> Result<(), Error> {
Self::prune_internal(|| self.limiter.lock().unwrap().pop()) Self::prune_internal(|| self.limiter.lock().pop())
} }
fn new(path: &Path, limit: u64) -> Self { fn new(path: &Path, limit: u64) -> Result<Self, Error> {
let mut limiter = SizeLimiter::new(limit); let mut limiter = SizeLimiter::new(limit);
Self::init_dir(&mut limiter, path); Self::init_dir(&mut limiter, path);
Self::prune_internal(|| limiter.pop()); Self::prune_internal(|| limiter.pop())?;
Self { Ok(Self {
limiter: Mutex::new(limiter), limiter: Mutex::new(limiter),
} })
} }
} }
@ -234,15 +256,13 @@ pub struct Cache {
size_limiter: Option<Arc<FsSizeLimiter>>, size_limiter: Option<Arc<FsSizeLimiter>>,
} }
pub struct RemoveFileError(());
impl Cache { impl Cache {
pub fn new<P: AsRef<Path>>( pub fn new<P: AsRef<Path>>(
credentials_path: Option<P>, credentials_path: Option<P>,
volume_path: Option<P>, volume_path: Option<P>,
audio_path: Option<P>, audio_path: Option<P>,
size_limit: Option<u64>, size_limit: Option<u64>,
) -> io::Result<Self> { ) -> Result<Self, Error> {
let mut size_limiter = None; let mut size_limiter = None;
if let Some(location) = &credentials_path { if let Some(location) = &credentials_path {
@ -263,8 +283,7 @@ impl Cache {
fs::create_dir_all(location)?; fs::create_dir_all(location)?;
if let Some(limit) = size_limit { if let Some(limit) = size_limit {
let limiter = FsSizeLimiter::new(location.as_ref(), limit); let limiter = FsSizeLimiter::new(location.as_ref(), limit)?;
size_limiter = Some(Arc::new(limiter)); size_limiter = Some(Arc::new(limiter));
} }
} }
@ -285,11 +304,11 @@ impl Cache {
let location = self.credentials_location.as_ref()?; let location = self.credentials_location.as_ref()?;
// This closure is just convencience to enable the question mark operator // This closure is just convencience to enable the question mark operator
let read = || { let read = || -> Result<Credentials, Error> {
let mut file = File::open(location)?; let mut file = File::open(location)?;
let mut contents = String::new(); let mut contents = String::new();
file.read_to_string(&mut contents)?; file.read_to_string(&mut contents)?;
serde_json::from_str(&contents).map_err(|e| Error::new(ErrorKind::InvalidData, e)) Ok(serde_json::from_str(&contents)?)
}; };
match read() { match read() {
@ -297,7 +316,7 @@ impl Cache {
Err(e) => { Err(e) => {
// If the file did not exist, the file was probably not written // If the file did not exist, the file was probably not written
// before. Otherwise, log the error. // before. Otherwise, log the error.
if e.kind() != ErrorKind::NotFound { if e.kind != ErrorKind::NotFound {
warn!("Error reading credentials from cache: {}", e); warn!("Error reading credentials from cache: {}", e);
} }
None None
@ -321,19 +340,17 @@ impl Cache {
pub fn volume(&self) -> Option<u16> { pub fn volume(&self) -> Option<u16> {
let location = self.volume_location.as_ref()?; let location = self.volume_location.as_ref()?;
let read = || { let read = || -> Result<u16, Error> {
let mut file = File::open(location)?; let mut file = File::open(location)?;
let mut contents = String::new(); let mut contents = String::new();
file.read_to_string(&mut contents)?; file.read_to_string(&mut contents)?;
contents Ok(contents.parse()?)
.parse()
.map_err(|e| Error::new(ErrorKind::InvalidData, e))
}; };
match read() { match read() {
Ok(v) => Some(v), Ok(v) => Some(v),
Err(e) => { Err(e) => {
if e.kind() != ErrorKind::NotFound { if e.kind != ErrorKind::NotFound {
warn!("Error reading volume from cache: {}", e); warn!("Error reading volume from cache: {}", e);
} }
None None
@ -350,7 +367,7 @@ impl Cache {
} }
} }
fn file_path(&self, file: FileId) -> Option<PathBuf> { pub fn file_path(&self, file: FileId) -> Option<PathBuf> {
match file.to_base16() { match file.to_base16() {
Ok(name) => self.audio_location.as_ref().map(|location| { Ok(name) => self.audio_location.as_ref().map(|location| {
let mut path = location.join(&name[0..2]); let mut path = location.join(&name[0..2]);
@ -358,7 +375,7 @@ impl Cache {
path path
}), }),
Err(e) => { Err(e) => {
warn!("Invalid FileId: {}", e.utf8_error()); warn!("Invalid FileId: {}", e);
None None
} }
} }
@ -369,12 +386,14 @@ impl Cache {
match File::open(&path) { match File::open(&path) {
Ok(file) => { Ok(file) => {
if let Some(limiter) = self.size_limiter.as_deref() { if let Some(limiter) = self.size_limiter.as_deref() {
limiter.touch(&path); if !limiter.touch(&path) {
error!("limiter could not touch {:?}", path);
}
} }
Some(file) Some(file)
} }
Err(e) => { Err(e) => {
if e.kind() != ErrorKind::NotFound { if e.kind() != io::ErrorKind::NotFound {
warn!("Error reading file from cache: {}", e) warn!("Error reading file from cache: {}", e)
} }
None None
@ -382,38 +401,33 @@ impl Cache {
} }
} }
pub fn save_file<F: Read>(&self, file: FileId, contents: &mut F) { pub fn save_file<F: Read>(&self, file: FileId, contents: &mut F) -> Result<PathBuf, Error> {
let path = if let Some(path) = self.file_path(file) { if let Some(path) = self.file_path(file) {
path if let Some(parent) = path.parent() {
} else { if let Ok(size) = fs::create_dir_all(parent)
return; .and_then(|_| File::create(&path))
}; .and_then(|mut file| io::copy(contents, &mut file))
let parent = path.parent().unwrap(); {
if let Some(limiter) = self.size_limiter.as_deref() {
let result = fs::create_dir_all(parent) limiter.add(&path, size);
.and_then(|_| File::create(&path)) limiter.prune()?;
.and_then(|mut file| io::copy(contents, &mut file)); }
return Ok(path);
if let Ok(size) = result { }
if let Some(limiter) = self.size_limiter.as_deref() {
limiter.add(&path, size);
limiter.prune();
} }
} }
Err(CacheError::Path.into())
} }
pub fn remove_file(&self, file: FileId) -> Result<(), RemoveFileError> { pub fn remove_file(&self, file: FileId) -> Result<(), Error> {
let path = self.file_path(file).ok_or(RemoveFileError(()))?; let path = self.file_path(file).ok_or(CacheError::Path)?;
if let Err(err) = fs::remove_file(&path) { fs::remove_file(&path)?;
warn!("Unable to remove file from cache: {}", err); if let Some(limiter) = self.size_limiter.as_deref() {
Err(RemoveFileError(())) limiter.remove(&path);
} else {
if let Some(limiter) = self.size_limiter.as_deref() {
limiter.remove(&path);
}
Ok(())
} }
Ok(())
} }
} }

165
core/src/cdn_url.rs Normal file
View file

@ -0,0 +1,165 @@
use std::{
convert::TryFrom,
ops::{Deref, DerefMut},
};
use protobuf::Message;
use thiserror::Error;
use url::Url;
use super::{date::Date, Error, FileId, Session};
use librespot_protocol as protocol;
use protocol::storage_resolve::StorageResolveResponse as CdnUrlMessage;
use protocol::storage_resolve::StorageResolveResponse_Result;
/// A CDN URL (field 0) paired with its optional expiry time (field 1);
/// `None` means the URL is not known to expire.
#[derive(Debug, Clone)]
pub struct MaybeExpiringUrl(pub String, pub Option<Date>);
/// A list of [`MaybeExpiringUrl`]s; derefs to the inner `Vec`.
#[derive(Debug, Clone)]
pub struct MaybeExpiringUrls(pub Vec<MaybeExpiringUrl>);
// Let callers use `Vec` methods (len, iter, …) directly on the wrapper.
impl Deref for MaybeExpiringUrls {
    type Target = Vec<MaybeExpiringUrl>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Mutable counterpart of the `Deref` impl, for in-place edits of the list.
impl DerefMut for MaybeExpiringUrls {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
/// Errors that can occur while resolving or selecting CDN URLs.
#[derive(Debug, Error)]
pub enum CdnUrlError {
    /// Every resolved URL has passed its expiry time.
    #[error("all URLs expired")]
    Expired,
    /// The storage-resolve response pointed at non-CDN storage.
    #[error("resolved storage is not for CDN")]
    Storage,
    /// `try_get_url` was called while the URL list was still empty.
    #[error("no URLs resolved")]
    Unresolved,
}
impl From<CdnUrlError> for Error {
    fn from(err: CdnUrlError) -> Self {
        // Map to the crate-wide error categories: expiry is a deadline
        // problem, the other cases mean the CDN is effectively unavailable.
        match err {
            CdnUrlError::Expired => Error::deadline_exceeded(err),
            CdnUrlError::Storage | CdnUrlError::Unresolved => Error::unavailable(err),
        }
    }
}
/// A CDN-resolved set of download URLs for a single file.
#[derive(Debug, Clone)]
pub struct CdnUrl {
    pub file_id: FileId,
    // Resolved URLs; empty until `resolve_audio` has been called.
    urls: MaybeExpiringUrls,
}
impl CdnUrl {
    /// Creates an unresolved `CdnUrl` for `file_id`; call
    /// [`Self::resolve_audio`] to populate the URL list.
    pub fn new(file_id: FileId) -> Self {
        Self {
            file_id,
            urls: MaybeExpiringUrls(Vec::new()),
        }
    }

    /// Asks the client service where this file's audio is stored and returns
    /// a new `CdnUrl` carrying the resolved URLs.
    pub async fn resolve_audio(&self, session: &Session) -> Result<Self, Error> {
        let file_id = self.file_id;
        let response = session.spclient().get_audio_storage(file_id).await?;
        let message = CdnUrlMessage::parse_from_bytes(&response)?;
        let resolved = Self {
            file_id,
            urls: MaybeExpiringUrls::try_from(message)?,
        };
        trace!("Resolved CDN storage: {:#?}", resolved);
        Ok(resolved)
    }

    /// Returns the first URL that has not yet expired.
    ///
    /// # Errors
    /// `Unresolved` when no URLs have been resolved yet; `Expired` when all
    /// of them have passed their expiry time.
    pub fn try_get_url(&self) -> Result<&str, Error> {
        if self.urls.is_empty() {
            return Err(CdnUrlError::Unresolved.into());
        }
        let now = Date::now_utc();
        self.urls
            .iter()
            .find(|candidate| candidate.1.map_or(true, |expiry| now < expiry))
            .map(|candidate| candidate.0.as_str())
            .ok_or_else(|| CdnUrlError::Expired.into())
    }
}
impl TryFrom<CdnUrlMessage> for MaybeExpiringUrls {
    type Error = crate::Error;
    /// Extracts the CDN URLs from a storage-resolve response, attaching an
    /// expiry time to each URL when one can be parsed out of it.
    fn try_from(msg: CdnUrlMessage) -> Result<Self, Self::Error> {
        // Only CDN storage is supported here.
        if !matches!(msg.get_result(), StorageResolveResponse_Result::CDN) {
            return Err(CdnUrlError::Storage.into());
        }
        // NOTE(review): a non-empty file id is taken to mean the URLs carry
        // expiry info — TODO confirm this is what the server guarantees.
        let is_expiring = !msg.get_fileid().is_empty();
        let result = msg
            .get_cdnurl()
            .iter()
            .map(|cdn_url| {
                let url = Url::parse(cdn_url)?;
                if is_expiring {
                    // The expiry is embedded differently per CDN host:
                    //   akamaized.net: "__token__=…exp=<secs>~…"
                    //   scdn.co:       first '_'-separated field of the query
                    let expiry_str = if let Some(token) = url
                        .query_pairs()
                        .into_iter()
                        .find(|(key, _value)| key == "__token__")
                    {
                        if let Some(mut start) = token.1.find("exp=") {
                            start += 4; // skip past "exp="
                            // NOTE(review): `>=` admits an empty slice when the
                            // token ends right after "exp="; the numeric parse
                            // below then fails the whole conversion — confirm
                            // that is intended.
                            if token.1.len() >= start {
                                let slice = &token.1[start..];
                                if let Some(end) = slice.find('~') {
                                    // this is the only valid invariant for akamaized.net
                                    String::from(&slice[..end])
                                } else {
                                    String::from(slice)
                                }
                            } else {
                                String::new()
                            }
                        } else {
                            String::new()
                        }
                    } else if let Some(query) = url.query() {
                        let mut items = query.split('_');
                        if let Some(first) = items.next() {
                            // this is the only valid invariant for scdn.co
                            String::from(first)
                        } else {
                            String::new()
                        }
                    } else {
                        String::new()
                    };
                    // An empty `expiry_str` fails to parse and aborts the
                    // whole conversion via `?`.
                    let mut expiry: i64 = expiry_str.parse()?;
                    expiry -= 5 * 60; // seconds; presumably a safety margin — TODO confirm
                    Ok(MaybeExpiringUrl(
                        cdn_url.to_owned(),
                        Some(Date::from_timestamp_ms(expiry * 1000)?),
                    ))
                } else {
                    Ok(MaybeExpiringUrl(cdn_url.to_owned(), None))
                }
            })
            .collect::<Result<Vec<MaybeExpiringUrl>, Error>>()?;
        Ok(Self(result))
    }
}

View file

@ -1,16 +1,20 @@
use std::collections::HashMap; use std::{
use std::pin::Pin; collections::HashMap,
use std::task::{Context, Poll}; fmt,
use std::time::Instant; pin::Pin,
task::{Context, Poll},
time::Instant,
};
use byteorder::{BigEndian, ByteOrder}; use byteorder::{BigEndian, ByteOrder};
use bytes::Bytes; use bytes::Bytes;
use futures_core::Stream; use futures_core::Stream;
use futures_util::lock::BiLock; use futures_util::{lock::BiLock, ready, StreamExt};
use futures_util::{ready, StreamExt}; use num_traits::FromPrimitive;
use thiserror::Error;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use crate::util::SeqGenerator; use crate::{packet::PacketType, util::SeqGenerator, Error};
component! { component! {
ChannelManager : ChannelManagerInner { ChannelManager : ChannelManagerInner {
@ -25,9 +29,21 @@ component! {
const ONE_SECOND_IN_MS: usize = 1000; const ONE_SECOND_IN_MS: usize = 1000;
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)] #[derive(Debug, Error, Hash, PartialEq, Eq, Copy, Clone)]
pub struct ChannelError; pub struct ChannelError;
impl From<ChannelError> for Error {
fn from(err: ChannelError) -> Self {
Error::aborted(err)
}
}
impl fmt::Display for ChannelError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "channel error")
}
}
pub struct Channel { pub struct Channel {
receiver: mpsc::UnboundedReceiver<(u8, Bytes)>, receiver: mpsc::UnboundedReceiver<(u8, Bytes)>,
state: ChannelState, state: ChannelState,
@ -68,7 +84,7 @@ impl ChannelManager {
(seq, channel) (seq, channel)
} }
pub(crate) fn dispatch(&self, cmd: u8, mut data: Bytes) { pub(crate) fn dispatch(&self, cmd: PacketType, mut data: Bytes) -> Result<(), Error> {
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
let id: u16 = BigEndian::read_u16(data.split_to(2).as_ref()); let id: u16 = BigEndian::read_u16(data.split_to(2).as_ref());
@ -92,9 +108,14 @@ impl ChannelManager {
inner.download_measurement_bytes += data.len(); inner.download_measurement_bytes += data.len();
if let Entry::Occupied(entry) = inner.channels.entry(id) { if let Entry::Occupied(entry) = inner.channels.entry(id) {
let _ = entry.get().send((cmd, data)); entry
.get()
.send((cmd as u8, data))
.map_err(|_| ChannelError)?;
} }
});
Ok(())
})
} }
pub fn get_download_rate_estimate(&self) -> usize { pub fn get_download_rate_estimate(&self) -> usize {
@ -114,7 +135,8 @@ impl Channel {
fn recv_packet(&mut self, cx: &mut Context<'_>) -> Poll<Result<Bytes, ChannelError>> { fn recv_packet(&mut self, cx: &mut Context<'_>) -> Poll<Result<Bytes, ChannelError>> {
let (cmd, packet) = ready!(self.receiver.poll_recv(cx)).ok_or(ChannelError)?; let (cmd, packet) = ready!(self.receiver.poll_recv(cx)).ok_or(ChannelError)?;
if cmd == 0xa { let packet_type = FromPrimitive::from_u8(cmd);
if let Some(PacketType::ChannelError) = packet_type {
let code = BigEndian::read_u16(&packet.as_ref()[..2]); let code = BigEndian::read_u16(&packet.as_ref()[..2]);
error!("channel error: {} {}", packet.len(), code); error!("channel error: {} {}", packet.len(), code);
@ -139,7 +161,11 @@ impl Stream for Channel {
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
loop { loop {
match self.state.clone() { match self.state.clone() {
ChannelState::Closed => panic!("Polling already terminated channel"), ChannelState::Closed => {
error!("Polling already terminated channel");
return Poll::Ready(None);
}
ChannelState::Header(mut data) => { ChannelState::Header(mut data) => {
if data.is_empty() { if data.is_empty() {
data = ready!(self.recv_packet(cx))?; data = ready!(self.recv_packet(cx))?;
@ -147,7 +173,6 @@ impl Stream for Channel {
let length = BigEndian::read_u16(data.split_to(2).as_ref()) as usize; let length = BigEndian::read_u16(data.split_to(2).as_ref()) as usize;
if length == 0 { if length == 0 {
assert_eq!(data.len(), 0);
self.state = ChannelState::Data; self.state = ChannelState::Data;
} else { } else {
let header_id = data.split_to(1).as_ref()[0]; let header_id = data.split_to(1).as_ref()[0];

View file

@ -1,20 +1,20 @@
macro_rules! component { macro_rules! component {
($name:ident : $inner:ident { $($key:ident : $ty:ty = $value:expr,)* }) => { ($name:ident : $inner:ident { $($key:ident : $ty:ty = $value:expr,)* }) => {
#[derive(Clone)] #[derive(Clone)]
pub struct $name(::std::sync::Arc<($crate::session::SessionWeak, ::std::sync::Mutex<$inner>)>); pub struct $name(::std::sync::Arc<($crate::session::SessionWeak, ::parking_lot::Mutex<$inner>)>);
impl $name { impl $name {
#[allow(dead_code)] #[allow(dead_code)]
pub(crate) fn new(session: $crate::session::SessionWeak) -> $name { pub(crate) fn new(session: $crate::session::SessionWeak) -> $name {
debug!(target:"librespot::component", "new {}", stringify!($name)); debug!(target:"librespot::component", "new {}", stringify!($name));
$name(::std::sync::Arc::new((session, ::std::sync::Mutex::new($inner { $name(::std::sync::Arc::new((session, ::parking_lot::Mutex::new($inner {
$($key : $value,)* $($key : $value,)*
})))) }))))
} }
#[allow(dead_code)] #[allow(dead_code)]
fn lock<F: FnOnce(&mut $inner) -> R, R>(&self, f: F) -> R { fn lock<F: FnOnce(&mut $inner) -> R, R>(&self, f: F) -> R {
let mut inner = (self.0).1.lock().expect("Mutex poisoned"); let mut inner = (self.0).1.lock();
f(&mut inner) f(&mut inner)
} }

View file

@ -1,23 +1,27 @@
use std::fmt; use std::{fmt, path::PathBuf, str::FromStr};
use std::str::FromStr;
use url::Url; use url::Url;
const KEYMASTER_CLIENT_ID: &str = "65b708073fc0480ea92a077233ca87bd";
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct SessionConfig { pub struct SessionConfig {
pub user_agent: String, pub client_id: String,
pub device_id: String, pub device_id: String,
pub proxy: Option<Url>, pub proxy: Option<Url>,
pub ap_port: Option<u16>, pub ap_port: Option<u16>,
pub tmp_dir: PathBuf,
} }
impl Default for SessionConfig { impl Default for SessionConfig {
fn default() -> SessionConfig { fn default() -> SessionConfig {
let device_id = uuid::Uuid::new_v4().as_hyphenated().to_string(); let device_id = uuid::Uuid::new_v4().as_hyphenated().to_string();
SessionConfig { SessionConfig {
user_agent: crate::version::VERSION_STRING.to_string(), client_id: KEYMASTER_CLIENT_ID.to_owned(),
device_id, device_id,
proxy: None, proxy: None,
ap_port: None, ap_port: None,
tmp_dir: std::env::temp_dir(),
} }
} }
} }
@ -116,24 +120,3 @@ impl Default for DeviceType {
DeviceType::Speaker DeviceType::Speaker
} }
} }
#[derive(Clone, Debug)]
pub struct ConnectConfig {
pub name: String,
pub device_type: DeviceType,
pub initial_volume: Option<u16>,
pub has_volume_ctrl: bool,
pub autoplay: bool,
}
impl Default for ConnectConfig {
fn default() -> ConnectConfig {
ConnectConfig {
name: "Librespot".to_string(),
device_type: DeviceType::default(),
initial_volume: Some(50),
has_volume_ctrl: true,
autoplay: false,
}
}
}

View file

@ -1,12 +1,20 @@
use std::io;
use byteorder::{BigEndian, ByteOrder}; use byteorder::{BigEndian, ByteOrder};
use bytes::{BufMut, Bytes, BytesMut}; use bytes::{BufMut, Bytes, BytesMut};
use shannon::Shannon; use shannon::Shannon;
use std::io; use thiserror::Error;
use tokio_util::codec::{Decoder, Encoder}; use tokio_util::codec::{Decoder, Encoder};
const HEADER_SIZE: usize = 3; const HEADER_SIZE: usize = 3;
const MAC_SIZE: usize = 4; const MAC_SIZE: usize = 4;
#[derive(Debug, Error)]
pub enum ApCodecError {
#[error("payload was malformed")]
Payload,
}
#[derive(Debug)] #[derive(Debug)]
enum DecodeState { enum DecodeState {
Header, Header,
@ -87,7 +95,10 @@ impl Decoder for ApCodec {
let mut payload = buf.split_to(size + MAC_SIZE); let mut payload = buf.split_to(size + MAC_SIZE);
self.decode_cipher.decrypt(payload.get_mut(..size).unwrap()); self.decode_cipher
.decrypt(payload.get_mut(..size).ok_or_else(|| {
io::Error::new(io::ErrorKind::InvalidData, ApCodecError::Payload)
})?);
let mac = payload.split_off(size); let mac = payload.split_off(size);
self.decode_cipher.check_mac(mac.as_ref())?; self.decode_cipher.check_mac(mac.as_ref())?;

View file

@ -1,16 +1,50 @@
use std::{env::consts::ARCH, io};
use byteorder::{BigEndian, ByteOrder, WriteBytesExt}; use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use hmac::{Hmac, Mac, NewMac}; use hmac::{Hmac, Mac};
use protobuf::{self, Message}; use protobuf::{self, Message};
use rand::{thread_rng, RngCore}; use rand::{thread_rng, RngCore};
use sha1::Sha1; use rsa::{BigUint, PublicKey};
use std::io; use sha1::{Digest, Sha1};
use thiserror::Error;
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt}; use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
use tokio_util::codec::{Decoder, Framed}; use tokio_util::codec::{Decoder, Framed};
use super::codec::ApCodec; use super::codec::ApCodec;
use crate::diffie_hellman::DhLocalKeys;
use crate::{diffie_hellman::DhLocalKeys, version};
use crate::protocol; use crate::protocol;
use crate::protocol::keyexchange::{APResponseMessage, ClientHello, ClientResponsePlaintext}; use crate::protocol::keyexchange::{
APResponseMessage, ClientHello, ClientResponsePlaintext, Platform, ProductFlags,
};
const SERVER_KEY: [u8; 256] = [
0xac, 0xe0, 0x46, 0x0b, 0xff, 0xc2, 0x30, 0xaf, 0xf4, 0x6b, 0xfe, 0xc3, 0xbf, 0xbf, 0x86, 0x3d,
0xa1, 0x91, 0xc6, 0xcc, 0x33, 0x6c, 0x93, 0xa1, 0x4f, 0xb3, 0xb0, 0x16, 0x12, 0xac, 0xac, 0x6a,
0xf1, 0x80, 0xe7, 0xf6, 0x14, 0xd9, 0x42, 0x9d, 0xbe, 0x2e, 0x34, 0x66, 0x43, 0xe3, 0x62, 0xd2,
0x32, 0x7a, 0x1a, 0x0d, 0x92, 0x3b, 0xae, 0xdd, 0x14, 0x02, 0xb1, 0x81, 0x55, 0x05, 0x61, 0x04,
0xd5, 0x2c, 0x96, 0xa4, 0x4c, 0x1e, 0xcc, 0x02, 0x4a, 0xd4, 0xb2, 0x0c, 0x00, 0x1f, 0x17, 0xed,
0xc2, 0x2f, 0xc4, 0x35, 0x21, 0xc8, 0xf0, 0xcb, 0xae, 0xd2, 0xad, 0xd7, 0x2b, 0x0f, 0x9d, 0xb3,
0xc5, 0x32, 0x1a, 0x2a, 0xfe, 0x59, 0xf3, 0x5a, 0x0d, 0xac, 0x68, 0xf1, 0xfa, 0x62, 0x1e, 0xfb,
0x2c, 0x8d, 0x0c, 0xb7, 0x39, 0x2d, 0x92, 0x47, 0xe3, 0xd7, 0x35, 0x1a, 0x6d, 0xbd, 0x24, 0xc2,
0xae, 0x25, 0x5b, 0x88, 0xff, 0xab, 0x73, 0x29, 0x8a, 0x0b, 0xcc, 0xcd, 0x0c, 0x58, 0x67, 0x31,
0x89, 0xe8, 0xbd, 0x34, 0x80, 0x78, 0x4a, 0x5f, 0xc9, 0x6b, 0x89, 0x9d, 0x95, 0x6b, 0xfc, 0x86,
0xd7, 0x4f, 0x33, 0xa6, 0x78, 0x17, 0x96, 0xc9, 0xc3, 0x2d, 0x0d, 0x32, 0xa5, 0xab, 0xcd, 0x05,
0x27, 0xe2, 0xf7, 0x10, 0xa3, 0x96, 0x13, 0xc4, 0x2f, 0x99, 0xc0, 0x27, 0xbf, 0xed, 0x04, 0x9c,
0x3c, 0x27, 0x58, 0x04, 0xb6, 0xb2, 0x19, 0xf9, 0xc1, 0x2f, 0x02, 0xe9, 0x48, 0x63, 0xec, 0xa1,
0xb6, 0x42, 0xa0, 0x9d, 0x48, 0x25, 0xf8, 0xb3, 0x9d, 0xd0, 0xe8, 0x6a, 0xf9, 0x48, 0x4d, 0xa1,
0xc2, 0xba, 0x86, 0x30, 0x42, 0xea, 0x9d, 0xb3, 0x08, 0x6c, 0x19, 0x0e, 0x48, 0xb3, 0x9d, 0x66,
0xeb, 0x00, 0x06, 0xa2, 0x5a, 0xee, 0xa1, 0x1b, 0x13, 0x87, 0x3c, 0xd7, 0x19, 0xe6, 0x55, 0xbd,
];
#[derive(Debug, Error)]
pub enum HandshakeError {
#[error("invalid key length")]
InvalidLength,
#[error("server key verification failed")]
VerificationFailed,
}
pub async fn handshake<T: AsyncRead + AsyncWrite + Unpin>( pub async fn handshake<T: AsyncRead + AsyncWrite + Unpin>(
mut connection: T, mut connection: T,
@ -25,9 +59,37 @@ pub async fn handshake<T: AsyncRead + AsyncWrite + Unpin>(
.get_diffie_hellman() .get_diffie_hellman()
.get_gs() .get_gs()
.to_owned(); .to_owned();
let remote_signature = message
.get_challenge()
.get_login_crypto_challenge()
.get_diffie_hellman()
.get_gs_signature()
.to_owned();
// Prevent man-in-the-middle attacks: check server signature
let n = BigUint::from_bytes_be(&SERVER_KEY);
let e = BigUint::new(vec![65537]);
let public_key = rsa::RsaPublicKey::new(n, e).map_err(|_| {
io::Error::new(
io::ErrorKind::InvalidData,
HandshakeError::VerificationFailed,
)
})?;
let hash = Sha1::digest(&remote_key);
let padding = rsa::padding::PaddingScheme::new_pkcs1v15_sign(Some(rsa::hash::Hash::SHA1));
public_key
.verify(padding, &hash, &remote_signature)
.map_err(|_| {
io::Error::new(
io::ErrorKind::InvalidData,
HandshakeError::VerificationFailed,
)
})?;
// OK to proceed
let shared_secret = local_keys.shared_secret(&remote_key); let shared_secret = local_keys.shared_secret(&remote_key);
let (challenge, send_key, recv_key) = compute_keys(&shared_secret, &accumulator); let (challenge, send_key, recv_key) = compute_keys(&shared_secret, &accumulator)?;
let codec = ApCodec::new(&send_key, &recv_key); let codec = ApCodec::new(&send_key, &recv_key);
client_response(&mut connection, challenge).await?; client_response(&mut connection, challenge).await?;
@ -42,14 +104,56 @@ where
let mut client_nonce = vec![0; 0x10]; let mut client_nonce = vec![0; 0x10];
thread_rng().fill_bytes(&mut client_nonce); thread_rng().fill_bytes(&mut client_nonce);
let platform = match std::env::consts::OS {
"android" => Platform::PLATFORM_ANDROID_ARM,
"freebsd" | "netbsd" | "openbsd" => match ARCH {
"x86_64" => Platform::PLATFORM_FREEBSD_X86_64,
_ => Platform::PLATFORM_FREEBSD_X86,
},
"ios" => match ARCH {
"aarch64" => Platform::PLATFORM_IPHONE_ARM64,
_ => Platform::PLATFORM_IPHONE_ARM,
},
"linux" => match ARCH {
"arm" | "aarch64" => Platform::PLATFORM_LINUX_ARM,
"blackfin" => Platform::PLATFORM_LINUX_BLACKFIN,
"mips" => Platform::PLATFORM_LINUX_MIPS,
"sh" => Platform::PLATFORM_LINUX_SH,
"x86_64" => Platform::PLATFORM_LINUX_X86_64,
_ => Platform::PLATFORM_LINUX_X86,
},
"macos" => match ARCH {
"ppc" | "ppc64" => Platform::PLATFORM_OSX_PPC,
"x86_64" => Platform::PLATFORM_OSX_X86_64,
_ => Platform::PLATFORM_OSX_X86,
},
"windows" => match ARCH {
"arm" | "aarch64" => Platform::PLATFORM_WINDOWS_CE_ARM,
"x86_64" => Platform::PLATFORM_WIN32_X86_64,
_ => Platform::PLATFORM_WIN32_X86,
},
_ => Platform::PLATFORM_LINUX_X86,
};
#[cfg(debug_assertions)]
const PRODUCT_FLAGS: ProductFlags = ProductFlags::PRODUCT_FLAG_DEV_BUILD;
#[cfg(not(debug_assertions))]
const PRODUCT_FLAGS: ProductFlags = ProductFlags::PRODUCT_FLAG_NONE;
let mut packet = ClientHello::new(); let mut packet = ClientHello::new();
packet packet
.mut_build_info() .mut_build_info()
.set_product(protocol::keyexchange::Product::PRODUCT_PARTNER); // ProductInfo won't push autoplay and perhaps other settings
// when set to anything else than PRODUCT_CLIENT
.set_product(protocol::keyexchange::Product::PRODUCT_CLIENT);
packet packet
.mut_build_info() .mut_build_info()
.set_platform(protocol::keyexchange::Platform::PLATFORM_LINUX_X86); .mut_product_flags()
packet.mut_build_info().set_version(109800078); .push(PRODUCT_FLAGS);
packet.mut_build_info().set_platform(platform);
packet
.mut_build_info()
.set_version(version::SPOTIFY_VERSION);
packet packet
.mut_cryptosuites_supported() .mut_cryptosuites_supported()
.push(protocol::keyexchange::Cryptosuite::CRYPTO_SUITE_SHANNON); .push(protocol::keyexchange::Cryptosuite::CRYPTO_SUITE_SHANNON);
@ -66,8 +170,8 @@ where
let mut buffer = vec![0, 4]; let mut buffer = vec![0, 4];
let size = 2 + 4 + packet.compute_size(); let size = 2 + 4 + packet.compute_size();
<Vec<u8> as WriteBytesExt>::write_u32::<BigEndian>(&mut buffer, size).unwrap(); <Vec<u8> as WriteBytesExt>::write_u32::<BigEndian>(&mut buffer, size)?;
packet.write_to_vec(&mut buffer).unwrap(); packet.write_to_vec(&mut buffer)?;
connection.write_all(&buffer[..]).await?; connection.write_all(&buffer[..]).await?;
Ok(buffer) Ok(buffer)
@ -87,8 +191,8 @@ where
let mut buffer = vec![]; let mut buffer = vec![];
let size = 4 + packet.compute_size(); let size = 4 + packet.compute_size();
<Vec<u8> as WriteBytesExt>::write_u32::<BigEndian>(&mut buffer, size).unwrap(); <Vec<u8> as WriteBytesExt>::write_u32::<BigEndian>(&mut buffer, size)?;
packet.write_to_vec(&mut buffer).unwrap(); packet.write_to_vec(&mut buffer)?;
connection.write_all(&buffer[..]).await?; connection.write_all(&buffer[..]).await?;
Ok(()) Ok(())
@ -102,7 +206,7 @@ where
let header = read_into_accumulator(connection, 4, acc).await?; let header = read_into_accumulator(connection, 4, acc).await?;
let size = BigEndian::read_u32(header) as usize; let size = BigEndian::read_u32(header) as usize;
let data = read_into_accumulator(connection, size - 4, acc).await?; let data = read_into_accumulator(connection, size - 4, acc).await?;
let message = M::parse_from_bytes(data).unwrap(); let message = M::parse_from_bytes(data)?;
Ok(message) Ok(message)
} }
@ -118,24 +222,26 @@ async fn read_into_accumulator<'a, 'b, T: AsyncRead + Unpin>(
Ok(&mut acc[offset..]) Ok(&mut acc[offset..])
} }
fn compute_keys(shared_secret: &[u8], packets: &[u8]) -> (Vec<u8>, Vec<u8>, Vec<u8>) { fn compute_keys(shared_secret: &[u8], packets: &[u8]) -> io::Result<(Vec<u8>, Vec<u8>, Vec<u8>)> {
type HmacSha1 = Hmac<Sha1>; type HmacSha1 = Hmac<Sha1>;
let mut data = Vec::with_capacity(0x64); let mut data = Vec::with_capacity(0x64);
for i in 1..6 { for i in 1..6 {
let mut mac = let mut mac = HmacSha1::new_from_slice(shared_secret).map_err(|_| {
HmacSha1::new_from_slice(shared_secret).expect("HMAC can take key of any size"); io::Error::new(io::ErrorKind::InvalidData, HandshakeError::InvalidLength)
})?;
mac.update(packets); mac.update(packets);
mac.update(&[i]); mac.update(&[i]);
data.extend_from_slice(&mac.finalize().into_bytes()); data.extend_from_slice(&mac.finalize().into_bytes());
} }
let mut mac = HmacSha1::new_from_slice(&data[..0x14]).expect("HMAC can take key of any size"); let mut mac = HmacSha1::new_from_slice(&data[..0x14])
.map_err(|_| io::Error::new(io::ErrorKind::InvalidData, HandshakeError::InvalidLength))?;
mac.update(packets); mac.update(packets);
( Ok((
mac.finalize().into_bytes().to_vec(), mac.finalize().into_bytes().to_vec(),
data[0x14..0x34].to_vec(), data[0x14..0x34].to_vec(),
data[0x34..0x54].to_vec(), data[0x34..0x54].to_vec(),
) ))
} }

View file

@ -1,23 +1,21 @@
mod codec; mod codec;
mod handshake; mod handshake;
pub use self::codec::ApCodec; pub use self::{codec::ApCodec, handshake::handshake};
pub use self::handshake::handshake;
use std::io::{self, ErrorKind}; use std::io;
use std::net::ToSocketAddrs;
use futures_util::{SinkExt, StreamExt}; use futures_util::{SinkExt, StreamExt};
use protobuf::{self, Message, ProtobufError}; use num_traits::FromPrimitive;
use protobuf::{self, Message};
use thiserror::Error; use thiserror::Error;
use tokio::net::TcpStream; use tokio::net::TcpStream;
use tokio_util::codec::Framed; use tokio_util::codec::Framed;
use url::Url; use url::Url;
use crate::authentication::Credentials; use crate::{authentication::Credentials, packet::PacketType, version, Error};
use crate::protocol::keyexchange::{APLoginFailed, ErrorCode}; use crate::protocol::keyexchange::{APLoginFailed, ErrorCode};
use crate::proxytunnel;
use crate::version;
pub type Transport = Framed<TcpStream, ApCodec>; pub type Transport = Framed<TcpStream, ApCodec>;
@ -42,13 +40,19 @@ fn login_error_message(code: &ErrorCode) -> &'static str {
pub enum AuthenticationError { pub enum AuthenticationError {
#[error("Login failed with reason: {}", login_error_message(.0))] #[error("Login failed with reason: {}", login_error_message(.0))]
LoginFailed(ErrorCode), LoginFailed(ErrorCode),
#[error("Authentication failed: {0}")] #[error("invalid packet {0}")]
IoError(#[from] io::Error), Packet(u8),
#[error("transport returned no data")]
Transport,
} }
impl From<ProtobufError> for AuthenticationError { impl From<AuthenticationError> for Error {
fn from(e: ProtobufError) -> Self { fn from(err: AuthenticationError) -> Self {
io::Error::new(ErrorKind::InvalidData, e).into() match err {
AuthenticationError::LoginFailed(_) => Error::permission_denied(err),
AuthenticationError::Packet(_) => Error::unimplemented(err),
AuthenticationError::Transport => Error::unavailable(err),
}
} }
} }
@ -58,50 +62,8 @@ impl From<APLoginFailed> for AuthenticationError {
} }
} }
pub async fn connect(addr: String, proxy: Option<&Url>) -> io::Result<Transport> { pub async fn connect(host: &str, port: u16, proxy: Option<&Url>) -> io::Result<Transport> {
let socket = if let Some(proxy_url) = proxy { let socket = crate::socket::connect(host, port, proxy).await?;
info!("Using proxy \"{}\"", proxy_url);
let socket_addr = proxy_url.socket_addrs(|| None).and_then(|addrs| {
addrs.into_iter().next().ok_or_else(|| {
io::Error::new(
io::ErrorKind::NotFound,
"Can't resolve proxy server address",
)
})
})?;
let socket = TcpStream::connect(&socket_addr).await?;
let uri = addr.parse::<http::Uri>().map_err(|_| {
io::Error::new(
io::ErrorKind::InvalidData,
"Can't parse access point address",
)
})?;
let host = uri.host().ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidInput,
"The access point address contains no hostname",
)
})?;
let port = uri.port().ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidInput,
"The access point address contains no port",
)
})?;
proxytunnel::proxy_connect(socket, host, port.as_str()).await?
} else {
let socket_addr = addr.to_socket_addrs()?.next().ok_or_else(|| {
io::Error::new(
io::ErrorKind::NotFound,
"Can't resolve access point address",
)
})?;
TcpStream::connect(&socket_addr).await?
};
handshake(socket).await handshake(socket).await
} }
@ -110,9 +72,32 @@ pub async fn authenticate(
transport: &mut Transport, transport: &mut Transport,
credentials: Credentials, credentials: Credentials,
device_id: &str, device_id: &str,
) -> Result<Credentials, AuthenticationError> { ) -> Result<Credentials, Error> {
use crate::protocol::authentication::{APWelcome, ClientResponseEncrypted, CpuFamily, Os}; use crate::protocol::authentication::{APWelcome, ClientResponseEncrypted, CpuFamily, Os};
let cpu_family = match std::env::consts::ARCH {
"blackfin" => CpuFamily::CPU_BLACKFIN,
"arm" | "arm64" => CpuFamily::CPU_ARM,
"ia64" => CpuFamily::CPU_IA64,
"mips" => CpuFamily::CPU_MIPS,
"ppc" => CpuFamily::CPU_PPC,
"ppc64" => CpuFamily::CPU_PPC_64,
"sh" => CpuFamily::CPU_SH,
"x86" => CpuFamily::CPU_X86,
"x86_64" => CpuFamily::CPU_X86_64,
_ => CpuFamily::CPU_UNKNOWN,
};
let os = match std::env::consts::OS {
"android" => Os::OS_ANDROID,
"freebsd" | "netbsd" | "openbsd" => Os::OS_FREEBSD,
"ios" => Os::OS_IPHONE,
"linux" => Os::OS_LINUX,
"macos" => Os::OS_OSX,
"windows" => Os::OS_WINDOWS,
_ => Os::OS_UNKNOWN,
};
let mut packet = ClientResponseEncrypted::new(); let mut packet = ClientResponseEncrypted::new();
packet packet
.mut_login_credentials() .mut_login_credentials()
@ -123,29 +108,31 @@ pub async fn authenticate(
packet packet
.mut_login_credentials() .mut_login_credentials()
.set_auth_data(credentials.auth_data); .set_auth_data(credentials.auth_data);
packet packet.mut_system_info().set_cpu_family(cpu_family);
.mut_system_info() packet.mut_system_info().set_os(os);
.set_cpu_family(CpuFamily::CPU_UNKNOWN);
packet.mut_system_info().set_os(Os::OS_UNKNOWN);
packet packet
.mut_system_info() .mut_system_info()
.set_system_information_string(format!( .set_system_information_string(format!(
"librespot_{}_{}", "librespot-{}-{}",
version::SHA_SHORT, version::SHA_SHORT,
version::BUILD_ID version::BUILD_ID
)); ));
packet packet
.mut_system_info() .mut_system_info()
.set_device_id(device_id.to_string()); .set_device_id(device_id.to_string());
packet.set_version_string(version::VERSION_STRING.to_string()); packet.set_version_string(format!("librespot {}", version::SEMVER));
let cmd = 0xab; let cmd = PacketType::Login;
let data = packet.write_to_bytes().unwrap(); let data = packet.write_to_bytes()?;
transport.send((cmd, data)).await?; transport.send((cmd as u8, data)).await?;
let (cmd, data) = transport.next().await.expect("EOF")?; let (cmd, data) = transport
match cmd { .next()
0xac => { .await
.ok_or(AuthenticationError::Transport)??;
let packet_type = FromPrimitive::from_u8(cmd);
let result = match packet_type {
Some(PacketType::APWelcome) => {
let welcome_data = APWelcome::parse_from_bytes(data.as_ref())?; let welcome_data = APWelcome::parse_from_bytes(data.as_ref())?;
let reusable_credentials = Credentials { let reusable_credentials = Credentials {
@ -156,13 +143,18 @@ pub async fn authenticate(
Ok(reusable_credentials) Ok(reusable_credentials)
} }
0xad => { Some(PacketType::AuthFailure) => {
let error_data = APLoginFailed::parse_from_bytes(data.as_ref())?; let error_data = APLoginFailed::parse_from_bytes(data.as_ref())?;
Err(error_data.into()) Err(error_data.into())
} }
_ => { _ => {
let msg = format!("Received invalid packet: {}", cmd); trace!(
Err(io::Error::new(ErrorKind::InvalidData, msg).into()) "Did not expect {:?} AES key packet with data {:#?}",
cmd,
data
);
Err(AuthenticationError::Packet(cmd))
} }
} };
Ok(result?)
} }

81
core/src/date.rs Normal file
View file

@ -0,0 +1,81 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use time::{error::ComponentRange, Date as _Date, OffsetDateTime, PrimitiveDateTime, Time};
use crate::Error;
use librespot_protocol as protocol;
use protocol::metadata::Date as DateMessage;
// `time` component-range errors (e.g. an out-of-range month or hour) map to
// the crate's out-of-range error category.
impl From<ComponentRange> for Error {
    fn from(err: ComponentRange) -> Self {
        Error::out_of_range(err)
    }
}
/// Thin wrapper around [`OffsetDateTime`]; derives ordering so dates can be
/// compared directly (e.g. for expiry checks).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub struct Date(pub OffsetDateTime);
// Allow calling `OffsetDateTime` methods directly on a `Date`.
impl Deref for Date {
    type Target = OffsetDateTime;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Date {
    /// Milliseconds since the Unix epoch.
    pub fn as_timestamp_ms(&self) -> i64 {
        const NANOS_PER_MS: i128 = 1_000_000;
        (self.0.unix_timestamp_nanos() / NANOS_PER_MS) as i64
    }

    /// Builds a `Date` from milliseconds since the Unix epoch.
    ///
    /// # Errors
    /// Fails when the timestamp falls outside the range `OffsetDateTime`
    /// supports.
    pub fn from_timestamp_ms(timestamp: i64) -> Result<Self, Error> {
        let nanos = i128::from(timestamp) * 1_000_000;
        Ok(Self(OffsetDateTime::from_unix_timestamp_nanos(nanos)?))
    }

    /// Returns the wrapped `OffsetDateTime`.
    pub fn as_utc(&self) -> OffsetDateTime {
        self.0
    }

    /// Interprets a naive date-time as UTC.
    pub fn from_utc(date_time: PrimitiveDateTime) -> Self {
        Self(date_time.assume_utc())
    }

    /// The current moment in UTC.
    pub fn now_utc() -> Self {
        Self(OffsetDateTime::now_utc())
    }
}
impl TryFrom<&DateMessage> for Date {
    type Error = crate::Error;
    /// Converts a protobuf `Date` message into a UTC [`Date`].
    ///
    /// # Errors
    /// Fails when any component (year/month/day/hour/minute) is out of range
    /// for the `time` crate.
    fn try_from(msg: &DateMessage) -> Result<Self, Self::Error> {
        // Some metadata carries only a year; default a missing month to January.
        let month = if msg.has_month() { msg.get_month() as u8 } else { 1 };
        // A missing day would otherwise resolve to the last day of the month
        // before, which is unexpected — default to the first of the month.
        let day = if msg.has_day() { msg.get_day() as u8 } else { 1 };
        let calendar_date = _Date::from_calendar_date(msg.get_year(), month.try_into()?, day)?;
        let clock_time = Time::from_hms(msg.get_hour() as u8, msg.get_minute() as u8, 0)?;
        Ok(Self::from_utc(PrimitiveDateTime::new(calendar_date, clock_time)))
    }
}
// Cheap wrapping conversion; the offset is kept as-is.
impl From<OffsetDateTime> for Date {
    fn from(datetime: OffsetDateTime) -> Self {
        Self(datetime)
    }
}

130
core/src/dealer/maps.rs Normal file
View file

@ -0,0 +1,130 @@
use std::collections::HashMap;
use thiserror::Error;
use crate::Error;
/// Errors for [`HandlerMap`] operations.
#[derive(Debug, Error)]
pub enum HandlerMapError {
    /// A handler is already registered at, above, or below the requested path.
    #[error("request was already handled")]
    AlreadyHandled,
}
impl From<HandlerMapError> for Error {
    fn from(err: HandlerMapError) -> Self {
        // Registration conflicts map to the generic `aborted` category.
        Error::aborted(err)
    }
}
/// A trie mapping request paths to at most one handler each.
///
/// A `Leaf` terminates a path with its handler; a `Branch` maps the next
/// path component to a subtree. A handler at a prefix excludes handlers
/// below it, and vice versa — `insert` enforces this.
pub enum HandlerMap<T> {
    Leaf(T),
    Branch(HashMap<String, HandlerMap<T>>),
}
// Manual impl: `#[derive(Default)]` would add an unwanted `T: Default` bound.
impl<T> Default for HandlerMap<T> {
    fn default() -> Self {
        Self::Branch(HashMap::new())
    }
}
impl<T> HandlerMap<T> {
    /// Registers `handler` at the node addressed by `path`.
    ///
    /// # Errors
    /// Returns [`HandlerMapError::AlreadyHandled`] when a handler already
    /// exists at this path, at a prefix of it, or below it.
    pub fn insert<'a>(
        &mut self,
        mut path: impl Iterator<Item = &'a str>,
        handler: T,
    ) -> Result<(), Error> {
        match self {
            Self::Leaf(_) => Err(HandlerMapError::AlreadyHandled.into()),
            Self::Branch(children) => match path.next() {
                Some(component) => {
                    let child = children.entry(component.to_owned()).or_default();
                    child.insert(path, handler)
                }
                None => {
                    // Only an empty branch may become a leaf; otherwise a
                    // handler at this prefix would conflict with deeper ones.
                    if !children.is_empty() {
                        return Err(HandlerMapError::AlreadyHandled.into());
                    }
                    *self = Self::Leaf(handler);
                    Ok(())
                }
            },
        }
    }

    /// Looks up the handler responsible for `path`. A leaf matches any
    /// remaining suffix of the path.
    pub fn get<'a>(&self, mut path: impl Iterator<Item = &'a str>) -> Option<&T> {
        match self {
            Self::Leaf(handler) => Some(handler),
            Self::Branch(children) => children.get(path.next()?)?.get(path),
        }
    }

    /// Removes and returns the handler at `path`, pruning branches that
    /// become empty along the way.
    pub fn remove<'a>(&mut self, mut path: impl Iterator<Item = &'a str>) -> Option<T> {
        match self {
            Self::Leaf(_) => {
                // Swap in the default (an empty branch) to take ownership.
                if let Self::Leaf(handler) = std::mem::take(self) {
                    Some(handler)
                } else {
                    unreachable!()
                }
            }
            Self::Branch(children) => {
                let component = path.next()?;
                let child = children.get_mut(component)?;
                let removed = child.remove(path);
                // Prune the subtree when it has become an empty branch.
                if matches!(&*child, Self::Branch(grandchildren) if grandchildren.is_empty()) {
                    children.remove(component);
                }
                removed
            }
        }
    }
}
/// A trie of subscribers keyed by uri path components. Unlike
/// [`HandlerMap`], any number of subscribers may sit at any node.
pub struct SubscriberMap<T> {
    // Subscribers registered exactly at this node.
    subscribed: Vec<T>,
    children: HashMap<String, SubscriberMap<T>>,
}

impl<T> Default for SubscriberMap<T> {
    // Manual impl: deriving `Default` would add an unwanted `T: Default` bound.
    fn default() -> Self {
        Self {
            subscribed: Vec::new(),
            children: HashMap::new(),
        }
    }
}
impl<T> SubscriberMap<T> {
    /// Registers `handler` at the node addressed by `path`.
    pub fn insert<'a>(&mut self, mut path: impl Iterator<Item = &'a str>, handler: T) {
        match path.next() {
            Some(component) => self
                .children
                .entry(component.to_owned())
                .or_default()
                .insert(path, handler),
            None => self.subscribed.push(handler),
        }
    }

    /// True when neither this node nor any child holds a subscriber.
    pub fn is_empty(&self) -> bool {
        self.subscribed.is_empty() && self.children.is_empty()
    }

    /// Keeps only subscribers along `path` for which `fun` returns `true`,
    /// pruning child nodes that become empty.
    pub fn retain<'a>(
        &mut self,
        mut path: impl Iterator<Item = &'a str>,
        fun: &mut impl FnMut(&T) -> bool,
    ) {
        self.subscribed.retain(|handler| fun(handler));

        if let Some(component) = path.next() {
            if let Some(child) = self.children.get_mut(component) {
                child.retain(path, fun);
                if child.is_empty() {
                    self.children.remove(component);
                }
            }
        }
    }
}

612
core/src/dealer/mod.rs Normal file
View file

@ -0,0 +1,612 @@
mod maps;
pub mod protocol;
use std::{
iter,
pin::Pin,
sync::{
atomic::{self, AtomicBool},
Arc,
},
task::Poll,
time::Duration,
};
use futures_core::{Future, Stream};
use futures_util::{future::join_all, SinkExt, StreamExt};
use parking_lot::Mutex;
use thiserror::Error;
use tokio::{
select,
sync::{
mpsc::{self, UnboundedReceiver},
Semaphore,
},
task::JoinHandle,
};
use tokio_tungstenite::tungstenite;
use tungstenite::error::UrlError;
use url::Url;
use self::maps::*;
use self::protocol::*;
use crate::{
socket,
util::{keep_flushing, CancelOnDrop, TimeoutOnDrop},
Error,
};
type WsMessage = tungstenite::Message;
type WsError = tungstenite::Error;
type WsResult<T> = Result<T, tungstenite::Error>;

// How long `TimeoutOnDrop` waits for the connection tasks when dropped.
const WEBSOCKET_CLOSE_TIMEOUT: Duration = Duration::from_secs(3);

// Interval between pings, and how long to wait for the matching pong.
const PING_INTERVAL: Duration = Duration::from_secs(30);
const PING_TIMEOUT: Duration = Duration::from_secs(3);

// Delay before retrying after a failed connection attempt.
const RECONNECT_INTERVAL: Duration = Duration::from_secs(10);

/// Reply payload for a dealer request; only carries success or failure.
pub struct Response {
    pub success: bool,
}

/// One-shot reply handle for a single dealer request.
pub struct Responder {
    // Correlation key of the request being answered.
    key: String,
    // Channel into the websocket send task.
    tx: mpsc::UnboundedSender<WsMessage>,
    // Whether a reply has been sent (or deliberately suppressed).
    sent: bool,
}
impl Responder {
    fn new(key: String, tx: mpsc::UnboundedSender<WsMessage>) -> Self {
        Self {
            key,
            tx,
            sent: false,
        }
    }

    /// Serializes and enqueues the reply frame. Must be called at most once.
    fn send_internal(&mut self, response: Response) {
        let reply = serde_json::json!({
            "type": "reply",
            "key": &self.key,
            "payload": {
                "success": response.success,
            }
        })
        .to_string();

        if let Err(e) = self.tx.send(WsMessage::Text(reply)) {
            warn!("Wasn't able to reply to dealer request: {}", e);
        }
    }

    /// Answers the request, consuming the responder.
    pub fn send(mut self, response: Response) {
        self.send_internal(response);
        self.sent = true;
    }

    /// Consumes the responder without replying, suppressing the automatic
    /// failure reply from `Drop`.
    pub fn force_unanswered(mut self) {
        self.sent = true;
    }
}
impl Drop for Responder {
    fn drop(&mut self) {
        // A request that was never answered is reported as failed.
        if !self.sent {
            self.send_internal(Response { success: false });
        }
    }
}

/// Anything that can answer a dealer request through a [`Responder`].
pub trait IntoResponse {
    fn respond(self, responder: Responder);
}

impl IntoResponse for Response {
    fn respond(self, responder: Responder) {
        responder.send(self)
    }
}

// A future resolving to a `Response` answers asynchronously on a spawned task.
impl<F> IntoResponse for F
where
    F: Future<Output = Response> + Send + 'static,
{
    fn respond(self, responder: Responder) {
        tokio::spawn(async move {
            responder.send(self.await);
        });
    }
}

// Any `Fn(Request) -> impl IntoResponse` closure can act as a request handler.
impl<F, R> RequestHandler for F
where
    F: (Fn(Request) -> R) + Send + 'static,
    R: IntoResponse,
{
    fn handle_request(&self, request: Request, responder: Responder) {
        self(request).respond(responder);
    }
}

/// Handles dealer requests addressed to a registered uri.
pub trait RequestHandler: Send + 'static {
    fn handle_request(&self, request: Request, responder: Responder);
}

type MessageHandler = mpsc::UnboundedSender<Message>;

// TODO: Maybe it's possible to unregister subscription directly when they
// are dropped instead of on next failed attempt.
/// Stream of messages delivered for a set of subscribed uris.
pub struct Subscription(UnboundedReceiver<Message>);

impl Stream for Subscription {
    type Item = Message;

    fn poll_next(
        mut self: Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> Poll<Option<Self::Item>> {
        self.0.poll_recv(cx)
    }
}
/// Splits a dealer uri into its scheme followed by its path components.
///
/// Supports `hm://a/b/c` (slash-separated) and `spotify:a:b:c`
/// (colon-separated); returns `None` for any other scheme.
fn split_uri(s: &str) -> Option<impl Iterator<Item = &'_ str>> {
    let (scheme, sep, rest) = if let Some(rest) = s.strip_prefix("hm://") {
        ("hm", '/', rest)
    } else if let Some(rest) = s.strip_prefix("spotify:") {
        // Bug fix: this was `strip_suffix`, which can never match a scheme
        // written at the *start* of the uri, so every `spotify:` uri was
        // rejected and its messages/requests silently dropped.
        ("spotify", ':', rest)
    } else {
        return None;
    };

    // Ignore trailing separators so `hm://a/b/` equals `hm://a/b`.
    let rest = rest.trim_end_matches(sep);
    let split = rest.split(sep);

    Some(iter::once(scheme).chain(split))
}
/// Errors returned when registering a request handler.
#[derive(Debug, Clone, Error)]
pub enum AddHandlerError {
    #[error("There is already a handler for the given uri")]
    AlreadyHandled,
    #[error("The specified uri {0} is invalid")]
    InvalidUri(String),
}

impl From<AddHandlerError> for Error {
    fn from(err: AddHandlerError) -> Self {
        // Map each variant onto the closest generic error kind.
        match err {
            AddHandlerError::AlreadyHandled => Error::aborted(err),
            AddHandlerError::InvalidUri(_) => Error::invalid_argument(err),
        }
    }
}
#[derive(Debug, Clone, Error)]
pub enum SubscriptionError {
#[error("The specified uri is invalid")]
InvalidUri(String),
}
impl From<SubscriptionError> for Error {
    fn from(err: SubscriptionError) -> Self {
        // Only variant is an invalid uri, which is a caller error.
        Error::invalid_argument(err)
    }
}
/// Registers `handler` under the split components of `uri`.
fn add_handler(
    map: &mut HandlerMap<Box<dyn RequestHandler>>,
    uri: &str,
    handler: impl RequestHandler,
) -> Result<(), Error> {
    let split = split_uri(uri).ok_or_else(|| AddHandlerError::InvalidUri(uri.to_string()))?;
    map.insert(split, Box::new(handler))
}

/// Removes the handler registered under `uri`; `None` if the uri does not
/// parse or no handler is registered.
fn remove_handler<T>(map: &mut HandlerMap<T>, uri: &str) -> Option<T> {
    map.remove(split_uri(uri)?)
}

/// Subscribes a single channel to all of `uris`; messages for any of them
/// arrive on the returned `Subscription`.
fn subscribe(
    map: &mut SubscriberMap<MessageHandler>,
    uris: &[&str],
) -> Result<Subscription, Error> {
    let (tx, rx) = mpsc::unbounded_channel();

    for &uri in uris {
        let split = split_uri(uri).ok_or_else(|| SubscriptionError::InvalidUri(uri.to_string()))?;
        map.insert(split, tx.clone());
    }

    Ok(Subscription(rx))
}
/// Collects request handlers and subscriptions before the dealer starts.
#[derive(Default)]
pub struct Builder {
    message_handlers: SubscriberMap<MessageHandler>,
    request_handlers: HandlerMap<Box<dyn RequestHandler>>,
}

// Builds a `Dealer` from a `Builder`: moves the handler maps into the shared
// state, then spawns `$body` (the connection loop) with `$shared` bound to a
// clone of that state.
macro_rules! create_dealer {
    ($builder:expr, $shared:ident -> $body:expr) => {
        match $builder {
            builder => {
                let shared = Arc::new(DealerShared {
                    message_handlers: Mutex::new(builder.message_handlers),
                    request_handlers: Mutex::new(builder.request_handlers),
                    notify_drop: Semaphore::new(0),
                });

                let handle = {
                    let $shared = Arc::clone(&shared);
                    tokio::spawn($body)
                };

                Dealer {
                    shared,
                    handle: TimeoutOnDrop::new(handle, WEBSOCKET_CLOSE_TIMEOUT),
                }
            }
        }
    };
}
impl Builder {
    pub fn new() -> Self {
        Self::default()
    }

    /// Registers a request handler for `uri` before the dealer is launched.
    pub fn add_handler(&mut self, uri: &str, handler: impl RequestHandler) -> Result<(), Error> {
        add_handler(&mut self.request_handlers, uri, handler)
    }

    /// Subscribes to messages for `uris` before the dealer is launched.
    pub fn subscribe(&mut self, uris: &[&str]) -> Result<Subscription, Error> {
        subscribe(&mut self.message_handlers, uris)
    }

    /// Builds the dealer, connecting (and reconnecting) entirely in a
    /// background task.
    pub fn launch_in_background<Fut, F>(self, get_url: F, proxy: Option<Url>) -> Dealer
    where
        Fut: Future<Output = Url> + Send + 'static,
        F: (FnMut() -> Fut) + Send + 'static,
    {
        create_dealer!(self, shared -> run(shared, None, get_url, proxy))
    }

    /// Builds the dealer, returning an error if the *first* connection
    /// attempt fails; afterwards reconnection continues in the background.
    pub async fn launch<Fut, F>(self, mut get_url: F, proxy: Option<Url>) -> WsResult<Dealer>
    where
        Fut: Future<Output = Url> + Send + 'static,
        F: (FnMut() -> Fut) + Send + 'static,
    {
        let dealer = create_dealer!(self, shared -> {
            // Try to connect.
            let url = get_url().await;
            let tasks = connect(&url, proxy.as_ref(), &shared).await?;

            // If a connection is established, continue in a background task.
            run(shared, Some(tasks), get_url, proxy)
        });

        Ok(dealer)
    }
}
/// State shared between the `Dealer` handle and its background tasks.
struct DealerShared {
    message_handlers: Mutex<SubscriberMap<MessageHandler>>,
    request_handlers: Mutex<HandlerMap<Box<dyn RequestHandler>>>,

    // Semaphore with 0 permits. By closing this semaphore, we indicate
    // that the actual Dealer struct has been dropped.
    notify_drop: Semaphore,
}
impl DealerShared {
    /// Fans a message out to all subscribers of its uri, dropping
    /// subscriptions whose receiving end has gone away.
    fn dispatch_message(&self, msg: Message) {
        if let Some(split) = split_uri(&msg.uri) {
            self.message_handlers
                .lock()
                .retain(split, &mut |tx| tx.send(msg.clone()).is_ok());
        }
    }

    /// Routes a request to its registered handler.
    fn dispatch_request(&self, request: Request, send_tx: &mpsc::UnboundedSender<WsMessage>) {
        // ResponseSender will automatically send "success: false" if it is dropped without an answer.
        // The responder must therefore be created *before* any early return.
        let responder = Responder::new(request.key.clone(), send_tx.clone());

        let split = if let Some(split) = split_uri(&request.message_ident) {
            split
        } else {
            warn!(
                "Dealer request with invalid message_ident: {}",
                &request.message_ident
            );
            return;
        };

        {
            // Scope the lock so it is released before the warn below.
            let handler_map = self.request_handlers.lock();

            if let Some(handler) = handler_map.get(split) {
                handler.handle_request(request, responder);
                return;
            }
        }

        warn!("No handler for message_ident: {}", &request.message_ident);
    }

    fn dispatch(&self, m: MessageOrRequest, send_tx: &mpsc::UnboundedSender<WsMessage>) {
        match m {
            MessageOrRequest::Message(m) => self.dispatch_message(m),
            MessageOrRequest::Request(r) => self.dispatch_request(r, send_tx),
        }
    }

    /// Resolves once the `Dealer` has been closed. The semaphore never has
    /// permits, so `acquire` only ever returns when the semaphore is closed.
    async fn closed(&self) {
        if self.notify_drop.acquire().await.is_ok() {
            error!("should never have gotten a permit");
        }
    }

    fn is_closed(&self) -> bool {
        self.notify_drop.is_closed()
    }
}
/// Handle to a running dealer connection.
pub struct Dealer {
    shared: Arc<DealerShared>,
    // Background connection loop; bounded wait on drop.
    handle: TimeoutOnDrop<()>,
}

impl Dealer {
    /// Registers a request handler for `uri` at runtime.
    pub fn add_handler<H>(&self, uri: &str, handler: H) -> Result<(), Error>
    where
        H: RequestHandler,
    {
        add_handler(&mut self.shared.request_handlers.lock(), uri, handler)
    }

    /// Removes and returns the handler registered for `uri`, if any.
    pub fn remove_handler(&self, uri: &str) -> Option<Box<dyn RequestHandler>> {
        remove_handler(&mut self.shared.request_handlers.lock(), uri)
    }

    /// Subscribes to messages for the given uris.
    pub fn subscribe(&self, uris: &[&str]) -> Result<Subscription, Error> {
        subscribe(&mut self.shared.message_handlers.lock(), uris)
    }

    /// Signals the background tasks to shut down and waits for them.
    pub async fn close(mut self) {
        debug!("closing dealer");

        // Closing the semaphore wakes everything awaiting `DealerShared::closed`.
        self.shared.notify_drop.close();

        if let Some(handle) = self.handle.take() {
            if let Err(e) = CancelOnDrop(handle).await {
                error!("error aborting dealer operations: {}", e);
            }
        }
    }
}
/// Initializes a connection and returns futures that will finish when the connection is closed/lost.
async fn connect(
    address: &Url,
    proxy: Option<&Url>,
    shared: &Arc<DealerShared>,
) -> WsResult<(JoinHandle<()>, JoinHandle<()>)> {
    let host = address
        .host_str()
        .ok_or(WsError::Url(UrlError::NoHostName))?;

    let default_port = match address.scheme() {
        "ws" => 80,
        "wss" => 443,
        _ => return Err(WsError::Url(UrlError::UnsupportedUrlScheme)),
    };

    let port = address.port().unwrap_or(default_port);

    let stream = socket::connect(host, port, proxy).await?;

    let (mut ws_tx, ws_rx) = tokio_tungstenite::client_async_tls(address, stream)
        .await?
        .0
        .split();

    let (send_tx, mut send_rx) = mpsc::unbounded_channel::<WsMessage>();

    // Spawn a task that will forward messages from the channel to the websocket.
    let send_task = {
        let shared = Arc::clone(shared);

        tokio::spawn(async move {
            // `biased` gives the dealer-drop check priority over queued
            // messages and flushing.
            let result = loop {
                select! {
                    biased;
                    () = shared.closed() => {
                        break Ok(None);
                    }
                    msg = send_rx.recv() => {
                        if let Some(msg) = msg {
                            // New message arrived through channel
                            if let WsMessage::Close(close_frame) = msg {
                                break Ok(close_frame);
                            }
                            if let Err(e) = ws_tx.feed(msg).await {
                                break Err(e);
                            }
                        } else {
                            break Ok(None);
                        }
                    },
                    e = keep_flushing(&mut ws_tx) => {
                        break Err(e)
                    }
                    else => (),
                }
            };

            send_rx.close();

            // I don't trust in tokio_tungstenite's implementation of Sink::close.
            let result = match result {
                Ok(close_frame) => ws_tx.send(WsMessage::Close(close_frame)).await,
                Err(WsError::AlreadyClosed) | Err(WsError::ConnectionClosed) => ws_tx.flush().await,
                Err(e) => {
                    warn!("Dealer finished with an error: {}", e);
                    ws_tx.send(WsMessage::Close(None)).await
                }
            };

            if let Err(e) = result {
                warn!("Error while closing websocket: {}", e);
            }

            debug!("Dropping send task");
        })
    };

    let shared = Arc::clone(shared);

    // A task that receives messages from the web socket.
    let receive_task = tokio::spawn(async {
        let pong_received = AtomicBool::new(true);
        // Rebinding moves these values into the (non-`move`) async block.
        let send_tx = send_tx;
        let shared = shared;

        let receive_task = async {
            let mut ws_rx = ws_rx;

            loop {
                match ws_rx.next().await {
                    Some(Ok(msg)) => match msg {
                        WsMessage::Text(t) => match serde_json::from_str(&t) {
                            Ok(m) => shared.dispatch(m, &send_tx),
                            Err(e) => info!("Received invalid message: {}", e),
                        },
                        WsMessage::Binary(_) => {
                            info!("Received invalid binary message");
                        }
                        WsMessage::Pong(_) => {
                            debug!("Received pong");
                            pong_received.store(true, atomic::Ordering::Relaxed);
                        }
                        _ => (), // tungstenite handles Close and Ping automatically
                    },
                    Some(Err(e)) => {
                        warn!("Websocket connection failed: {}", e);
                        break;
                    }
                    None => {
                        debug!("Websocket connection closed.");
                        break;
                    }
                }
            }
        };

        // Sends pings and checks whether a pong comes back.
        let ping_task = async {
            use tokio::time::{interval, sleep};

            let mut timer = interval(PING_INTERVAL);

            loop {
                timer.tick().await;

                pong_received.store(false, atomic::Ordering::Relaxed);
                if send_tx.send(WsMessage::Ping(vec![])).is_err() {
                    // The sender is closed.
                    break;
                }

                debug!("Sent ping");

                sleep(PING_TIMEOUT).await;

                // NOTE(review): stored with `Relaxed` above but loaded with
                // `SeqCst` here — the mismatch looks unintentional, though it
                // is harmless for a single flag.
                if !pong_received.load(atomic::Ordering::SeqCst) {
                    // No response
                    warn!("Websocket peer does not respond.");
                    break;
                }
            }
        };

        // Exit this task as soon as one our subtasks fails.
        // In both cases the connection is probably lost.
        select! {
            () = ping_task => (),
            () = receive_task => ()
        }

        // Try to take send_task down with us, in case it's still alive.
        let _ = send_tx.send(WsMessage::Close(None));

        debug!("Dropping receive task");
    });

    Ok((send_task, receive_task))
}
/// The main background task for `Dealer`, which coordinates reconnecting.
async fn run<F, Fut>(
    shared: Arc<DealerShared>,
    initial_tasks: Option<(JoinHandle<()>, JoinHandle<()>)>,
    mut get_url: F,
    proxy: Option<Url>,
) where
    Fut: Future<Output = Url> + Send + 'static,
    F: (FnMut() -> Fut) + Send + 'static,
{
    // Wrap connection tasks so dropping them waits (bounded) for shutdown.
    let init_task = |t| Some(TimeoutOnDrop::new(t, WEBSOCKET_CLOSE_TIMEOUT));

    let mut tasks = if let Some((s, r)) = initial_tasks {
        (init_task(s), init_task(r))
    } else {
        (None, None)
    };

    while !shared.is_closed() {
        match &mut tasks {
            // Both connection tasks alive: wait until one finishes
            // (connection lost) or the dealer itself is closed.
            (Some(t0), Some(t1)) => {
                select! {
                    () = shared.closed() => break,
                    r = t0 => {
                        if let Err(e) = r {
                            error!("timeout on task 0: {}", e);
                        }
                        tasks.0.take();
                    },
                    r = t1 => {
                        if let Err(e) = r {
                            error!("timeout on task 1: {}", e);
                        }
                        tasks.1.take();
                    }
                }
            }
            // At least one task is gone: fetch a fresh url and (re)connect.
            _ => {
                let url = select! {
                    () = shared.closed() => {
                        break
                    },
                    e = get_url() => e
                };

                match connect(&url, proxy.as_ref(), &shared).await {
                    Ok((s, r)) => tasks = (init_task(s), init_task(r)),
                    Err(e) => {
                        error!("Error while connecting: {}", e);
                        tokio::time::sleep(RECONNECT_INTERVAL).await;
                    }
                }
            }
        }
    }

    // Wait for any remaining tasks to wind down before returning.
    let tasks = tasks.0.into_iter().chain(tasks.1);

    let _ = join_all(tasks).await;
}

View file

@ -0,0 +1,39 @@
use std::collections::HashMap;
use serde::Deserialize;
pub type JsonValue = serde_json::Value;
pub type JsonObject = serde_json::Map<String, JsonValue>;

/// Body of a dealer request: a command sent by another device.
#[derive(Clone, Debug, Deserialize)]
pub struct Payload {
    pub message_id: i32,
    pub sent_by_device_id: String,
    pub command: JsonObject,
}

/// A dealer request that expects a reply, correlated by `key`.
#[derive(Clone, Debug, Deserialize)]
pub struct Request {
    #[serde(default)]
    pub headers: HashMap<String, String>,
    pub message_ident: String,
    pub key: String,
    pub payload: Payload,
}

/// A fire-and-forget dealer message addressed to `uri`.
#[derive(Clone, Debug, Deserialize)]
pub struct Message {
    #[serde(default)]
    pub headers: HashMap<String, String>,
    pub method: Option<String>,
    #[serde(default)]
    pub payloads: Vec<JsonValue>,
    pub uri: String,
}

// Incoming frames carry a `"type"` tag: `"message"` or `"request"`.
#[derive(Clone, Debug, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub(super) enum MessageOrRequest {
    Message(Message),
    Request(Request),
}

447
core/src/error.rs Normal file
View file

@ -0,0 +1,447 @@
use std::{error, fmt, num::ParseIntError, str::Utf8Error, string::FromUtf8Error};
use base64::DecodeError;
use http::{
header::{InvalidHeaderName, InvalidHeaderValue, ToStrError},
method::InvalidMethod,
status::InvalidStatusCode,
uri::{InvalidUri, InvalidUriParts},
};
use protobuf::ProtobufError;
use thiserror::Error;
use tokio::sync::{mpsc::error::SendError, oneshot::error::RecvError};
use url::ParseError;
#[cfg(feature = "with-dns-sd")]
use dns_sd::DNSError;
/// Generic error type: a status-like [`ErrorKind`] plus the underlying
/// source error it wraps.
#[derive(Debug)]
pub struct Error {
    pub kind: ErrorKind,
    pub error: Box<dyn error::Error + Send + Sync>,
}
/// Broad classification of an [`Error`].
// NOTE(review): names and explicit discriminants appear to mirror the gRPC
// status codes — confirm before relying on the numeric values.
#[derive(Clone, Copy, Debug, Eq, Error, Hash, Ord, PartialEq, PartialOrd)]
pub enum ErrorKind {
    #[error("The operation was cancelled by the caller")]
    Cancelled = 1,

    #[error("Unknown error")]
    Unknown = 2,

    #[error("Client specified an invalid argument")]
    InvalidArgument = 3,

    #[error("Deadline expired before operation could complete")]
    DeadlineExceeded = 4,

    #[error("Requested entity was not found")]
    NotFound = 5,

    #[error("Attempt to create entity that already exists")]
    AlreadyExists = 6,

    #[error("Permission denied")]
    PermissionDenied = 7,

    #[error("No valid authentication credentials")]
    Unauthenticated = 16,

    #[error("Resource has been exhausted")]
    ResourceExhausted = 8,

    #[error("Invalid state")]
    FailedPrecondition = 9,

    #[error("Operation aborted")]
    Aborted = 10,

    #[error("Operation attempted past the valid range")]
    OutOfRange = 11,

    #[error("Not implemented")]
    Unimplemented = 12,

    #[error("Internal error")]
    Internal = 13,

    #[error("Service unavailable")]
    Unavailable = 14,

    #[error("Unrecoverable data loss or corruption")]
    DataLoss = 15,

    #[error("Operation must not be used")]
    DoNotUse = -1,
}
#[derive(Debug, Error)]
struct ErrorMessage(String);
impl fmt::Display for ErrorMessage {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl Error {
pub fn new<E>(kind: ErrorKind, error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind,
error: error.into(),
}
}
pub fn aborted<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::Aborted,
error: error.into(),
}
}
pub fn already_exists<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::AlreadyExists,
error: error.into(),
}
}
pub fn cancelled<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::Cancelled,
error: error.into(),
}
}
pub fn data_loss<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::DataLoss,
error: error.into(),
}
}
pub fn deadline_exceeded<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::DeadlineExceeded,
error: error.into(),
}
}
pub fn do_not_use<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::DoNotUse,
error: error.into(),
}
}
pub fn failed_precondition<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::FailedPrecondition,
error: error.into(),
}
}
pub fn internal<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::Internal,
error: error.into(),
}
}
pub fn invalid_argument<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::InvalidArgument,
error: error.into(),
}
}
pub fn not_found<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::NotFound,
error: error.into(),
}
}
pub fn out_of_range<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::OutOfRange,
error: error.into(),
}
}
pub fn permission_denied<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::PermissionDenied,
error: error.into(),
}
}
pub fn resource_exhausted<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::ResourceExhausted,
error: error.into(),
}
}
pub fn unauthenticated<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::Unauthenticated,
error: error.into(),
}
}
pub fn unavailable<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::Unavailable,
error: error.into(),
}
}
pub fn unimplemented<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::Unimplemented,
error: error.into(),
}
}
pub fn unknown<E>(error: E) -> Error
where
E: Into<Box<dyn error::Error + Send + Sync>>,
{
Self {
kind: ErrorKind::Unknown,
error: error.into(),
}
}
}
impl std::error::Error for Error {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        // Delegates to the wrapped error's own source (i.e. one level
        // deeper than the wrapped error itself).
        self.error.source()
    }
}
impl fmt::Display for Error {
    /// Renders as `<kind> { <source error> }`.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "{} {{ {} }}", self.kind, self.error)
    }
}
impl From<DecodeError> for Error {
    fn from(err: DecodeError) -> Self {
        // Base64 that fails to decode means malformed upstream data.
        Self::new(ErrorKind::FailedPrecondition, err)
    }
}

#[cfg(feature = "with-dns-sd")]
impl From<DNSError> for Error {
    fn from(err: DNSError) -> Self {
        Self::new(ErrorKind::Unavailable, err)
    }
}

impl From<http::Error> for Error {
    fn from(err: http::Error) -> Self {
        // Classify by the concrete error type wrapped inside `http::Error`.
        if err.is::<InvalidHeaderName>()
            || err.is::<InvalidHeaderValue>()
            || err.is::<InvalidMethod>()
            || err.is::<InvalidUri>()
            || err.is::<InvalidUriParts>()
        {
            return Self::new(ErrorKind::InvalidArgument, err);
        }

        if err.is::<InvalidStatusCode>() {
            return Self::new(ErrorKind::FailedPrecondition, err);
        }

        Self::new(ErrorKind::Unknown, err)
    }
}
impl From<hyper::Error> for Error {
    // Predicates are checked in a fixed order; the first match wins.
    fn from(err: hyper::Error) -> Self {
        if err.is_parse() || err.is_parse_too_large() || err.is_parse_status() || err.is_user() {
            return Self::new(ErrorKind::Internal, err);
        }

        if err.is_canceled() {
            return Self::new(ErrorKind::Cancelled, err);
        }

        if err.is_connect() {
            return Self::new(ErrorKind::Unavailable, err);
        }

        if err.is_incomplete_message() {
            return Self::new(ErrorKind::DataLoss, err);
        }

        if err.is_body_write_aborted() || err.is_closed() {
            return Self::new(ErrorKind::Aborted, err);
        }

        if err.is_timeout() {
            return Self::new(ErrorKind::DeadlineExceeded, err);
        }

        Self::new(ErrorKind::Unknown, err)
    }
}
// Parsing and decoding failures below map to `FailedPrecondition`, since
// they indicate malformed data rather than caller mistakes.

impl From<quick_xml::Error> for Error {
    fn from(err: quick_xml::Error) -> Self {
        Self::new(ErrorKind::FailedPrecondition, err)
    }
}

impl From<serde_json::Error> for Error {
    fn from(err: serde_json::Error) -> Self {
        Self::new(ErrorKind::FailedPrecondition, err)
    }
}

impl From<std::io::Error> for Error {
    // Translates each I/O error kind to the closest generic kind.
    fn from(err: std::io::Error) -> Self {
        use std::io::ErrorKind as IoErrorKind;

        match err.kind() {
            IoErrorKind::NotFound => Self::new(ErrorKind::NotFound, err),
            IoErrorKind::PermissionDenied => Self::new(ErrorKind::PermissionDenied, err),
            IoErrorKind::AddrInUse | IoErrorKind::AlreadyExists => {
                Self::new(ErrorKind::AlreadyExists, err)
            }
            IoErrorKind::AddrNotAvailable
            | IoErrorKind::ConnectionRefused
            | IoErrorKind::NotConnected => Self::new(ErrorKind::Unavailable, err),
            IoErrorKind::BrokenPipe
            | IoErrorKind::ConnectionReset
            | IoErrorKind::ConnectionAborted => Self::new(ErrorKind::Aborted, err),
            IoErrorKind::Interrupted | IoErrorKind::WouldBlock => {
                Self::new(ErrorKind::Cancelled, err)
            }
            IoErrorKind::InvalidData | IoErrorKind::UnexpectedEof => {
                Self::new(ErrorKind::FailedPrecondition, err)
            }
            IoErrorKind::TimedOut => Self::new(ErrorKind::DeadlineExceeded, err),
            IoErrorKind::InvalidInput => Self::new(ErrorKind::InvalidArgument, err),
            IoErrorKind::WriteZero => Self::new(ErrorKind::ResourceExhausted, err),
            _ => Self::new(ErrorKind::Unknown, err),
        }
    }
}

impl From<FromUtf8Error> for Error {
    fn from(err: FromUtf8Error) -> Self {
        Self::new(ErrorKind::FailedPrecondition, err)
    }
}

impl From<InvalidHeaderValue> for Error {
    fn from(err: InvalidHeaderValue) -> Self {
        Self::new(ErrorKind::InvalidArgument, err)
    }
}

impl From<InvalidUri> for Error {
    fn from(err: InvalidUri) -> Self {
        Self::new(ErrorKind::InvalidArgument, err)
    }
}

impl From<ParseError> for Error {
    fn from(err: ParseError) -> Self {
        Self::new(ErrorKind::FailedPrecondition, err)
    }
}

impl From<ParseIntError> for Error {
    fn from(err: ParseIntError) -> Self {
        Self::new(ErrorKind::FailedPrecondition, err)
    }
}

impl From<ProtobufError> for Error {
    fn from(err: ProtobufError) -> Self {
        Self::new(ErrorKind::FailedPrecondition, err)
    }
}

impl From<RecvError> for Error {
    fn from(err: RecvError) -> Self {
        Self::new(ErrorKind::Internal, err)
    }
}

// NOTE(review): stores only the formatted message rather than the error
// itself, avoiding a `T`-dependent bound on the boxed error — presumably
// intentional; confirm.
impl<T> From<SendError<T>> for Error {
    fn from(err: SendError<T>) -> Self {
        Self {
            kind: ErrorKind::Internal,
            error: ErrorMessage(err.to_string()).into(),
        }
    }
}

impl From<ToStrError> for Error {
    fn from(err: ToStrError) -> Self {
        Self::new(ErrorKind::FailedPrecondition, err)
    }
}

impl From<Utf8Error> for Error {
    fn from(err: Utf8Error) -> Self {
        Self::new(ErrorKind::FailedPrecondition, err)
    }
}

56
core/src/file_id.rs Normal file
View file

@ -0,0 +1,56 @@
use std::fmt;
use librespot_protocol as protocol;
use crate::{spotify_id::to_base16, Error};
/// A 20-byte file identifier used by the metadata protocol.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct FileId(pub [u8; 20]);

impl FileId {
    /// Copies the 20 bytes of `src` into a new `FileId`.
    ///
    /// # Panics
    /// Panics if `src` is not exactly 20 bytes long.
    pub fn from_raw(src: &[u8]) -> FileId {
        let mut dst = [0u8; 20];
        // `u8` is `Copy`: `copy_from_slice` (a plain memcpy) is the idiomatic
        // and cheapest choice here; `clone_from_slice` was unnecessary.
        dst.copy_from_slice(src);
        FileId(dst)
    }

    /// Renders the id as a 40-character base16 (hex) string.
    #[allow(clippy::wrong_self_convention)]
    pub fn to_base16(&self) -> Result<String, Error> {
        to_base16(&self.0, &mut [0u8; 40])
    }
}
impl fmt::Debug for FileId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Shows the `Result` of the hex conversion, e.g. `FileId(Ok("…"))`.
        f.debug_tuple("FileId").field(&self.to_base16()).finish()
    }
}

impl fmt::Display for FileId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Falls back to an empty string if the hex conversion fails.
        f.write_str(&self.to_base16().unwrap_or_default())
    }
}

impl From<&[u8]> for FileId {
    // Panics like `from_raw` if the slice is not exactly 20 bytes.
    fn from(src: &[u8]) -> Self {
        Self::from_raw(src)
    }
}

impl From<&protocol::metadata::Image> for FileId {
    fn from(image: &protocol::metadata::Image) -> Self {
        Self::from(image.get_file_id())
    }
}

impl From<&protocol::metadata::AudioFile> for FileId {
    fn from(file: &protocol::metadata::AudioFile) -> Self {
        Self::from(file.get_file_id())
    }
}

impl From<&protocol::metadata::VideoFile> for FileId {
    fn from(video: &protocol::metadata::VideoFile) -> Self {
        Self::from(video.get_file_id())
    }
}

164
core/src/http_client.rs Normal file
View file

@ -0,0 +1,164 @@
use std::env::consts::OS;
use bytes::Bytes;
use futures_util::{future::IntoStream, FutureExt};
use http::header::HeaderValue;
use hyper::{
client::{HttpConnector, ResponseFuture},
header::USER_AGENT,
Body, Client, Request, Response, StatusCode,
};
use hyper_proxy::{Intercept, Proxy, ProxyConnector};
use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
use thiserror::Error;
use url::Url;
use crate::{
version::{FALLBACK_USER_AGENT, SPOTIFY_MOBILE_VERSION, SPOTIFY_VERSION, VERSION_STRING},
Error,
};
/// Errors raised by [`HttpClient`] itself (as opposed to transport errors).
#[derive(Debug, Error)]
pub enum HttpClientError {
    #[error("Response status code: {0}")]
    StatusCode(hyper::StatusCode),
}
impl From<HttpClientError> for Error {
    /// Maps HTTP status codes onto the generic [`Error`] kinds.
    fn from(err: HttpClientError) -> Self {
        match err {
            HttpClientError::StatusCode(code) => {
                // not exhaustive, but what reasonably could be expected
                match code {
                    StatusCode::GATEWAY_TIMEOUT | StatusCode::REQUEST_TIMEOUT => {
                        Error::deadline_exceeded(err)
                    }
                    StatusCode::GONE
                    | StatusCode::NOT_FOUND
                    | StatusCode::MOVED_PERMANENTLY
                    | StatusCode::PERMANENT_REDIRECT
                    | StatusCode::TEMPORARY_REDIRECT => Error::not_found(err),
                    StatusCode::FORBIDDEN | StatusCode::PAYMENT_REQUIRED => {
                        Error::permission_denied(err)
                    }
                    StatusCode::NETWORK_AUTHENTICATION_REQUIRED
                    | StatusCode::PROXY_AUTHENTICATION_REQUIRED
                    | StatusCode::UNAUTHORIZED => Error::unauthenticated(err),
                    StatusCode::EXPECTATION_FAILED
                    | StatusCode::PRECONDITION_FAILED
                    | StatusCode::PRECONDITION_REQUIRED => Error::failed_precondition(err),
                    StatusCode::RANGE_NOT_SATISFIABLE => Error::out_of_range(err),
                    StatusCode::INTERNAL_SERVER_ERROR
                    | StatusCode::MISDIRECTED_REQUEST
                    | StatusCode::SERVICE_UNAVAILABLE
                    | StatusCode::UNAVAILABLE_FOR_LEGAL_REASONS => Error::unavailable(err),
                    StatusCode::BAD_REQUEST
                    | StatusCode::HTTP_VERSION_NOT_SUPPORTED
                    | StatusCode::LENGTH_REQUIRED
                    | StatusCode::METHOD_NOT_ALLOWED
                    | StatusCode::NOT_ACCEPTABLE
                    | StatusCode::PAYLOAD_TOO_LARGE
                    | StatusCode::REQUEST_HEADER_FIELDS_TOO_LARGE
                    | StatusCode::UNSUPPORTED_MEDIA_TYPE
                    | StatusCode::URI_TOO_LONG => Error::invalid_argument(err),
                    StatusCode::TOO_MANY_REQUESTS => Error::resource_exhausted(err),
                    StatusCode::NOT_IMPLEMENTED => Error::unimplemented(err),
                    _ => Error::unknown(err),
                }
            }
        }
    }
}
/// Shared HTTP client with a Spotify user agent and a pre-built TLS connector.
#[derive(Clone)]
pub struct HttpClient {
    user_agent: HeaderValue,
    proxy: Option<Url>,
    // Reused across requests; building the TLS configuration is expensive.
    https_connector: HttpsConnector<HttpConnector>,
}
impl HttpClient {
    /// Creates a client whose user agent mimics the Spotify client for the
    /// current OS, optionally routing all traffic through `proxy`.
    pub fn new(proxy: Option<&Url>) -> Self {
        let spotify_version = match OS {
            "android" | "ios" => SPOTIFY_MOBILE_VERSION.to_owned(),
            _ => SPOTIFY_VERSION.to_string(),
        };

        let spotify_platform = match OS {
            "android" => "Android/31",
            "ios" => "iOS/15.2.1",
            "macos" => "OSX/0",
            "windows" => "Win32/0",
            _ => "Linux/0",
        };

        let user_agent_str = &format!(
            "Spotify/{} {} ({})",
            spotify_version, spotify_platform, VERSION_STRING
        );

        // Fall back to a static agent rather than failing construction.
        let user_agent = HeaderValue::from_str(user_agent_str).unwrap_or_else(|err| {
            error!("Invalid user agent <{}>: {}", user_agent_str, err);
            HeaderValue::from_static(FALLBACK_USER_AGENT)
        });

        // configuring TLS is expensive and should be done once per process
        let https_connector = HttpsConnectorBuilder::new()
            .with_native_roots()
            .https_or_http()
            .enable_http1()
            .enable_http2()
            .build();

        Self {
            user_agent,
            proxy: proxy.cloned(),
            https_connector,
        }
    }

    /// Performs the request and returns the response.
    ///
    /// NOTE(review): any status other than `200 OK` — including other 2xx
    /// codes — is converted into an error here; confirm this is intended.
    pub async fn request(&self, req: Request<Body>) -> Result<Response<Body>, Error> {
        debug!("Requesting {}", req.uri().to_string());

        let request = self.request_fut(req)?;
        let response = request.await;

        if let Ok(response) = &response {
            let code = response.status();
            if code != StatusCode::OK {
                return Err(HttpClientError::StatusCode(code).into());
            }
        }

        Ok(response?)
    }

    /// Performs the request and buffers the entire response body.
    pub async fn request_body(&self, req: Request<Body>) -> Result<Bytes, Error> {
        let response = self.request(req).await?;
        Ok(hyper::body::to_bytes(response.into_body()).await?)
    }

    /// Dispatches the request, exposing the response future as a stream.
    pub fn request_stream(&self, req: Request<Body>) -> Result<IntoStream<ResponseFuture>, Error> {
        Ok(self.request_fut(req)?.into_stream())
    }

    /// Builds and dispatches the request, injecting the user agent header
    /// and routing through the proxy when one is configured.
    pub fn request_fut(&self, mut req: Request<Body>) -> Result<ResponseFuture, Error> {
        let headers_mut = req.headers_mut();
        headers_mut.insert(USER_AGENT, self.user_agent.clone());

        let request = if let Some(url) = &self.proxy {
            // NOTE(review): the proxied path does not enable
            // `http2_adaptive_window`, unlike the direct path below —
            // confirm whether that asymmetry is deliberate.
            let proxy_uri = url.to_string().parse()?;
            let proxy = Proxy::new(Intercept::All, proxy_uri);
            let proxy_connector = ProxyConnector::from_proxy(self.https_connector.clone(), proxy)?;

            Client::builder().build(proxy_connector).request(req)
        } else {
            Client::builder()
                .http2_adaptive_window(true)
                .build(self.https_connector.clone())
                .request(req)
        };

        Ok(request)
    }
}

View file

@ -1,26 +0,0 @@
use serde::Deserialize;
use crate::{mercury::MercuryError, session::Session};
/// An access token returned by the keymaster service.
#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Token {
    pub access_token: String,
    // Lifetime from issue — presumably seconds; confirm against the service.
    pub expires_in: u32,
    pub token_type: String,
    pub scope: Vec<String>,
}
/// Fetches an access token for `client_id` with the given (comma-separated)
/// `scopes` via Mercury.
///
/// # Errors
/// Returns `MercuryError` when the request fails, the response carries no
/// payload, or the payload is not valid JSON.
pub async fn get_token(
    session: &Session,
    client_id: &str,
    scopes: &str,
) -> Result<Token, MercuryError> {
    let url = format!(
        "hm://keymaster/token/authenticated?client_id={}&scope={}",
        client_id, scopes
    );
    let response = session.mercury().get(url).await?;
    // Fix: an empty payload is a malformed *server* response; surface it as
    // a Mercury error instead of panicking via `expect("Empty payload")`.
    let data = response.payload.first().ok_or(MercuryError)?;
    serde_json::from_slice(data.as_ref()).map_err(|_| MercuryError)
}

View file

@ -1,27 +1,43 @@
#![allow(clippy::unused_io_amount)]
#[macro_use] #[macro_use]
extern crate log; extern crate log;
extern crate num_derive;
use librespot_protocol as protocol; use librespot_protocol as protocol;
#[macro_use] #[macro_use]
mod component; mod component;
mod apresolve; pub mod apresolve;
pub mod audio_key; pub mod audio_key;
pub mod authentication; pub mod authentication;
pub mod cache; pub mod cache;
pub mod cdn_url;
pub mod channel; pub mod channel;
pub mod config; pub mod config;
mod connection; mod connection;
pub mod date;
#[allow(dead_code)]
mod dealer;
#[doc(hidden)] #[doc(hidden)]
pub mod diffie_hellman; pub mod diffie_hellman;
pub mod keymaster; pub mod error;
pub mod file_id;
mod http_client;
pub mod mercury; pub mod mercury;
pub mod packet;
mod proxytunnel; mod proxytunnel;
pub mod session; pub mod session;
mod socket;
#[allow(dead_code)]
pub mod spclient;
pub mod spotify_id; pub mod spotify_id;
pub mod token;
#[doc(hidden)] #[doc(hidden)]
pub mod util; pub mod util;
pub mod version; pub mod version;
pub use config::SessionConfig;
pub use error::Error;
pub use file_id::FileId;
pub use session::Session;
pub use spotify_id::SpotifyId;

View file

@ -1,9 +1,10 @@
use std::collections::HashMap; use std::{
use std::future::Future; collections::HashMap,
use std::mem; future::Future,
use std::pin::Pin; mem,
use std::task::Context; pin::Pin,
use std::task::Poll; task::{Context, Poll},
};
use byteorder::{BigEndian, ByteOrder}; use byteorder::{BigEndian, ByteOrder};
use bytes::Bytes; use bytes::Bytes;
@ -11,8 +12,7 @@ use futures_util::FutureExt;
use protobuf::Message; use protobuf::Message;
use tokio::sync::{mpsc, oneshot}; use tokio::sync::{mpsc, oneshot};
use crate::protocol; use crate::{packet::PacketType, protocol, util::SeqGenerator, Error};
use crate::util::SeqGenerator;
mod types; mod types;
pub use self::types::*; pub use self::types::*;
@ -32,18 +32,18 @@ component! {
pub struct MercuryPending { pub struct MercuryPending {
parts: Vec<Vec<u8>>, parts: Vec<Vec<u8>>,
partial: Option<Vec<u8>>, partial: Option<Vec<u8>>,
callback: Option<oneshot::Sender<Result<MercuryResponse, MercuryError>>>, callback: Option<oneshot::Sender<Result<MercuryResponse, Error>>>,
} }
pub struct MercuryFuture<T> { pub struct MercuryFuture<T> {
receiver: oneshot::Receiver<Result<T, MercuryError>>, receiver: oneshot::Receiver<Result<T, Error>>,
} }
impl<T> Future for MercuryFuture<T> { impl<T> Future for MercuryFuture<T> {
type Output = Result<T, MercuryError>; type Output = Result<T, Error>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> { fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
self.receiver.poll_unpin(cx).map_err(|_| MercuryError)? self.receiver.poll_unpin(cx)?
} }
} }
@ -54,7 +54,7 @@ impl MercuryManager {
seq seq
} }
fn request(&self, req: MercuryRequest) -> MercuryFuture<MercuryResponse> { fn request(&self, req: MercuryRequest) -> Result<MercuryFuture<MercuryResponse>, Error> {
let (tx, rx) = oneshot::channel(); let (tx, rx) = oneshot::channel();
let pending = MercuryPending { let pending = MercuryPending {
@ -71,13 +71,13 @@ impl MercuryManager {
}); });
let cmd = req.method.command(); let cmd = req.method.command();
let data = req.encode(&seq); let data = req.encode(&seq)?;
self.session().send_packet(cmd, data); self.session().send_packet(cmd, data)?;
MercuryFuture { receiver: rx } Ok(MercuryFuture { receiver: rx })
} }
pub fn get<T: Into<String>>(&self, uri: T) -> MercuryFuture<MercuryResponse> { pub fn get<T: Into<String>>(&self, uri: T) -> Result<MercuryFuture<MercuryResponse>, Error> {
self.request(MercuryRequest { self.request(MercuryRequest {
method: MercuryMethod::Get, method: MercuryMethod::Get,
uri: uri.into(), uri: uri.into(),
@ -86,7 +86,11 @@ impl MercuryManager {
}) })
} }
pub fn send<T: Into<String>>(&self, uri: T, data: Vec<u8>) -> MercuryFuture<MercuryResponse> { pub fn send<T: Into<String>>(
&self,
uri: T,
data: Vec<u8>,
) -> Result<MercuryFuture<MercuryResponse>, Error> {
self.request(MercuryRequest { self.request(MercuryRequest {
method: MercuryMethod::Send, method: MercuryMethod::Send,
uri: uri.into(), uri: uri.into(),
@ -102,7 +106,7 @@ impl MercuryManager {
pub fn subscribe<T: Into<String>>( pub fn subscribe<T: Into<String>>(
&self, &self,
uri: T, uri: T,
) -> impl Future<Output = Result<mpsc::UnboundedReceiver<MercuryResponse>, MercuryError>> + 'static ) -> impl Future<Output = Result<mpsc::UnboundedReceiver<MercuryResponse>, Error>> + 'static
{ {
let uri = uri.into(); let uri = uri.into();
let request = self.request(MercuryRequest { let request = self.request(MercuryRequest {
@ -114,7 +118,7 @@ impl MercuryManager {
let manager = self.clone(); let manager = self.clone();
async move { async move {
let response = request.await?; let response = request?.await?;
let (tx, rx) = mpsc::unbounded_channel(); let (tx, rx) = mpsc::unbounded_channel();
@ -124,13 +128,18 @@ impl MercuryManager {
if !response.payload.is_empty() { if !response.payload.is_empty() {
// Old subscription protocol, watch the provided list of URIs // Old subscription protocol, watch the provided list of URIs
for sub in response.payload { for sub in response.payload {
let mut sub = match protocol::pubsub::Subscription::parse_from_bytes(&sub) {
protocol::pubsub::Subscription::parse_from_bytes(&sub).unwrap(); Ok(mut sub) => {
let sub_uri = sub.take_uri(); let sub_uri = sub.take_uri();
debug!("subscribed sub_uri={}", sub_uri); debug!("subscribed sub_uri={}", sub_uri);
inner.subscriptions.push((sub_uri, tx.clone())); inner.subscriptions.push((sub_uri, tx.clone()));
}
Err(e) => {
error!("could not subscribe to {}: {}", uri, e);
}
}
} }
} else { } else {
// New subscription protocol, watch the requested URI // New subscription protocol, watch the requested URI
@ -143,7 +152,28 @@ impl MercuryManager {
} }
} }
pub(crate) fn dispatch(&self, cmd: u8, mut data: Bytes) { pub fn listen_for<T: Into<String>>(
&self,
uri: T,
) -> impl Future<Output = mpsc::UnboundedReceiver<MercuryResponse>> + 'static {
let uri = uri.into();
let manager = self.clone();
async move {
let (tx, rx) = mpsc::unbounded_channel();
manager.lock(move |inner| {
if !inner.invalid {
debug!("listening to uri={}", uri);
inner.subscriptions.push((uri, tx));
}
});
rx
}
}
pub(crate) fn dispatch(&self, cmd: PacketType, mut data: Bytes) -> Result<(), Error> {
let seq_len = BigEndian::read_u16(data.split_to(2).as_ref()) as usize; let seq_len = BigEndian::read_u16(data.split_to(2).as_ref()) as usize;
let seq = data.split_to(seq_len).as_ref().to_owned(); let seq = data.split_to(seq_len).as_ref().to_owned();
@ -154,14 +184,17 @@ impl MercuryManager {
let mut pending = match pending { let mut pending = match pending {
Some(pending) => pending, Some(pending) => pending,
None if cmd == 0xb5 => MercuryPending {
parts: Vec::new(),
partial: None,
callback: None,
},
None => { None => {
warn!("Ignore seq {:?} cmd {:x}", seq, cmd); if let PacketType::MercuryEvent = cmd {
return; MercuryPending {
parts: Vec::new(),
partial: None,
callback: None,
}
} else {
warn!("Ignore seq {:?} cmd {:x}", seq, cmd as u8);
return Err(MercuryError::Command(cmd).into());
}
} }
}; };
@ -180,10 +213,12 @@ impl MercuryManager {
} }
if flags == 0x1 { if flags == 0x1 {
self.complete_request(cmd, pending); self.complete_request(cmd, pending)?;
} else { } else {
self.lock(move |inner| inner.pending.insert(seq, pending)); self.lock(move |inner| inner.pending.insert(seq, pending));
} }
Ok(())
} }
fn parse_part(data: &mut Bytes) -> Vec<u8> { fn parse_part(data: &mut Bytes) -> Vec<u8> {
@ -191,9 +226,9 @@ impl MercuryManager {
data.split_to(size).as_ref().to_owned() data.split_to(size).as_ref().to_owned()
} }
fn complete_request(&self, cmd: u8, mut pending: MercuryPending) { fn complete_request(&self, cmd: PacketType, mut pending: MercuryPending) -> Result<(), Error> {
let header_data = pending.parts.remove(0); let header_data = pending.parts.remove(0);
let header = protocol::mercury::Header::parse_from_bytes(&header_data).unwrap(); let header = protocol::mercury::Header::parse_from_bytes(&header_data)?;
let response = MercuryResponse { let response = MercuryResponse {
uri: header.get_uri().to_string(), uri: header.get_uri().to_string(),
@ -201,29 +236,33 @@ impl MercuryManager {
payload: pending.parts, payload: pending.parts,
}; };
if response.status_code >= 500 { let status_code = response.status_code;
panic!("Spotify servers returned an error. Restart librespot."); if status_code >= 500 {
} else if response.status_code >= 400 { error!("error {} for uri {}", status_code, &response.uri);
warn!("error {} for uri {}", response.status_code, &response.uri); Err(MercuryError::Response(response).into())
} else if status_code >= 400 {
error!("error {} for uri {}", status_code, &response.uri);
if let Some(cb) = pending.callback { if let Some(cb) = pending.callback {
let _ = cb.send(Err(MercuryError)); cb.send(Err(MercuryError::Response(response.clone()).into()))
.map_err(|_| MercuryError::Channel)?;
} }
} else if cmd == 0xb5 { Err(MercuryError::Response(response).into())
} else if let PacketType::MercuryEvent = cmd {
// TODO: This is just a workaround to make utf-8 encoded usernames work.
// A better solution would be to use an uri struct and urlencode it directly
// before sending while saving the subscription under its unencoded form.
let mut uri_split = response.uri.split('/');
let encoded_uri = std::iter::once(uri_split.next().unwrap_or_default().to_string())
.chain(uri_split.map(|component| {
form_urlencoded::byte_serialize(component.as_bytes()).collect::<String>()
}))
.collect::<Vec<String>>()
.join("/");
let mut found = false;
self.lock(|inner| { self.lock(|inner| {
let mut found = false;
// TODO: This is just a workaround to make utf-8 encoded usernames work.
// A better solution would be to use an uri struct and urlencode it directly
// before sending while saving the subscription under its unencoded form.
let mut uri_split = response.uri.split('/');
let encoded_uri = std::iter::once(uri_split.next().unwrap().to_string())
.chain(uri_split.map(|component| {
form_urlencoded::byte_serialize(component.as_bytes()).collect::<String>()
}))
.collect::<Vec<String>>()
.join("/");
inner.subscriptions.retain(|&(ref prefix, ref sub)| { inner.subscriptions.retain(|&(ref prefix, ref sub)| {
if encoded_uri.starts_with(prefix) { if encoded_uri.starts_with(prefix) {
found = true; found = true;
@ -236,13 +275,21 @@ impl MercuryManager {
true true
} }
}); });
});
if !found { if !found {
debug!("unknown subscription uri={}", response.uri); debug!("unknown subscription uri={}", &response.uri);
} trace!("response pushed over Mercury: {:?}", response);
}) Err(MercuryError::Response(response).into())
} else {
Ok(())
}
} else if let Some(cb) = pending.callback { } else if let Some(cb) = pending.callback {
let _ = cb.send(Ok(response)); cb.send(Ok(response)).map_err(|_| MercuryError::Channel)?;
Ok(())
} else {
error!("can't handle Mercury response: {:?}", response);
Err(MercuryError::Response(response).into())
} }
} }

View file

@ -1,6 +1,8 @@
use std::collections::VecDeque; use std::collections::VecDeque;
use super::*; use super::{MercuryFuture, MercuryManager, MercuryResponse};
use crate::Error;
pub struct MercurySender { pub struct MercurySender {
mercury: MercuryManager, mercury: MercuryManager,
@ -23,12 +25,13 @@ impl MercurySender {
self.buffered_future.is_none() && self.pending.is_empty() self.buffered_future.is_none() && self.pending.is_empty()
} }
pub fn send(&mut self, item: Vec<u8>) { pub fn send(&mut self, item: Vec<u8>) -> Result<(), Error> {
let task = self.mercury.send(self.uri.clone(), item); let task = self.mercury.send(self.uri.clone(), item)?;
self.pending.push_back(task); self.pending.push_back(task);
Ok(())
} }
pub async fn flush(&mut self) -> Result<(), MercuryError> { pub async fn flush(&mut self) -> Result<(), Error> {
if self.buffered_future.is_none() { if self.buffered_future.is_none() {
self.buffered_future = self.pending.pop_front(); self.buffered_future = self.pending.pop_front();
} }

View file

@ -1,8 +1,10 @@
use byteorder::{BigEndian, WriteBytesExt};
use protobuf::Message;
use std::io::Write; use std::io::Write;
use crate::protocol; use byteorder::{BigEndian, WriteBytesExt};
use protobuf::Message;
use thiserror::Error;
use crate::{packet::PacketType, protocol, Error};
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub enum MercuryMethod { pub enum MercuryMethod {
@ -27,8 +29,25 @@ pub struct MercuryResponse {
pub payload: Vec<Vec<u8>>, pub payload: Vec<Vec<u8>>,
} }
#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)] #[derive(Debug, Error)]
pub struct MercuryError; pub enum MercuryError {
#[error("callback receiver was disconnected")]
Channel,
#[error("error handling packet type: {0:?}")]
Command(PacketType),
#[error("error handling Mercury response: {0:?}")]
Response(MercuryResponse),
}
impl From<MercuryError> for Error {
fn from(err: MercuryError) -> Self {
match err {
MercuryError::Channel => Error::aborted(err),
MercuryError::Command(_) => Error::unimplemented(err),
MercuryError::Response(_) => Error::unavailable(err),
}
}
}
impl ToString for MercuryMethod { impl ToString for MercuryMethod {
fn to_string(&self) -> String { fn to_string(&self) -> String {
@ -43,24 +62,23 @@ impl ToString for MercuryMethod {
} }
impl MercuryMethod { impl MercuryMethod {
pub fn command(&self) -> u8 { pub fn command(&self) -> PacketType {
use PacketType::*;
match *self { match *self {
MercuryMethod::Get | MercuryMethod::Send => 0xb2, MercuryMethod::Get | MercuryMethod::Send => MercuryReq,
MercuryMethod::Sub => 0xb3, MercuryMethod::Sub => MercurySub,
MercuryMethod::Unsub => 0xb4, MercuryMethod::Unsub => MercuryUnsub,
} }
} }
} }
impl MercuryRequest { impl MercuryRequest {
pub fn encode(&self, seq: &[u8]) -> Vec<u8> { pub fn encode(&self, seq: &[u8]) -> Result<Vec<u8>, Error> {
let mut packet = Vec::new(); let mut packet = Vec::new();
packet.write_u16::<BigEndian>(seq.len() as u16).unwrap(); packet.write_u16::<BigEndian>(seq.len() as u16)?;
packet.write_all(seq).unwrap(); packet.write_all(seq)?;
packet.write_u8(1).unwrap(); // Flags: FINAL packet.write_u8(1)?; // Flags: FINAL
packet packet.write_u16::<BigEndian>(1 + self.payload.len() as u16)?; // Part count
.write_u16::<BigEndian>(1 + self.payload.len() as u16)
.unwrap(); // Part count
let mut header = protocol::mercury::Header::new(); let mut header = protocol::mercury::Header::new();
header.set_uri(self.uri.clone()); header.set_uri(self.uri.clone());
@ -70,16 +88,14 @@ impl MercuryRequest {
header.set_content_type(content_type.clone()); header.set_content_type(content_type.clone());
} }
packet packet.write_u16::<BigEndian>(header.compute_size() as u16)?;
.write_u16::<BigEndian>(header.compute_size() as u16) header.write_to_writer(&mut packet)?;
.unwrap();
header.write_to_writer(&mut packet).unwrap();
for p in &self.payload { for p in &self.payload {
packet.write_u16::<BigEndian>(p.len() as u16).unwrap(); packet.write_u16::<BigEndian>(p.len() as u16)?;
packet.write(p).unwrap(); packet.write_all(p)?;
} }
packet Ok(packet)
} }
} }

41
core/src/packet.rs Normal file
View file

@ -0,0 +1,41 @@
// Ported from librespot-java. Relicensed under MIT with permission.
use num_derive::{FromPrimitive, ToPrimitive};
/// Command byte identifying each packet on the encrypted access-point
/// connection; the discriminant is the on-the-wire command id.
/// `FromPrimitive` is used by the session dispatcher to decode incoming
/// command bytes; unknown bytes decode to `None`.
#[derive(Debug, Copy, Clone, FromPrimitive, ToPrimitive)]
pub enum PacketType {
SecretBlock = 0x02,
// Keep-alive: server Ping is answered with Pong.
Ping = 0x04,
// Channel-based audio streaming.
StreamChunk = 0x08,
StreamChunkRes = 0x09,
ChannelError = 0x0a,
ChannelAbort = 0x0b,
// AES audio-key request/response, dispatched to the AudioKeyManager.
RequestKey = 0x0c,
AesKey = 0x0d,
AesKeyError = 0x0e,
Image = 0x19,
CountryCode = 0x1b,
Pong = 0x49,
PongAck = 0x4a,
Pause = 0x4b,
ProductInfo = 0x50,
LegacyWelcome = 0x69,
LicenseVersion = 0x76,
// Authentication handshake — presumably login request/accept/reject; TODO confirm.
Login = 0xab,
APWelcome = 0xac,
AuthFailure = 0xad,
// Mercury (hm://) request/subscription protocol, dispatched to MercuryManager.
MercuryReq = 0xb2,
MercurySub = 0xb3,
MercuryUnsub = 0xb4,
MercuryEvent = 0xb5,
TrackEndedTime = 0x82,
UnknownDataAllZeros = 0x1f,
PreferredLocale = 0x74,
// Observed on the wire but with unknown semantics.
Unknown0x0f = 0x0f,
Unknown0x10 = 0x10,
Unknown0x4f = 0x4f,
// TODO - occurs when subscribing with an empty URI. Maybe a MercuryError?
// Payload: b"\0\x08\0\0\0\0\0\0\0\0\x01\0\x01\0\x03 \xb0\x06"
Unknown0xb6 = 0xb6,
}

View file

@ -1,29 +1,41 @@
use std::future::Future; use std::{
use std::io; collections::HashMap,
use std::pin::Pin; future::Future,
use std::sync::atomic::{AtomicUsize, Ordering}; io,
use std::sync::{Arc, RwLock, Weak}; pin::Pin,
use std::task::Context; process::exit,
use std::task::Poll; sync::{Arc, Weak},
use std::time::{SystemTime, UNIX_EPOCH}; task::{Context, Poll},
time::{SystemTime, UNIX_EPOCH},
};
use byteorder::{BigEndian, ByteOrder}; use byteorder::{BigEndian, ByteOrder};
use bytes::Bytes; use bytes::Bytes;
use futures_core::TryStream; use futures_core::TryStream;
use futures_util::{future, ready, StreamExt, TryStreamExt}; use futures_util::{future, ready, StreamExt, TryStreamExt};
use num_traits::FromPrimitive;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use parking_lot::RwLock;
use quick_xml::events::Event;
use thiserror::Error; use thiserror::Error;
use tokio::sync::mpsc; use tokio::sync::mpsc;
use tokio_stream::wrappers::UnboundedReceiverStream; use tokio_stream::wrappers::UnboundedReceiverStream;
use crate::apresolve::apresolve; use crate::{
use crate::audio_key::AudioKeyManager; apresolve::ApResolver,
use crate::authentication::Credentials; audio_key::AudioKeyManager,
use crate::cache::Cache; authentication::Credentials,
use crate::channel::ChannelManager; cache::Cache,
use crate::config::SessionConfig; channel::ChannelManager,
use crate::connection::{self, AuthenticationError}; config::SessionConfig,
use crate::mercury::MercuryManager; connection::{self, AuthenticationError},
http_client::HttpClient,
mercury::MercuryManager,
packet::PacketType,
spclient::SpClient,
token::TokenProvider,
Error,
};
#[derive(Debug, Error)] #[derive(Debug, Error)]
pub enum SessionError { pub enum SessionError {
@ -31,103 +43,119 @@ pub enum SessionError {
AuthenticationError(#[from] AuthenticationError), AuthenticationError(#[from] AuthenticationError),
#[error("Cannot create session: {0}")] #[error("Cannot create session: {0}")]
IoError(#[from] io::Error), IoError(#[from] io::Error),
#[error("Session is not connected")]
NotConnected,
#[error("packet {0} unknown")]
Packet(u8),
} }
impl From<SessionError> for Error {
fn from(err: SessionError) -> Self {
match err {
SessionError::AuthenticationError(_) => Error::unauthenticated(err),
SessionError::IoError(_) => Error::unavailable(err),
SessionError::NotConnected => Error::unavailable(err),
SessionError::Packet(_) => Error::unimplemented(err),
}
}
}
pub type UserAttributes = HashMap<String, String>;
#[derive(Debug, Clone, Default)]
pub struct UserData {
pub country: String,
pub canonical_username: String,
pub attributes: UserAttributes,
}
#[derive(Debug, Clone, Default)]
struct SessionData { struct SessionData {
country: String, client_id: String,
connection_id: String,
time_delta: i64, time_delta: i64,
canonical_username: String,
invalid: bool, invalid: bool,
user_data: UserData,
} }
struct SessionInternal { struct SessionInternal {
config: SessionConfig, config: SessionConfig,
data: RwLock<SessionData>, data: RwLock<SessionData>,
tx_connection: mpsc::UnboundedSender<(u8, Vec<u8>)>, http_client: HttpClient,
tx_connection: OnceCell<mpsc::UnboundedSender<(u8, Vec<u8>)>>,
apresolver: OnceCell<ApResolver>,
audio_key: OnceCell<AudioKeyManager>, audio_key: OnceCell<AudioKeyManager>,
channel: OnceCell<ChannelManager>, channel: OnceCell<ChannelManager>,
mercury: OnceCell<MercuryManager>, mercury: OnceCell<MercuryManager>,
spclient: OnceCell<SpClient>,
token_provider: OnceCell<TokenProvider>,
cache: Option<Arc<Cache>>, cache: Option<Arc<Cache>>,
handle: tokio::runtime::Handle, handle: tokio::runtime::Handle,
session_id: usize,
} }
static SESSION_COUNTER: AtomicUsize = AtomicUsize::new(0);
#[derive(Clone)] #[derive(Clone)]
pub struct Session(Arc<SessionInternal>); pub struct Session(Arc<SessionInternal>);
impl Session { impl Session {
pub async fn connect( pub fn new(config: SessionConfig, cache: Option<Cache>) -> Self {
config: SessionConfig, let http_client = HttpClient::new(config.proxy.as_ref());
credentials: Credentials,
cache: Option<Cache>,
store_credentials: bool,
) -> Result<(Session, Credentials), SessionError> {
let ap = apresolve(config.proxy.as_ref(), config.ap_port).await;
info!("Connecting to AP \"{}\"", ap); debug!("new Session");
let mut conn = connection::connect(ap, config.proxy.as_ref()).await?;
let session_data = SessionData {
client_id: config.client_id.clone(),
..SessionData::default()
};
Self(Arc::new(SessionInternal {
config,
data: RwLock::new(session_data),
http_client,
tx_connection: OnceCell::new(),
cache: cache.map(Arc::new),
apresolver: OnceCell::new(),
audio_key: OnceCell::new(),
channel: OnceCell::new(),
mercury: OnceCell::new(),
spclient: OnceCell::new(),
token_provider: OnceCell::new(),
handle: tokio::runtime::Handle::current(),
}))
}
pub async fn connect(
&self,
credentials: Credentials,
store_credentials: bool,
) -> Result<(), Error> {
let ap = self.apresolver().resolve("accesspoint").await?;
info!("Connecting to AP \"{}:{}\"", ap.0, ap.1);
let mut transport = connection::connect(&ap.0, ap.1, self.config().proxy.as_ref()).await?;
let reusable_credentials = let reusable_credentials =
connection::authenticate(&mut conn, credentials, &config.device_id).await?; connection::authenticate(&mut transport, credentials, &self.config().device_id).await?;
info!("Authenticated as \"{}\" !", reusable_credentials.username); info!("Authenticated as \"{}\" !", reusable_credentials.username);
if let Some(cache) = &cache { self.set_username(&reusable_credentials.username);
if let Some(cache) = self.cache() {
if store_credentials { if store_credentials {
cache.save_credentials(&reusable_credentials); cache.save_credentials(&reusable_credentials);
} }
} }
let session = Session::create( let (tx_connection, rx_connection) = mpsc::unbounded_channel();
conn, self.0
config, .tx_connection
cache, .set(tx_connection)
reusable_credentials.username.clone(), .map_err(|_| SessionError::NotConnected)?;
tokio::runtime::Handle::current(),
);
Ok((session, reusable_credentials))
}
fn create(
transport: connection::Transport,
config: SessionConfig,
cache: Option<Cache>,
username: String,
handle: tokio::runtime::Handle,
) -> Session {
let (sink, stream) = transport.split(); let (sink, stream) = transport.split();
let sender_task = UnboundedReceiverStream::new(rx_connection)
let (sender_tx, sender_rx) = mpsc::unbounded_channel();
let session_id = SESSION_COUNTER.fetch_add(1, Ordering::Relaxed);
debug!("new Session[{}]", session_id);
let session = Session(Arc::new(SessionInternal {
config,
data: RwLock::new(SessionData {
country: String::new(),
canonical_username: username,
invalid: false,
time_delta: 0,
}),
tx_connection: sender_tx,
cache: cache.map(Arc::new),
audio_key: OnceCell::new(),
channel: OnceCell::new(),
mercury: OnceCell::new(),
handle,
session_id,
}));
let sender_task = UnboundedReceiverStream::new(sender_rx)
.map(Ok) .map(Ok)
.forward(sink); .forward(sink);
let receiver_task = DispatchTask(stream, session.weak()); let receiver_task = DispatchTask(stream, self.weak());
tokio::spawn(async move { tokio::spawn(async move {
let result = future::try_join(sender_task, receiver_task).await; let result = future::try_join(sender_task, receiver_task).await;
@ -137,7 +165,13 @@ impl Session {
} }
}); });
session Ok(())
}
pub fn apresolver(&self) -> &ApResolver {
self.0
.apresolver
.get_or_init(|| ApResolver::new(self.weak()))
} }
pub fn audio_key(&self) -> &AudioKeyManager { pub fn audio_key(&self) -> &AudioKeyManager {
@ -152,14 +186,28 @@ impl Session {
.get_or_init(|| ChannelManager::new(self.weak())) .get_or_init(|| ChannelManager::new(self.weak()))
} }
pub fn http_client(&self) -> &HttpClient {
&self.0.http_client
}
pub fn mercury(&self) -> &MercuryManager { pub fn mercury(&self) -> &MercuryManager {
self.0 self.0
.mercury .mercury
.get_or_init(|| MercuryManager::new(self.weak())) .get_or_init(|| MercuryManager::new(self.weak()))
} }
pub fn spclient(&self) -> &SpClient {
self.0.spclient.get_or_init(|| SpClient::new(self.weak()))
}
pub fn token_provider(&self) -> &TokenProvider {
self.0
.token_provider
.get_or_init(|| TokenProvider::new(self.weak()))
}
pub fn time_delta(&self) -> i64 { pub fn time_delta(&self) -> i64 {
self.0.data.read().unwrap().time_delta self.0.data.read().time_delta
} }
pub fn spawn<T>(&self, task: T) pub fn spawn<T>(&self, task: T)
@ -172,17 +220,38 @@ impl Session {
fn debug_info(&self) { fn debug_info(&self) {
debug!( debug!(
"Session[{}] strong={} weak={}", "Session strong={} weak={}",
self.0.session_id,
Arc::strong_count(&self.0), Arc::strong_count(&self.0),
Arc::weak_count(&self.0) Arc::weak_count(&self.0)
); );
} }
#[allow(clippy::match_same_arms)] fn check_catalogue(attributes: &UserAttributes) {
fn dispatch(&self, cmd: u8, data: Bytes) { if let Some(account_type) = attributes.get("type") {
match cmd { if account_type != "premium" {
0x4 => { error!("librespot does not support {:?} accounts.", account_type);
info!("Please support Spotify and your artists and sign up for a premium account.");
// TODO: logout instead of exiting
exit(1);
}
}
}
fn dispatch(&self, cmd: u8, data: Bytes) -> Result<(), Error> {
use PacketType::*;
let packet_type = FromPrimitive::from_u8(cmd);
let cmd = match packet_type {
Some(cmd) => cmd,
None => {
trace!("Ignoring unknown packet {:x}", cmd);
return Err(SessionError::Packet(cmd).into());
}
};
match packet_type {
Some(Ping) => {
let server_timestamp = BigEndian::read_u32(data.as_ref()) as i64; let server_timestamp = BigEndian::read_u32(data.as_ref()) as i64;
let timestamp = match SystemTime::now().duration_since(UNIX_EPOCH) { let timestamp = match SystemTime::now().duration_since(UNIX_EPOCH) {
Ok(dur) => dur, Ok(dur) => dur,
@ -190,66 +259,170 @@ impl Session {
} }
.as_secs() as i64; .as_secs() as i64;
self.0.data.write().unwrap().time_delta = server_timestamp - timestamp; self.0.data.write().time_delta = server_timestamp - timestamp;
self.debug_info(); self.debug_info();
self.send_packet(0x49, vec![0, 0, 0, 0]); self.send_packet(Pong, vec![0, 0, 0, 0])
} }
0x4a => (), Some(CountryCode) => {
0x1b => { let country = String::from_utf8(data.as_ref().to_owned())?;
let country = String::from_utf8(data.as_ref().to_owned()).unwrap();
info!("Country: {:?}", country); info!("Country: {:?}", country);
self.0.data.write().unwrap().country = country; self.0.data.write().user_data.country = country;
Ok(())
} }
Some(StreamChunkRes) | Some(ChannelError) => self.channel().dispatch(cmd, data),
Some(AesKey) | Some(AesKeyError) => self.audio_key().dispatch(cmd, data),
Some(MercuryReq) | Some(MercurySub) | Some(MercuryUnsub) | Some(MercuryEvent) => {
self.mercury().dispatch(cmd, data)
}
Some(ProductInfo) => {
let data = std::str::from_utf8(&data)?;
let mut reader = quick_xml::Reader::from_str(data);
0x9 | 0xa => self.channel().dispatch(cmd, data), let mut buf = Vec::new();
0xd | 0xe => self.audio_key().dispatch(cmd, data), let mut current_element = String::new();
0xb2..=0xb6 => self.mercury().dispatch(cmd, data), let mut user_attributes: UserAttributes = HashMap::new();
_ => (),
loop {
match reader.read_event(&mut buf) {
Ok(Event::Start(ref element)) => {
current_element = std::str::from_utf8(element.name())?.to_owned()
}
Ok(Event::End(_)) => {
current_element = String::new();
}
Ok(Event::Text(ref value)) => {
if !current_element.is_empty() {
let _ = user_attributes.insert(
current_element.clone(),
value.unescape_and_decode(&reader)?,
);
}
}
Ok(Event::Eof) => break,
Ok(_) => (),
Err(e) => error!(
"Error parsing XML at position {}: {:?}",
reader.buffer_position(),
e
),
}
}
trace!("Received product info: {:#?}", user_attributes);
Self::check_catalogue(&user_attributes);
self.0.data.write().user_data.attributes = user_attributes;
Ok(())
}
Some(PongAck)
| Some(SecretBlock)
| Some(LegacyWelcome)
| Some(UnknownDataAllZeros)
| Some(LicenseVersion) => Ok(()),
_ => {
trace!("Ignoring {:?} packet with data {:#?}", cmd, data);
Err(SessionError::Packet(cmd as u8).into())
}
} }
} }
pub fn send_packet(&self, cmd: u8, data: Vec<u8>) { pub fn send_packet(&self, cmd: PacketType, data: Vec<u8>) -> Result<(), Error> {
self.0.tx_connection.send((cmd, data)).unwrap(); match self.0.tx_connection.get() {
Some(tx) => Ok(tx.send((cmd as u8, data))?),
None => Err(SessionError::NotConnected.into()),
}
} }
pub fn cache(&self) -> Option<&Arc<Cache>> { pub fn cache(&self) -> Option<&Arc<Cache>> {
self.0.cache.as_ref() self.0.cache.as_ref()
} }
fn config(&self) -> &SessionConfig { pub fn config(&self) -> &SessionConfig {
&self.0.config &self.0.config
} }
pub fn username(&self) -> String { // This clones a fairly large struct, so use a specific getter or setter unless
self.0.data.read().unwrap().canonical_username.clone() // you need more fields at once, in which case this can spare multiple `read`
} // locks.
pub fn user_data(&self) -> UserData {
pub fn country(&self) -> String { self.0.data.read().user_data.clone()
self.0.data.read().unwrap().country.clone()
} }
pub fn device_id(&self) -> &str { pub fn device_id(&self) -> &str {
&self.config().device_id &self.config().device_id
} }
pub fn client_id(&self) -> String {
self.0.data.read().client_id.clone()
}
pub fn set_client_id(&self, client_id: &str) {
self.0.data.write().client_id = client_id.to_owned();
}
pub fn connection_id(&self) -> String {
self.0.data.read().connection_id.clone()
}
pub fn set_connection_id(&self, connection_id: &str) {
self.0.data.write().connection_id = connection_id.to_owned();
}
pub fn username(&self) -> String {
self.0.data.read().user_data.canonical_username.clone()
}
pub fn set_username(&self, username: &str) {
self.0.data.write().user_data.canonical_username = username.to_owned();
}
pub fn country(&self) -> String {
self.0.data.read().user_data.country.clone()
}
pub fn set_user_attribute(&self, key: &str, value: &str) -> Option<String> {
let mut dummy_attributes = UserAttributes::new();
dummy_attributes.insert(key.to_owned(), value.to_owned());
Self::check_catalogue(&dummy_attributes);
self.0
.data
.write()
.user_data
.attributes
.insert(key.to_owned(), value.to_owned())
}
pub fn set_user_attributes(&self, attributes: UserAttributes) {
Self::check_catalogue(&attributes);
self.0.data.write().user_data.attributes.extend(attributes)
}
pub fn get_user_attribute(&self, key: &str) -> Option<String> {
self.0
.data
.read()
.user_data
.attributes
.get(key)
.map(Clone::clone)
}
fn weak(&self) -> SessionWeak { fn weak(&self) -> SessionWeak {
SessionWeak(Arc::downgrade(&self.0)) SessionWeak(Arc::downgrade(&self.0))
} }
pub fn session_id(&self) -> usize {
self.0.session_id
}
pub fn shutdown(&self) { pub fn shutdown(&self) {
debug!("Invalidating session[{}]", self.0.session_id); debug!("Invalidating session");
self.0.data.write().unwrap().invalid = true; self.0.data.write().invalid = true;
self.mercury().shutdown(); self.mercury().shutdown();
self.channel().shutdown(); self.channel().shutdown();
} }
pub fn is_invalid(&self) -> bool { pub fn is_invalid(&self) -> bool {
self.0.data.read().unwrap().invalid self.0.data.read().invalid
} }
} }
@ -262,13 +435,14 @@ impl SessionWeak {
} }
pub(crate) fn upgrade(&self) -> Session { pub(crate) fn upgrade(&self) -> Session {
self.try_upgrade().expect("Session died") self.try_upgrade()
.expect("session was dropped and so should have this component")
} }
} }
impl Drop for SessionInternal { impl Drop for SessionInternal {
fn drop(&mut self) { fn drop(&mut self) {
debug!("drop Session[{}]", self.session_id); debug!("drop Session");
} }
} }
@ -303,7 +477,9 @@ where
} }
}; };
session.dispatch(cmd, data); if let Err(e) = session.dispatch(cmd, data) {
error!("could not dispatch command: {}", e);
}
} }
} }
} }

34
core/src/socket.rs Normal file
View file

@ -0,0 +1,34 @@
use std::{io, net::ToSocketAddrs};
use tokio::net::TcpStream;
use url::Url;
use crate::proxytunnel;
/// Open a TCP connection to `host:port`, tunnelling through an HTTP proxy
/// when one is configured.
///
/// With a proxy, only the proxy's own address is resolved locally; the
/// target host name is passed through the CONNECT tunnel for the proxy to
/// resolve. Without one, the access point address is resolved directly.
pub async fn connect(host: &str, port: u16, proxy: Option<&Url>) -> io::Result<TcpStream> {
match proxy {
Some(proxy_url) => {
info!("Using proxy \"{}\"", proxy_url);
let resolved = proxy_url.socket_addrs(|| None)?;
let proxy_addr = resolved.into_iter().next().ok_or_else(|| {
io::Error::new(
io::ErrorKind::NotFound,
"Can't resolve proxy server address",
)
})?;
let stream = TcpStream::connect(&proxy_addr).await?;
// Upgrade the raw stream into a CONNECT tunnel to the destination.
proxytunnel::proxy_connect(stream, host, &port.to_string()).await
}
None => {
let target = (host, port).to_socket_addrs()?.next().ok_or_else(|| {
io::Error::new(
io::ErrorKind::NotFound,
"Can't resolve access point address",
)
})?;
TcpStream::connect(&target).await
}
}
}

496
core/src/spclient.rs Normal file
View file

@ -0,0 +1,496 @@
use std::{
convert::TryInto,
fmt::Write,
time::{Duration, Instant},
};
use bytes::Bytes;
use futures_util::future::IntoStream;
use http::header::HeaderValue;
use hyper::{
client::ResponseFuture,
header::{ACCEPT, AUTHORIZATION, CONTENT_ENCODING, CONTENT_TYPE, RANGE},
Body, HeaderMap, Method, Request,
};
use protobuf::Message;
use rand::Rng;
use thiserror::Error;
use crate::{
apresolve::SocketAddress,
cdn_url::CdnUrl,
error::ErrorKind,
protocol::{
canvaz::EntityCanvazRequest,
clienttoken_http::{ClientTokenRequest, ClientTokenRequestType, ClientTokenResponse},
connect::PutStateRequest,
extended_metadata::BatchedEntityRequest,
},
token::Token,
version, Error, FileId, SpotifyId,
};
// Per-session component (declared via this crate's `component!` macro):
// `SpClient` wraps `SpClientInner`, whose fields are accessed through the
// component's `lock()` helper:
// - `accesspoint`: memoized spclient access point, `None` until resolved
//   or after `flush_accesspoint()`.
// - `strategy`: retry behavior for `request()`.
// - `client_token`: cached client token, refreshed when expired.
component! {
    SpClient : SpClientInner {
        accesspoint: Option<SocketAddress> = None,
        strategy: RequestStrategy = RequestStrategy::default(),
        client_token: Option<Token> = None,
    }
}
/// Result of an spclient HTTP request: the raw response body on success.
pub type SpClientResult = Result<Bytes, Error>;
/// Errors specific to [`SpClient`], beyond the crate-wide `Error`.
#[derive(Debug, Error)]
pub enum SpClientError {
    /// A user attribute needed to build a request URL was not present in the
    /// session (see `get_audio_preview`, `get_head_file`, `get_image`).
    #[error("missing attribute {0}")]
    Attribute(String),
}
impl From<SpClientError> for Error {
    // A missing attribute means the session should have provided it before
    // the request was attempted, hence a failed precondition.
    fn from(err: SpClientError) -> Self {
        Self::failed_precondition(err)
    }
}
/// How [`SpClient::request`] retries failing requests.
#[derive(Copy, Clone, Debug)]
pub enum RequestStrategy {
    /// Give up after this many attempts. The current access point is kept on
    /// the final failure; flushing it is left to the caller.
    TryTimes(usize),
    /// Keep retrying forever, with randomized, increasing backoff.
    Infinitely,
}
impl Default for RequestStrategy {
fn default() -> Self {
RequestStrategy::TryTimes(10)
}
}
impl SpClient {
    /// Replaces the retry strategy used by [`SpClient::request`].
    pub fn set_strategy(&self, strategy: RequestStrategy) {
        self.lock(|inner| inner.strategy = strategy)
    }

    /// Drops the memoized access point so the next `base_url()` call
    /// resolves a fresh one.
    // NOTE(review): declared `async` but contains no `.await`; kept async
    // for interface stability with callers that `.await` it.
    pub async fn flush_accesspoint(&self) {
        self.lock(|inner| inner.accesspoint = None)
    }

    /// Returns the spclient access point, resolving and memoizing one on
    /// first use (or after `flush_accesspoint()`).
    pub async fn get_accesspoint(&self) -> Result<SocketAddress, Error> {
        // Memoize the current access point.
        let ap = self.lock(|inner| inner.accesspoint.clone());
        let tuple = match ap {
            Some(tuple) => tuple,
            None => {
                let tuple = self.session().apresolver().resolve("spclient").await?;
                self.lock(|inner| inner.accesspoint = Some(tuple.clone()));
                info!(
                    "Resolved \"{}:{}\" as spclient access point",
                    tuple.0, tuple.1
                );
                tuple
            }
        };
        Ok(tuple)
    }

    /// Base URL (`https://host:port`) of the current spclient access point.
    pub async fn base_url(&self) -> Result<String, Error> {
        let ap = self.get_accesspoint().await?;
        Ok(format!("https://{}:{}", ap.0, ap.1))
    }

    /// Returns a valid client token access string, requesting a new token
    /// from `clienttoken.spotify.com` when none is cached or the cached one
    /// has expired.
    pub async fn client_token(&self) -> Result<String, Error> {
        // Invalidate an expired cached token, then take whatever remains.
        let client_token = self.lock(|inner| {
            if let Some(token) = &inner.client_token {
                if token.is_expired() {
                    inner.client_token = None;
                }
            }
            inner.client_token.clone()
        });
        if let Some(client_token) = client_token {
            return Ok(client_token.access_token);
        }
        trace!("Client token unavailable or expired, requesting new token.");
        let mut message = ClientTokenRequest::new();
        message.set_request_type(ClientTokenRequestType::REQUEST_CLIENT_DATA_REQUEST);
        let client_data = message.mut_client_data();
        client_data.set_client_id(self.session().client_id());
        client_data.set_client_version(version::SEMVER.to_string());
        let connectivity_data = client_data.mut_connectivity_sdk_data();
        connectivity_data.set_device_id(self.session().device_id().to_string());
        // Fill in platform-specific data matching the running OS. The magic
        // numbers below mirror values sent by official clients — presumably
        // required by the server; confirm before changing.
        let platform_data = connectivity_data.mut_platform_specific_data();
        match std::env::consts::OS {
            "windows" => {
                // PE machine / image file machine constants per architecture.
                let (pe, image_file) = match std::env::consts::ARCH {
                    "arm" => (448, 452),
                    "aarch64" => (43620, 452),
                    "x86_64" => (34404, 34404),
                    _ => (332, 332), // x86
                };
                let windows_data = platform_data.mut_desktop_windows();
                windows_data.set_os_version(10);
                windows_data.set_os_build(21370);
                windows_data.set_platform_id(2);
                windows_data.set_unknown_value_6(9);
                windows_data.set_image_file_machine(image_file);
                windows_data.set_pe_machine(pe);
                windows_data.set_unknown_value_10(true);
            }
            "ios" => {
                let ios_data = platform_data.mut_ios();
                ios_data.set_user_interface_idiom(0);
                ios_data.set_target_iphone_simulator(false);
                ios_data.set_hw_machine("iPhone14,5".to_string());
                ios_data.set_system_version("15.2.1".to_string());
            }
            "android" => {
                let android_data = platform_data.mut_android();
                android_data.set_android_version("12.0.0_r26".to_string());
                android_data.set_api_version(31);
                android_data.set_device_name("Pixel".to_owned());
                android_data.set_model_str("GF5KQ".to_owned());
                android_data.set_vendor("Google".to_owned());
            }
            "macos" => {
                let macos_data = platform_data.mut_desktop_macos();
                macos_data.set_system_version("Darwin Kernel Version 17.7.0: Fri Oct 30 13:34:27 PDT 2020; root:xnu-4570.71.82.8~1/RELEASE_X86_64".to_string());
                macos_data.set_hw_model("iMac21,1".to_string());
                macos_data.set_compiled_cpu_type(std::env::consts::ARCH.to_string());
            }
            // Anything else reports itself as desktop Linux.
            _ => {
                let linux_data = platform_data.mut_desktop_linux();
                linux_data.set_system_name("Linux".to_string());
                linux_data.set_system_release("5.4.0-56-generic".to_string());
                linux_data
                    .set_system_version("#62-Ubuntu SMP Mon Nov 23 19:20:19 UTC 2020".to_string());
                linux_data.set_hardware(std::env::consts::ARCH.to_string());
            }
        }
        let body = message.write_to_bytes()?;
        let request = Request::builder()
            .method(&Method::POST)
            .uri("https://clienttoken.spotify.com/v1/clienttoken")
            .header(ACCEPT, HeaderValue::from_static("application/x-protobuf"))
            .header(CONTENT_ENCODING, HeaderValue::from_static(""))
            .body(Body::from(body))?;
        let response = self.session().http_client().request_body(request).await?;
        let message = ClientTokenResponse::parse_from_bytes(&response)?;
        // Cache the granted token and hand back its access string. Falls
        // back to a 2-hour lifetime when the server's refresh interval
        // doesn't fit in the target type.
        let client_token = self.lock(|inner| {
            let access_token = message.get_granted_token().get_token().to_owned();
            let client_token = Token {
                access_token: access_token.clone(),
                expires_in: Duration::from_secs(
                    message
                        .get_granted_token()
                        .get_refresh_after_seconds()
                        .try_into()
                        .unwrap_or(7200),
                ),
                token_type: "client-token".to_string(),
                scopes: message
                    .get_granted_token()
                    .get_domains()
                    .iter()
                    .map(|d| d.domain.clone())
                    .collect(),
                timestamp: Instant::now(),
            };
            trace!("Got client token: {:?}", client_token);
            inner.client_token = Some(client_token);
            access_token
        });
        Ok(client_token)
    }

    /// Sends `message` to `endpoint` with a `Content-Type:
    /// application/x-protobuf` header.
    // NOTE(review): the body is produced with `protobuf::text_format`, not
    // `write_to_bytes()`, even though the content type advertises binary
    // protobuf. This appears deliberate (the request body type is `String`)
    // — confirm the server accepts text format before changing.
    pub async fn request_with_protobuf(
        &self,
        method: &Method,
        endpoint: &str,
        headers: Option<HeaderMap>,
        message: &dyn Message,
    ) -> SpClientResult {
        let body = protobuf::text_format::print_to_string(message);
        let mut headers = headers.unwrap_or_else(HeaderMap::new);
        headers.insert(
            CONTENT_TYPE,
            HeaderValue::from_static("application/x-protobuf"),
        );
        self.request(method, endpoint, Some(headers), Some(body))
            .await
    }

    /// Sends a request to `endpoint` asking for a JSON response
    /// (`Accept: application/json`).
    pub async fn request_as_json(
        &self,
        method: &Method,
        endpoint: &str,
        headers: Option<HeaderMap>,
        body: Option<String>,
    ) -> SpClientResult {
        let mut headers = headers.unwrap_or_else(HeaderMap::new);
        headers.insert(ACCEPT, HeaderValue::from_static("application/json"));
        self.request(method, endpoint, Some(headers), body).await
    }

    /// Core request loop: builds the URL from the current access point,
    /// attaches a bearer token, and retries per the configured
    /// [`RequestStrategy`] with randomized exponential backoff.
    ///
    /// # Errors
    ///
    /// Returns the last failing response when retries are exhausted, or any
    /// error from URL/token/request construction.
    pub async fn request(
        &self,
        method: &Method,
        endpoint: &str,
        headers: Option<HeaderMap>,
        body: Option<String>,
    ) -> SpClientResult {
        let mut tries: usize = 0;
        let mut last_response;
        let body = body.unwrap_or_default();
        loop {
            tries += 1;
            // Reconnection logic: retrieve the endpoint every iteration, so we can try
            // another access point when we are experiencing network issues (see below).
            let mut url = self.base_url().await?;
            url.push_str(endpoint);
            // Add metrics. There is also an optional `partner` key with a value like
            // `vodafone-uk` but we've yet to discover how we can find that value.
            let separator = match url.find('?') {
                Some(_) => "&",
                None => "?",
            };
            let _ = write!(url, "{}product=0", separator);
            let mut request = Request::builder()
                .method(method)
                .uri(url)
                .body(Body::from(body.clone()))?;
            // Reconnection logic: keep getting (cached) tokens because they might have expired.
            let token = self
                .session()
                .token_provider()
                .get_token("playlist-read")
                .await?;
            // Caller-supplied headers replace the map wholesale; the
            // authorization header is (re-)inserted afterwards.
            let headers_mut = request.headers_mut();
            if let Some(ref hdrs) = headers {
                *headers_mut = hdrs.clone();
            }
            headers_mut.insert(
                AUTHORIZATION,
                HeaderValue::from_str(&format!("{} {}", token.token_type, token.access_token,))?,
            );
            last_response = self.session().http_client().request_body(request).await;
            if last_response.is_ok() {
                return last_response;
            }
            // Break before the reconnection logic below, so that the current access point
            // is retained when max_tries == 1. Leave it up to the caller when to flush.
            if let RequestStrategy::TryTimes(max_tries) = self.lock(|inner| inner.strategy) {
                if tries >= max_tries {
                    break;
                }
            }
            // Reconnection logic: drop the current access point if we are experiencing issues.
            // This will cause the next call to base_url() to resolve a new one.
            if let Err(ref network_error) = last_response {
                match network_error.kind {
                    ErrorKind::Unavailable | ErrorKind::DeadlineExceeded => {
                        // Keep trying the current access point three times before dropping it.
                        if tries % 3 == 0 {
                            self.flush_accesspoint().await
                        }
                    }
                    _ => break, // if we can't build the request now, then we won't ever
                }
            }
            // When retrying, avoid hammering the Spotify infrastructure by sleeping a while.
            // The backoff time is chosen randomly from an ever-increasing range.
            let max_seconds = u64::pow(tries as u64, 2) * 3;
            let backoff = Duration::from_secs(rand::thread_rng().gen_range(1..=max_seconds));
            warn!(
                "Unable to complete API request, waiting {} seconds before retrying...",
                backoff.as_secs(),
            );
            debug!("Error was: {:?}", last_response);
            tokio::time::sleep(backoff).await;
        }
        last_response
    }

    /// Uploads the Spotify Connect device `state` for this session's device,
    /// tagged with the given `connection_id`.
    pub async fn put_connect_state(
        &self,
        connection_id: String,
        state: PutStateRequest,
    ) -> SpClientResult {
        let endpoint = format!("/connect-state/v1/devices/{}", self.session().device_id());
        let mut headers = HeaderMap::new();
        headers.insert("X-Spotify-Connection-Id", connection_id.parse()?);
        self.request_with_protobuf(&Method::PUT, &endpoint, Some(headers), &state)
            .await
    }

    /// Fetches metadata for `id` from the `/metadata/4/{scope}/…` endpoint;
    /// `scope` is one of "track", "episode", "album", "artist", "show".
    pub async fn get_metadata(&self, scope: &str, id: SpotifyId) -> SpClientResult {
        let endpoint = format!("/metadata/4/{}/{}", scope, id.to_base16()?);
        self.request(&Method::GET, &endpoint, None, None).await
    }

    /// Convenience wrapper: track metadata.
    pub async fn get_track_metadata(&self, track_id: SpotifyId) -> SpClientResult {
        self.get_metadata("track", track_id).await
    }

    /// Convenience wrapper: episode metadata.
    pub async fn get_episode_metadata(&self, episode_id: SpotifyId) -> SpClientResult {
        self.get_metadata("episode", episode_id).await
    }

    /// Convenience wrapper: album metadata.
    pub async fn get_album_metadata(&self, album_id: SpotifyId) -> SpClientResult {
        self.get_metadata("album", album_id).await
    }

    /// Convenience wrapper: artist metadata.
    pub async fn get_artist_metadata(&self, artist_id: SpotifyId) -> SpClientResult {
        self.get_metadata("artist", artist_id).await
    }

    /// Convenience wrapper: show metadata.
    pub async fn get_show_metadata(&self, show_id: SpotifyId) -> SpClientResult {
        self.get_metadata("show", show_id).await
    }

    /// Fetches color-lyrics (JSON) for a track.
    pub async fn get_lyrics(&self, track_id: SpotifyId) -> SpClientResult {
        let endpoint = format!("/color-lyrics/v1/track/{}", track_id.to_base62()?);
        self.request_as_json(&Method::GET, &endpoint, None, None)
            .await
    }

    /// Fetches color-lyrics (JSON) for a track, themed to a cover image.
    pub async fn get_lyrics_for_image(
        &self,
        track_id: SpotifyId,
        image_id: FileId,
    ) -> SpClientResult {
        let endpoint = format!(
            "/color-lyrics/v2/track/{}/image/spotify:image:{}",
            track_id.to_base62()?,
            image_id
        );
        self.request_as_json(&Method::GET, &endpoint, None, None)
            .await
    }

    // TODO: Find endpoint for newer canvas.proto and upgrade to that.
    /// Fetches canvas videos for the entities in `request`.
    pub async fn get_canvases(&self, request: EntityCanvazRequest) -> SpClientResult {
        let endpoint = "/canvaz-cache/v0/canvases";
        self.request_with_protobuf(&Method::POST, endpoint, None, &request)
            .await
    }

    /// Fetches extended metadata for a batch of entities.
    pub async fn get_extended_metadata(&self, request: BatchedEntityRequest) -> SpClientResult {
        let endpoint = "/extended-metadata/v0/extended-metadata";
        self.request_with_protobuf(&Method::POST, endpoint, None, &request)
            .await
    }

    /// Resolves the storage (CDN) location of an audio file.
    pub async fn get_audio_storage(&self, file_id: FileId) -> SpClientResult {
        let endpoint = format!(
            "/storage-resolve/files/audio/interactive/{}",
            file_id.to_base16()?
        );
        self.request(&Method::GET, &endpoint, None, None).await
    }

    /// Starts a streaming GET of `length` bytes at `offset` from a CDN URL,
    /// using an HTTP `Range` header (inclusive byte range).
    pub fn stream_from_cdn(
        &self,
        cdn_url: &CdnUrl,
        offset: usize,
        length: usize,
    ) -> Result<IntoStream<ResponseFuture>, Error> {
        let url = cdn_url.try_get_url()?;
        let req = Request::builder()
            .method(&Method::GET)
            .uri(url)
            .header(
                RANGE,
                HeaderValue::from_str(&format!("bytes={}-{}", offset, offset + length - 1))?,
            )
            .body(Body::empty())?;
        let stream = self.session().http_client().request_stream(req)?;
        Ok(stream)
    }

    /// Performs a plain GET of an absolute `url` and returns the body.
    pub async fn request_url(&self, url: String) -> SpClientResult {
        let request = Request::builder()
            .method(&Method::GET)
            .uri(url)
            .body(Body::empty())?;
        self.session().http_client().request_body(request).await
    }

    // Audio preview in 96 kbps MP3, unencrypted
    /// Fetched via the session's `audio-preview-url-template` user attribute;
    /// errors with [`SpClientError::Attribute`] when the attribute is absent.
    pub async fn get_audio_preview(&self, preview_id: &FileId) -> SpClientResult {
        let attribute = "audio-preview-url-template";
        let template = self
            .session()
            .get_user_attribute(attribute)
            .ok_or_else(|| SpClientError::Attribute(attribute.to_string()))?;
        let mut url = template.replace("{id}", &preview_id.to_base16()?);
        let separator = match url.find('?') {
            Some(_) => "&",
            None => "?",
        };
        let _ = write!(url, "{}cid={}", separator, self.session().client_id());
        self.request_url(url).await
    }

    // The first 128 kB of a track, unencrypted
    /// Fetched via the session's `head-files-url` user attribute; errors with
    /// [`SpClientError::Attribute`] when the attribute is absent.
    pub async fn get_head_file(&self, file_id: FileId) -> SpClientResult {
        let attribute = "head-files-url";
        let template = self
            .session()
            .get_user_attribute(attribute)
            .ok_or_else(|| SpClientError::Attribute(attribute.to_string()))?;
        let url = template.replace("{file_id}", &file_id.to_base16()?);
        self.request_url(url).await
    }

    /// Fetches a cover image by `FileId`, via the session's `image-url` user
    /// attribute; errors with [`SpClientError::Attribute`] when absent.
    pub async fn get_image(&self, image_id: FileId) -> SpClientResult {
        let attribute = "image-url";
        let template = self
            .session()
            .get_user_attribute(attribute)
            .ok_or_else(|| SpClientError::Attribute(attribute.to_string()))?;
        let url = template.replace("{file_id}", &image_id.to_base16()?);
        self.request_url(url).await
    }
}

View file

@ -1,44 +1,81 @@
#![allow(clippy::wrong_self_convention)] use std::{
convert::{TryFrom, TryInto},
fmt,
ops::Deref,
};
use std::convert::TryInto; use thiserror::Error;
use std::fmt;
use std::string::FromUtf8Error; use crate::Error;
use librespot_protocol as protocol;
// re-export FileId for historic reasons, when it was part of this mod
pub use crate::FileId;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum SpotifyAudioType { pub enum SpotifyItemType {
Album,
Artist,
Episode,
Playlist,
Show,
Track, Track,
Podcast, Unknown,
NonPlayable,
} }
impl From<&str> for SpotifyAudioType { impl From<&str> for SpotifyItemType {
fn from(v: &str) -> Self { fn from(v: &str) -> Self {
match v { match v {
"track" => SpotifyAudioType::Track, "album" => Self::Album,
"episode" => SpotifyAudioType::Podcast, "artist" => Self::Artist,
_ => SpotifyAudioType::NonPlayable, "episode" => Self::Episode,
"playlist" => Self::Playlist,
"show" => Self::Show,
"track" => Self::Track,
_ => Self::Unknown,
} }
} }
} }
impl From<SpotifyAudioType> for &str { impl From<SpotifyItemType> for &str {
fn from(audio_type: SpotifyAudioType) -> &'static str { fn from(item_type: SpotifyItemType) -> &'static str {
match audio_type { match item_type {
SpotifyAudioType::Track => "track", SpotifyItemType::Album => "album",
SpotifyAudioType::Podcast => "episode", SpotifyItemType::Artist => "artist",
SpotifyAudioType::NonPlayable => "unknown", SpotifyItemType::Episode => "episode",
SpotifyItemType::Playlist => "playlist",
SpotifyItemType::Show => "show",
SpotifyItemType::Track => "track",
_ => "unknown",
} }
} }
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct SpotifyId { pub struct SpotifyId {
pub id: u128, pub id: u128,
pub audio_type: SpotifyAudioType, pub item_type: SpotifyItemType,
} }
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Error, Clone, Copy, PartialEq, Eq)]
pub struct SpotifyIdError; pub enum SpotifyIdError {
#[error("ID cannot be parsed")]
InvalidId,
#[error("not a valid Spotify URI")]
InvalidFormat,
#[error("URI does not belong to Spotify")]
InvalidRoot,
}
impl From<SpotifyIdError> for Error {
fn from(err: SpotifyIdError) -> Self {
Error::invalid_argument(err)
}
}
pub type SpotifyIdResult = Result<SpotifyId, Error>;
pub type NamedSpotifyIdResult = Result<NamedSpotifyId, Error>;
const BASE62_DIGITS: &[u8; 62] = b"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; const BASE62_DIGITS: &[u8; 62] = b"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
const BASE16_DIGITS: &[u8; 16] = b"0123456789abcdef"; const BASE16_DIGITS: &[u8; 16] = b"0123456789abcdef";
@ -48,11 +85,12 @@ impl SpotifyId {
const SIZE_BASE16: usize = 32; const SIZE_BASE16: usize = 32;
const SIZE_BASE62: usize = 22; const SIZE_BASE62: usize = 22;
fn track(n: u128) -> SpotifyId { /// Returns whether this `SpotifyId` is for a playable audio item, if known.
SpotifyId { pub fn is_playable(&self) -> bool {
id: n, return matches!(
audio_type: SpotifyAudioType::Track, self.item_type,
} SpotifyItemType::Episode | SpotifyItemType::Track
);
} }
/// Parses a base16 (hex) encoded [Spotify ID] into a `SpotifyId`. /// Parses a base16 (hex) encoded [Spotify ID] into a `SpotifyId`.
@ -60,29 +98,32 @@ impl SpotifyId {
/// `src` is expected to be 32 bytes long and encoded using valid characters. /// `src` is expected to be 32 bytes long and encoded using valid characters.
/// ///
/// [Spotify ID]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids /// [Spotify ID]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids
pub fn from_base16(src: &str) -> Result<SpotifyId, SpotifyIdError> { pub fn from_base16(src: &str) -> SpotifyIdResult {
let mut dst: u128 = 0; let mut dst: u128 = 0;
for c in src.as_bytes() { for c in src.as_bytes() {
let p = match c { let p = match c {
b'0'..=b'9' => c - b'0', b'0'..=b'9' => c - b'0',
b'a'..=b'f' => c - b'a' + 10, b'a'..=b'f' => c - b'a' + 10,
_ => return Err(SpotifyIdError), _ => return Err(SpotifyIdError::InvalidId.into()),
} as u128; } as u128;
dst <<= 4; dst <<= 4;
dst += p; dst += p;
} }
Ok(SpotifyId::track(dst)) Ok(Self {
id: dst,
item_type: SpotifyItemType::Unknown,
})
} }
/// Parses a base62 encoded [Spotify ID] into a `SpotifyId`. /// Parses a base62 encoded [Spotify ID] into a `u128`.
/// ///
/// `src` is expected to be 22 bytes long and encoded using valid characters. /// `src` is expected to be 22 bytes long and encoded using valid characters.
/// ///
/// [Spotify ID]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids /// [Spotify ID]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids
pub fn from_base62(src: &str) -> Result<SpotifyId, SpotifyIdError> { pub fn from_base62(src: &str) -> SpotifyIdResult {
let mut dst: u128 = 0; let mut dst: u128 = 0;
for c in src.as_bytes() { for c in src.as_bytes() {
@ -90,23 +131,29 @@ impl SpotifyId {
b'0'..=b'9' => c - b'0', b'0'..=b'9' => c - b'0',
b'a'..=b'z' => c - b'a' + 10, b'a'..=b'z' => c - b'a' + 10,
b'A'..=b'Z' => c - b'A' + 36, b'A'..=b'Z' => c - b'A' + 36,
_ => return Err(SpotifyIdError), _ => return Err(SpotifyIdError::InvalidId.into()),
} as u128; } as u128;
dst *= 62; dst *= 62;
dst += p; dst += p;
} }
Ok(SpotifyId::track(dst)) Ok(Self {
id: dst,
item_type: SpotifyItemType::Unknown,
})
} }
/// Creates a `SpotifyId` from a copy of `SpotifyId::SIZE` (16) bytes in big-endian order. /// Creates a `u128` from a copy of `SpotifyId::SIZE` (16) bytes in big-endian order.
/// ///
/// The resulting `SpotifyId` will default to a `SpotifyAudioType::TRACK`. /// The resulting `SpotifyId` will default to a `SpotifyItemType::Unknown`.
pub fn from_raw(src: &[u8]) -> Result<SpotifyId, SpotifyIdError> { pub fn from_raw(src: &[u8]) -> SpotifyIdResult {
match src.try_into() { match src.try_into() {
Ok(dst) => Ok(SpotifyId::track(u128::from_be_bytes(dst))), Ok(dst) => Ok(Self {
Err(_) => Err(SpotifyIdError), id: u128::from_be_bytes(dst),
item_type: SpotifyItemType::Unknown,
}),
Err(_) => Err(SpotifyIdError::InvalidId.into()),
} }
} }
@ -115,37 +162,46 @@ impl SpotifyId {
/// `uri` is expected to be in the canonical form `spotify:{type}:{id}`, where `{type}` /// `uri` is expected to be in the canonical form `spotify:{type}:{id}`, where `{type}`
/// can be arbitrary while `{id}` is a 22-character long, base62 encoded Spotify ID. /// can be arbitrary while `{id}` is a 22-character long, base62 encoded Spotify ID.
/// ///
/// Note that this should not be used for playlists, which have the form of
/// `spotify:user:{owner_username}:playlist:{id}`.
///
/// [Spotify URI]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids /// [Spotify URI]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids
pub fn from_uri(src: &str) -> Result<SpotifyId, SpotifyIdError> { pub fn from_uri(src: &str) -> SpotifyIdResult {
let src = src.strip_prefix("spotify:").ok_or(SpotifyIdError)?; let mut uri_parts: Vec<&str> = src.split(':').collect();
if src.len() <= SpotifyId::SIZE_BASE62 { // At minimum, should be `spotify:{type}:{id}`
return Err(SpotifyIdError); if uri_parts.len() < 3 {
return Err(SpotifyIdError::InvalidFormat.into());
} }
let colon_index = src.len() - SpotifyId::SIZE_BASE62 - 1; if uri_parts[0] != "spotify" {
return Err(SpotifyIdError::InvalidRoot.into());
if src.as_bytes()[colon_index] != b':' {
return Err(SpotifyIdError);
} }
let mut id = SpotifyId::from_base62(&src[colon_index + 1..])?; let id = uri_parts.pop().unwrap_or_default();
id.audio_type = src[..colon_index].into(); if id.len() != Self::SIZE_BASE62 {
return Err(SpotifyIdError::InvalidId.into());
}
Ok(id) Ok(Self {
item_type: uri_parts.pop().unwrap_or_default().into(),
..Self::from_base62(id)?
})
} }
/// Returns the `SpotifyId` as a base16 (hex) encoded, `SpotifyId::SIZE_BASE16` (32) /// Returns the `SpotifyId` as a base16 (hex) encoded, `SpotifyId::SIZE_BASE16` (32)
/// character long `String`. /// character long `String`.
pub fn to_base16(&self) -> Result<String, FromUtf8Error> { #[allow(clippy::wrong_self_convention)]
to_base16(&self.to_raw(), &mut [0u8; SpotifyId::SIZE_BASE16]) pub fn to_base16(&self) -> Result<String, Error> {
to_base16(&self.to_raw(), &mut [0u8; Self::SIZE_BASE16])
} }
/// Returns the `SpotifyId` as a [canonically] base62 encoded, `SpotifyId::SIZE_BASE62` (22) /// Returns the `SpotifyId` as a [canonically] base62 encoded, `SpotifyId::SIZE_BASE62` (22)
/// character long `String`. /// character long `String`.
/// ///
/// [canonically]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids /// [canonically]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids
pub fn to_base62(&self) -> Result<String, FromUtf8Error> { #[allow(clippy::wrong_self_convention)]
pub fn to_base62(&self) -> Result<String, Error> {
let mut dst = [0u8; 22]; let mut dst = [0u8; 22];
let mut i = 0; let mut i = 0;
let n = self.id; let n = self.id;
@ -183,12 +239,13 @@ impl SpotifyId {
dst.reverse(); dst.reverse();
String::from_utf8(dst.to_vec()) String::from_utf8(dst.to_vec()).map_err(|_| SpotifyIdError::InvalidId.into())
} }
/// Returns a copy of the `SpotifyId` as an array of `SpotifyId::SIZE` (16) bytes in /// Returns a copy of the `SpotifyId` as an array of `SpotifyId::SIZE` (16) bytes in
/// big-endian order. /// big-endian order.
pub fn to_raw(&self) -> [u8; SpotifyId::SIZE] { #[allow(clippy::wrong_self_convention)]
pub fn to_raw(&self) -> [u8; Self::SIZE] {
self.id.to_be_bytes() self.id.to_be_bytes()
} }
@ -200,44 +257,260 @@ impl SpotifyId {
/// be encoded as `unknown`. /// be encoded as `unknown`.
/// ///
/// [Spotify URI]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids /// [Spotify URI]: https://developer.spotify.com/documentation/web-api/#spotify-uris-and-ids
pub fn to_uri(&self) -> Result<String, FromUtf8Error> { #[allow(clippy::wrong_self_convention)]
pub fn to_uri(&self) -> Result<String, Error> {
// 8 chars for the "spotify:" prefix + 1 colon + 22 chars base62 encoded ID = 31 // 8 chars for the "spotify:" prefix + 1 colon + 22 chars base62 encoded ID = 31
// + unknown size audio_type. // + unknown size item_type.
let audio_type: &str = self.audio_type.into(); let item_type: &str = self.item_type.into();
let mut dst = String::with_capacity(31 + audio_type.len()); let mut dst = String::with_capacity(31 + item_type.len());
dst.push_str("spotify:"); dst.push_str("spotify:");
dst.push_str(audio_type); dst.push_str(item_type);
dst.push(':'); dst.push(':');
let base62 = self.to_base62()?; let base_62 = self.to_base62()?;
dst.push_str(&base62); dst.push_str(&base_62);
Ok(dst) Ok(dst)
} }
} }
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] impl fmt::Debug for SpotifyId {
pub struct FileId(pub [u8; 20]);
impl FileId {
pub fn to_base16(&self) -> Result<String, FromUtf8Error> {
to_base16(&self.0, &mut [0u8; 40])
}
}
impl fmt::Debug for FileId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("FileId").field(&self.to_base16()).finish() f.debug_tuple("SpotifyId")
.field(&self.to_uri().unwrap_or_else(|_| "invalid uri".into()))
.finish()
} }
} }
impl fmt::Display for FileId { impl fmt::Display for SpotifyId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&self.to_base16().unwrap_or_default()) f.write_str(&self.to_uri().unwrap_or_else(|_| "invalid uri".into()))
} }
} }
#[inline] #[derive(Clone, PartialEq, Eq, Hash)]
fn to_base16(src: &[u8], buf: &mut [u8]) -> Result<String, FromUtf8Error> { pub struct NamedSpotifyId {
pub inner_id: SpotifyId,
pub username: String,
}
impl NamedSpotifyId {
pub fn from_uri(src: &str) -> NamedSpotifyIdResult {
let uri_parts: Vec<&str> = src.split(':').collect();
// At minimum, should be `spotify:user:{username}:{type}:{id}`
if uri_parts.len() < 5 {
return Err(SpotifyIdError::InvalidFormat.into());
}
if uri_parts[0] != "spotify" {
return Err(SpotifyIdError::InvalidRoot.into());
}
if uri_parts[1] != "user" {
return Err(SpotifyIdError::InvalidFormat.into());
}
Ok(Self {
inner_id: SpotifyId::from_uri(src)?,
username: uri_parts[2].to_owned(),
})
}
pub fn to_uri(&self) -> Result<String, Error> {
let item_type: &str = self.inner_id.item_type.into();
let mut dst = String::with_capacity(37 + self.username.len() + item_type.len());
dst.push_str("spotify:user:");
dst.push_str(&self.username);
dst.push_str(item_type);
dst.push(':');
let base_62 = self.to_base62()?;
dst.push_str(&base_62);
Ok(dst)
}
pub fn from_spotify_id(id: SpotifyId, username: String) -> Self {
Self {
inner_id: id,
username,
}
}
}
impl Deref for NamedSpotifyId {
type Target = SpotifyId;
fn deref(&self) -> &Self::Target {
&self.inner_id
}
}
impl fmt::Debug for NamedSpotifyId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("NamedSpotifyId")
.field(
&self
.inner_id
.to_uri()
.unwrap_or_else(|_| "invalid id".into()),
)
.finish()
}
}
impl fmt::Display for NamedSpotifyId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(
&self
.inner_id
.to_uri()
.unwrap_or_else(|_| "invalid id".into()),
)
}
}
impl TryFrom<&[u8]> for SpotifyId {
type Error = crate::Error;
fn try_from(src: &[u8]) -> Result<Self, Self::Error> {
Self::from_raw(src)
}
}
impl TryFrom<&str> for SpotifyId {
type Error = crate::Error;
fn try_from(src: &str) -> Result<Self, Self::Error> {
Self::from_base62(src)
}
}
impl TryFrom<String> for SpotifyId {
type Error = crate::Error;
fn try_from(src: String) -> Result<Self, Self::Error> {
Self::try_from(src.as_str())
}
}
impl TryFrom<&Vec<u8>> for SpotifyId {
type Error = crate::Error;
fn try_from(src: &Vec<u8>) -> Result<Self, Self::Error> {
Self::try_from(src.as_slice())
}
}
impl TryFrom<&protocol::spirc::TrackRef> for SpotifyId {
type Error = crate::Error;
fn try_from(track: &protocol::spirc::TrackRef) -> Result<Self, Self::Error> {
match SpotifyId::from_raw(track.get_gid()) {
Ok(mut id) => {
id.item_type = SpotifyItemType::Track;
Ok(id)
}
Err(_) => SpotifyId::from_uri(track.get_uri()),
}
}
}
impl TryFrom<&protocol::metadata::Album> for SpotifyId {
type Error = crate::Error;
fn try_from(album: &protocol::metadata::Album) -> Result<Self, Self::Error> {
Ok(Self {
item_type: SpotifyItemType::Album,
..Self::from_raw(album.get_gid())?
})
}
}
impl TryFrom<&protocol::metadata::Artist> for SpotifyId {
type Error = crate::Error;
fn try_from(artist: &protocol::metadata::Artist) -> Result<Self, Self::Error> {
Ok(Self {
item_type: SpotifyItemType::Artist,
..Self::from_raw(artist.get_gid())?
})
}
}
impl TryFrom<&protocol::metadata::Episode> for SpotifyId {
type Error = crate::Error;
fn try_from(episode: &protocol::metadata::Episode) -> Result<Self, Self::Error> {
Ok(Self {
item_type: SpotifyItemType::Episode,
..Self::from_raw(episode.get_gid())?
})
}
}
impl TryFrom<&protocol::metadata::Track> for SpotifyId {
type Error = crate::Error;
fn try_from(track: &protocol::metadata::Track) -> Result<Self, Self::Error> {
Ok(Self {
item_type: SpotifyItemType::Track,
..Self::from_raw(track.get_gid())?
})
}
}
impl TryFrom<&protocol::metadata::Show> for SpotifyId {
type Error = crate::Error;
fn try_from(show: &protocol::metadata::Show) -> Result<Self, Self::Error> {
Ok(Self {
item_type: SpotifyItemType::Show,
..Self::from_raw(show.get_gid())?
})
}
}
impl TryFrom<&protocol::metadata::ArtistWithRole> for SpotifyId {
type Error = crate::Error;
fn try_from(artist: &protocol::metadata::ArtistWithRole) -> Result<Self, Self::Error> {
Ok(Self {
item_type: SpotifyItemType::Artist,
..Self::from_raw(artist.get_artist_gid())?
})
}
}
impl TryFrom<&protocol::playlist4_external::Item> for SpotifyId {
type Error = crate::Error;
fn try_from(item: &protocol::playlist4_external::Item) -> Result<Self, Self::Error> {
Ok(Self {
item_type: SpotifyItemType::Track,
..Self::from_uri(item.get_uri())?
})
}
}
// Note that this is the unique revision of an item's metadata on a playlist,
// not the ID of that item or playlist.
impl TryFrom<&protocol::playlist4_external::MetaItem> for SpotifyId {
type Error = crate::Error;
fn try_from(item: &protocol::playlist4_external::MetaItem) -> Result<Self, Self::Error> {
Self::try_from(item.get_revision())
}
}
// Note that this is the unique revision of a playlist, not the ID of that playlist.
impl TryFrom<&protocol::playlist4_external::SelectedListContent> for SpotifyId {
type Error = crate::Error;
fn try_from(
playlist: &protocol::playlist4_external::SelectedListContent,
) -> Result<Self, Self::Error> {
Self::try_from(playlist.get_revision())
}
}
// TODO: check meaning and format of this field in the wild. This might be a FileId,
// which is why we now don't create a separate `Playlist` enum value yet and choose
// to discard any item type.
impl TryFrom<&protocol::playlist_annotate3::TranscodedPicture> for SpotifyId {
type Error = crate::Error;
fn try_from(
picture: &protocol::playlist_annotate3::TranscodedPicture,
) -> Result<Self, Self::Error> {
Self::from_base62(picture.get_uri())
}
}
pub fn to_base16(src: &[u8], buf: &mut [u8]) -> Result<String, Error> {
let mut i = 0; let mut i = 0;
for v in src { for v in src {
buf[i] = BASE16_DIGITS[(v >> 4) as usize]; buf[i] = BASE16_DIGITS[(v >> 4) as usize];
@ -245,7 +518,7 @@ fn to_base16(src: &[u8], buf: &mut [u8]) -> Result<String, FromUtf8Error> {
i += 2; i += 2;
} }
String::from_utf8(buf.to_vec()) String::from_utf8(buf.to_vec()).map_err(|_| SpotifyIdError::InvalidId.into())
} }
#[cfg(test)] #[cfg(test)]
@ -254,7 +527,7 @@ mod tests {
struct ConversionCase { struct ConversionCase {
id: u128, id: u128,
kind: SpotifyAudioType, kind: SpotifyItemType,
uri: &'static str, uri: &'static str,
base16: &'static str, base16: &'static str,
base62: &'static str, base62: &'static str,
@ -264,7 +537,7 @@ mod tests {
static CONV_VALID: [ConversionCase; 4] = [ static CONV_VALID: [ConversionCase; 4] = [
ConversionCase { ConversionCase {
id: 238762092608182713602505436543891614649, id: 238762092608182713602505436543891614649,
kind: SpotifyAudioType::Track, kind: SpotifyItemType::Track,
uri: "spotify:track:5sWHDYs0csV6RS48xBl0tH", uri: "spotify:track:5sWHDYs0csV6RS48xBl0tH",
base16: "b39fe8081e1f4c54be38e8d6f9f12bb9", base16: "b39fe8081e1f4c54be38e8d6f9f12bb9",
base62: "5sWHDYs0csV6RS48xBl0tH", base62: "5sWHDYs0csV6RS48xBl0tH",
@ -274,7 +547,7 @@ mod tests {
}, },
ConversionCase { ConversionCase {
id: 204841891221366092811751085145916697048, id: 204841891221366092811751085145916697048,
kind: SpotifyAudioType::Track, kind: SpotifyItemType::Track,
uri: "spotify:track:4GNcXTGWmnZ3ySrqvol3o4", uri: "spotify:track:4GNcXTGWmnZ3ySrqvol3o4",
base16: "9a1b1cfbc6f244569ae0356c77bbe9d8", base16: "9a1b1cfbc6f244569ae0356c77bbe9d8",
base62: "4GNcXTGWmnZ3ySrqvol3o4", base62: "4GNcXTGWmnZ3ySrqvol3o4",
@ -284,7 +557,7 @@ mod tests {
}, },
ConversionCase { ConversionCase {
id: 204841891221366092811751085145916697048, id: 204841891221366092811751085145916697048,
kind: SpotifyAudioType::Podcast, kind: SpotifyItemType::Episode,
uri: "spotify:episode:4GNcXTGWmnZ3ySrqvol3o4", uri: "spotify:episode:4GNcXTGWmnZ3ySrqvol3o4",
base16: "9a1b1cfbc6f244569ae0356c77bbe9d8", base16: "9a1b1cfbc6f244569ae0356c77bbe9d8",
base62: "4GNcXTGWmnZ3ySrqvol3o4", base62: "4GNcXTGWmnZ3ySrqvol3o4",
@ -294,8 +567,8 @@ mod tests {
}, },
ConversionCase { ConversionCase {
id: 204841891221366092811751085145916697048, id: 204841891221366092811751085145916697048,
kind: SpotifyAudioType::NonPlayable, kind: SpotifyItemType::Show,
uri: "spotify:unknown:4GNcXTGWmnZ3ySrqvol3o4", uri: "spotify:show:4GNcXTGWmnZ3ySrqvol3o4",
base16: "9a1b1cfbc6f244569ae0356c77bbe9d8", base16: "9a1b1cfbc6f244569ae0356c77bbe9d8",
base62: "4GNcXTGWmnZ3ySrqvol3o4", base62: "4GNcXTGWmnZ3ySrqvol3o4",
raw: &[ raw: &[
@ -307,7 +580,7 @@ mod tests {
static CONV_INVALID: [ConversionCase; 3] = [ static CONV_INVALID: [ConversionCase; 3] = [
ConversionCase { ConversionCase {
id: 0, id: 0,
kind: SpotifyAudioType::NonPlayable, kind: SpotifyItemType::Unknown,
// Invalid ID in the URI. // Invalid ID in the URI.
uri: "spotify:arbitrarywhatever:5sWHDYs0Bl0tH", uri: "spotify:arbitrarywhatever:5sWHDYs0Bl0tH",
base16: "ZZZZZ8081e1f4c54be38e8d6f9f12bb9", base16: "ZZZZZ8081e1f4c54be38e8d6f9f12bb9",
@ -319,7 +592,7 @@ mod tests {
}, },
ConversionCase { ConversionCase {
id: 0, id: 0,
kind: SpotifyAudioType::NonPlayable, kind: SpotifyItemType::Unknown,
// Missing colon between ID and type. // Missing colon between ID and type.
uri: "spotify:arbitrarywhatever5sWHDYs0csV6RS48xBl0tH", uri: "spotify:arbitrarywhatever5sWHDYs0csV6RS48xBl0tH",
base16: "--------------------", base16: "--------------------",
@ -331,7 +604,7 @@ mod tests {
}, },
ConversionCase { ConversionCase {
id: 0, id: 0,
kind: SpotifyAudioType::NonPlayable, kind: SpotifyItemType::Unknown,
// Uri too short // Uri too short
uri: "spotify:azb:aRS48xBl0tH", uri: "spotify:azb:aRS48xBl0tH",
base16: "--------------------", base16: "--------------------",
@ -350,7 +623,7 @@ mod tests {
} }
for c in &CONV_INVALID { for c in &CONV_INVALID {
assert_eq!(SpotifyId::from_base62(c.base62), Err(SpotifyIdError)); assert!(SpotifyId::from_base62(c.base62).is_err(),);
} }
} }
@ -359,7 +632,7 @@ mod tests {
for c in &CONV_VALID { for c in &CONV_VALID {
let id = SpotifyId { let id = SpotifyId {
id: c.id, id: c.id,
audio_type: c.kind, item_type: c.kind,
}; };
assert_eq!(id.to_base62().unwrap(), c.base62); assert_eq!(id.to_base62().unwrap(), c.base62);
@ -373,7 +646,7 @@ mod tests {
} }
for c in &CONV_INVALID { for c in &CONV_INVALID {
assert_eq!(SpotifyId::from_base16(c.base16), Err(SpotifyIdError)); assert!(SpotifyId::from_base16(c.base16).is_err(),);
} }
} }
@ -382,7 +655,7 @@ mod tests {
for c in &CONV_VALID { for c in &CONV_VALID {
let id = SpotifyId { let id = SpotifyId {
id: c.id, id: c.id,
audio_type: c.kind, item_type: c.kind,
}; };
assert_eq!(id.to_base16().unwrap(), c.base16); assert_eq!(id.to_base16().unwrap(), c.base16);
@ -395,11 +668,11 @@ mod tests {
let actual = SpotifyId::from_uri(c.uri).unwrap(); let actual = SpotifyId::from_uri(c.uri).unwrap();
assert_eq!(actual.id, c.id); assert_eq!(actual.id, c.id);
assert_eq!(actual.audio_type, c.kind); assert_eq!(actual.item_type, c.kind);
} }
for c in &CONV_INVALID { for c in &CONV_INVALID {
assert_eq!(SpotifyId::from_uri(c.uri), Err(SpotifyIdError)); assert!(SpotifyId::from_uri(c.uri).is_err());
} }
} }
@ -408,7 +681,7 @@ mod tests {
for c in &CONV_VALID { for c in &CONV_VALID {
let id = SpotifyId { let id = SpotifyId {
id: c.id, id: c.id,
audio_type: c.kind, item_type: c.kind,
}; };
assert_eq!(id.to_uri().unwrap(), c.uri); assert_eq!(id.to_uri().unwrap(), c.uri);
@ -422,7 +695,7 @@ mod tests {
} }
for c in &CONV_INVALID { for c in &CONV_INVALID {
assert_eq!(SpotifyId::from_raw(c.raw), Err(SpotifyIdError)); assert!(SpotifyId::from_raw(c.raw).is_err());
} }
} }
} }

138
core/src/token.rs Normal file
View file

@ -0,0 +1,138 @@
// Ported from librespot-java. Relicensed under MIT with permission.
// Known scopes:
// ugc-image-upload, playlist-read-collaborative, playlist-modify-private,
// playlist-modify-public, playlist-read-private, user-read-playback-position,
// user-read-recently-played, user-top-read, user-modify-playback-state,
// user-read-currently-playing, user-read-playback-state, user-read-private, user-read-email,
// user-library-modify, user-library-read, user-follow-modify, user-follow-read, streaming,
// app-remote-control
use std::time::{Duration, Instant};
use serde::Deserialize;
use thiserror::Error;
use crate::Error;
// Session component that caches access tokens fetched from the keymaster.
// NOTE(review): `component!` is a project macro — presumably it generates the
// `TokenProvider` wrapper over the `TokenProviderInner` state below; confirm
// against the macro definition.
component! {
    TokenProvider : TokenProviderInner {
        tokens: Vec<Token> = vec![],
    }
}
/// Errors produced while obtaining a token.
#[derive(Debug, Error)]
pub enum TokenError {
    /// The keymaster response contained no payload to parse a token from.
    #[error("no tokens available")]
    Empty,
}
impl From<TokenError> for Error {
    /// Map token errors onto the crate-wide `Error` as "unavailable".
    fn from(err: TokenError) -> Self {
        Error::unavailable(err)
    }
}
/// An access token together with its scopes and client-side expiry bookkeeping.
#[derive(Clone, Debug)]
pub struct Token {
    pub access_token: String,
    /// Validity period as reported by the server.
    pub expires_in: Duration,
    pub token_type: String,
    /// Scopes this token was granted.
    pub scopes: Vec<String>,
    /// When the token was received client-side (set in `Token::from_json`).
    pub timestamp: Instant,
}
/// Wire format of the keymaster token response (camelCase JSON keys).
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct TokenData {
    access_token: String,
    // Seconds until expiry (converted to a `Duration` in `Token::from_json`).
    expires_in: u64,
    token_type: String,
    scope: Vec<String>,
}
impl TokenProvider {
    /// Index of the first cached token covering all of `scopes`, if any.
    fn find_token(&self, scopes: Vec<&str>) -> Option<usize> {
        self.lock(|inner| {
            for i in 0..inner.tokens.len() {
                if inner.tokens[i].in_scopes(scopes.clone()) {
                    return Some(i);
                }
            }
            None
        })
    }
    // scopes must be comma-separated
    /// Get a token covering the requested scopes, reusing a non-expired cached
    /// token when possible and otherwise requesting a fresh one via Mercury.
    pub async fn get_token(&self, scopes: &str) -> Result<Token, Error> {
        let client_id = self.session().client_id();
        if client_id.is_empty() {
            return Err(Error::invalid_argument("Client ID cannot be empty"));
        }
        // NOTE(review): the lookup, clone and remove below are three separate
        // lock acquisitions, so `index` could be stale if another task mutates
        // `tokens` in between — confirm callers are effectively serialized.
        if let Some(index) = self.find_token(scopes.split(',').collect()) {
            let cached_token = self.lock(|inner| inner.tokens[index].clone());
            if cached_token.is_expired() {
                // Token is (nearly) expired; evict it and fall through to fetch.
                self.lock(|inner| inner.tokens.remove(index));
            } else {
                return Ok(cached_token);
            }
        }
        trace!(
            "Requested token in scopes {:?} unavailable or expired, requesting new token.",
            scopes
        );
        let query_uri = format!(
            "hm://keymaster/token/authenticated?scope={}&client_id={}&device_id={}",
            scopes,
            client_id,
            self.session().device_id(),
        );
        let request = self.session().mercury().get(query_uri)?;
        let response = request.await?;
        // The token JSON is expected in the first Mercury payload part.
        let data = response.payload.first().ok_or(TokenError::Empty)?.to_vec();
        let token = Token::from_json(String::from_utf8(data)?)?;
        trace!("Got token: {:#?}", token);
        // Cache for subsequent calls with the same (or a subset of) scopes.
        self.lock(|inner| inner.tokens.push(token.clone()));
        Ok(token)
    }
}
impl Token {
    /// Treat tokens as expired slightly early so one is never used right at
    /// its server-side deadline.
    const EXPIRY_THRESHOLD: Duration = Duration::from_secs(10);

    /// Parse a keymaster JSON response body into a `Token`.
    ///
    /// The `timestamp` is taken at parse time, so expiry is measured from when
    /// the token was received, not when the server issued it.
    ///
    /// # Errors
    /// Returns an error if `body` is not valid JSON matching `TokenData`.
    pub fn from_json(body: String) -> Result<Self, Error> {
        let data: TokenData = serde_json::from_str(&body)?;
        Ok(Self {
            access_token: data.access_token,
            expires_in: Duration::from_secs(data.expires_in),
            token_type: data.token_type,
            scopes: data.scope,
            timestamp: Instant::now(),
        })
    }

    /// Whether the token is within `EXPIRY_THRESHOLD` of (or past) expiry.
    pub fn is_expired(&self) -> bool {
        // `saturating_sub` avoids a `Duration` underflow panic when the server
        // hands out a token valid for less than the threshold itself.
        self.timestamp + self.expires_in.saturating_sub(Self::EXPIRY_THRESHOLD) < Instant::now()
    }

    /// Whether this token was granted the given scope.
    pub fn in_scope(&self, scope: &str) -> bool {
        self.scopes.iter().any(|s| s.as_str() == scope)
    }

    /// Whether this token was granted every one of the given scopes.
    pub fn in_scopes(&self, scopes: Vec<&str>) -> bool {
        scopes.into_iter().all(|s| self.in_scope(s))
    }
}

View file

@ -1,4 +1,97 @@
use std::mem; use std::{
future::Future,
mem,
pin::Pin,
task::{Context, Poll},
};
use futures_core::ready;
use futures_util::{future, FutureExt, Sink, SinkExt};
use tokio::{task::JoinHandle, time::timeout};
/// Drive the sink's flush forever, resolving only once flushing fails.
///
/// A successful (completed) flush is deliberately ignored: the future stays
/// pending so the sink gets flushed again whenever new items are queued. It
/// therefore only ever yields the sink's error.
pub(crate) fn keep_flushing<'a, T, S: Sink<T> + Unpin + 'a>(
    mut s: S,
) -> impl Future<Output = S::Error> + 'a {
    future::poll_fn(move |cx| {
        if let Poll::Ready(Err(e)) = s.poll_flush_unpin(cx) {
            Poll::Ready(e)
        } else {
            Poll::Pending
        }
    })
}
/// A [`JoinHandle`] wrapper that aborts the task when it is dropped.
pub struct CancelOnDrop<T>(pub JoinHandle<T>);

impl<T> Future for CancelOnDrop<T> {
    type Output = <JoinHandle<T> as Future>::Output;

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // `JoinHandle` is `Unpin`, so re-pinning the inner handle is free.
        Pin::new(&mut self.0).poll(cx)
    }
}

impl<T> Drop for CancelOnDrop<T> {
    fn drop(&mut self) {
        // Request cancellation; merely dropping the handle would leave the
        // task running detached.
        self.0.abort();
    }
}
/// A [`JoinHandle`] wrapper that, when dropped, gives the task a grace period
/// of `timeout` to finish before it is aborted.
pub struct TimeoutOnDrop<T: Send + 'static> {
    // `None` once the task has completed (polled to readiness) or the handle
    // was taken out via `take`.
    handle: Option<JoinHandle<T>>,
    timeout: tokio::time::Duration,
}
impl<T: Send + 'static> TimeoutOnDrop<T> {
    /// Wrap `handle` so that dropping the wrapper waits up to `timeout`
    /// (best-effort, on a spawned background task) before aborting it.
    pub fn new(handle: JoinHandle<T>, timeout: tokio::time::Duration) -> Self {
        Self {
            handle: Some(handle),
            timeout,
        }
    }
    /// Take the inner handle out, disarming the timeout-on-drop behavior.
    pub fn take(&mut self) -> Option<JoinHandle<T>> {
        self.handle.take()
    }
}
impl<T: Send + 'static> Future for TimeoutOnDrop<T> {
    type Output = <JoinHandle<T> as Future>::Output;

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // Polling after completion is a contract violation, hence the expect.
        let handle = self.handle.as_mut().expect("Polled after ready");
        match handle.poll_unpin(cx) {
            Poll::Pending => Poll::Pending,
            Poll::Ready(result) => {
                // Clear the handle so `Drop` has nothing left to time out.
                self.handle = None;
                Poll::Ready(result)
            }
        }
    }
}
impl<T: Send + 'static> Drop for TimeoutOnDrop<T> {
    fn drop(&mut self) {
        // Nothing to do if the handle was already taken or resolved.
        let mut handle = if let Some(handle) = self.handle.take() {
            handle
        } else {
            return;
        };
        if (&mut handle).now_or_never().is_some() {
            // Already finished
            return;
        }
        match tokio::runtime::Handle::try_current() {
            Ok(h) => {
                // Give the task `self.timeout` to finish on its own; when the
                // timeout future is dropped or elapses, `CancelOnDrop` aborts it.
                h.spawn(timeout(self.timeout, CancelOnDrop(handle)));
            }
            Err(_) => {
                // Not in tokio context, can't spawn
                handle.abort();
            }
        }
    }
}
pub trait Seq { pub trait Seq {
fn next(&self) -> Self; fn next(&self) -> Self;

View file

@ -1,17 +1,26 @@
/// Version string of the form "librespot-<sha>" /// Version string of the form "librespot-<sha>"
pub const VERSION_STRING: &str = concat!("librespot-", env!("VERGEN_SHA_SHORT")); pub const VERSION_STRING: &str = concat!("librespot-", env!("VERGEN_GIT_SHA_SHORT"));
/// Generate a timestamp string representing the build date (UTC). /// Generate a timestamp string representing the build date (UTC).
pub const BUILD_DATE: &str = env!("VERGEN_BUILD_DATE"); pub const BUILD_DATE: &str = env!("VERGEN_BUILD_DATE");
/// Short sha of the latest git commit. /// Short sha of the latest git commit.
pub const SHA_SHORT: &str = env!("VERGEN_SHA_SHORT"); pub const SHA_SHORT: &str = env!("VERGEN_GIT_SHA_SHORT");
/// Date of the latest git commit. /// Date of the latest git commit.
pub const COMMIT_DATE: &str = env!("VERGEN_COMMIT_DATE"); pub const COMMIT_DATE: &str = env!("VERGEN_GIT_COMMIT_DATE");
/// Librespot crate version. /// Librespot crate version.
pub const SEMVER: &str = env!("CARGO_PKG_VERSION"); pub const SEMVER: &str = env!("CARGO_PKG_VERSION");
/// A random build id. /// A random build id.
pub const BUILD_ID: &str = env!("LIBRESPOT_BUILD_ID"); pub const BUILD_ID: &str = env!("LIBRESPOT_BUILD_ID");
/// The protocol version of the Spotify desktop client.
pub const SPOTIFY_VERSION: u64 = 117300517;
/// The protocol version of the Spotify mobile app.
pub const SPOTIFY_MOBILE_VERSION: &str = "8.6.84";
/// The user agent to fall back to, if one could not be determined dynamically.
pub const FALLBACK_USER_AGENT: &str = "Spotify/117300517 Linux/0 (librespot)";

View file

@ -1,25 +1,19 @@
use std::time::Duration; use std::time::Duration;
use librespot_core::authentication::Credentials;
use librespot_core::config::SessionConfig;
use librespot_core::session::Session;
use tokio::time::timeout; use tokio::time::timeout;
use librespot_core::{authentication::Credentials, config::SessionConfig, session::Session};
#[tokio::test] #[tokio::test]
async fn test_connection() { async fn test_connection() {
timeout(Duration::from_secs(30), async { timeout(Duration::from_secs(30), async {
let result = Session::connect( let result = Session::new(SessionConfig::default(), None)
SessionConfig::default(), .connect(Credentials::with_password("test", "test"), false)
Credentials::with_password("test", "test"), .await;
None,
false,
)
.await;
match result { match result {
Ok(_) => panic!("Authentication succeeded despite of bad credentials."), Ok(_) => panic!("Authentication succeeded despite of bad credentials."),
Err(e) => assert_eq!(e.to_string(), "Login failed with reason: Bad credentials"), Err(e) => assert!(!e.to_string().is_empty()), // there should be some error message
} }
}) })
.await .await

View file

@ -8,32 +8,32 @@ repository = "https://github.com/librespot-org/librespot"
edition = "2018" edition = "2018"
[dependencies] [dependencies]
aes-ctr = "0.6" aes = "0.8"
base64 = "0.13" base64 = "0.13"
cfg-if = "1.0"
ctr = "0.9"
dns-sd = { version = "0.1.3", optional = true }
form_urlencoded = "1.0" form_urlencoded = "1.0"
futures-core = "0.3" futures-core = "0.3"
hmac = "0.11" futures-util = "0.3"
hyper = { version = "0.14", features = ["server", "http1", "tcp"] } hmac = "0.12"
hyper = { version = "0.14", features = ["http1", "server", "tcp"] }
libmdns = "0.7" libmdns = "0.7"
log = "0.4" log = "0.4"
rand = "0.8" rand = "0.8"
serde_json = "1.0.25" serde_json = "1.0"
sha-1 = "0.9" sha1 = "0.10"
thiserror = "1.0" thiserror = "1.0"
tokio = { version = "1.0", features = ["sync", "rt"] } tokio = { version = "1", features = ["parking_lot", "sync", "rt"] }
dns-sd = { version = "0.1.3", optional = true }
[dependencies.librespot-core] [dependencies.librespot-core]
path = "../core" path = "../core"
default_features = false
version = "0.4.2" version = "0.4.2"
[dev-dependencies] [dev-dependencies]
futures = "0.3" futures = "0.3"
hex = "0.4" hex = "0.4"
simple_logger = "2.1" tokio = { version = "1", features = ["macros", "parking_lot", "rt"] }
tokio = { version = "1.0", features = ["macros", "rt"] }
[features] [features]
with-dns-sd = ["dns-sd"] with-dns-sd = ["dns-sd", "librespot-core/with-dns-sd"]

View file

@ -1,15 +1,9 @@
use futures::StreamExt; use futures::StreamExt;
use librespot_discovery::DeviceType; use librespot_discovery::DeviceType;
use sha1::{Digest, Sha1}; use sha1::{Digest, Sha1};
use simple_logger::SimpleLogger;
#[tokio::main(flavor = "current_thread")] #[tokio::main(flavor = "current_thread")]
async fn main() { async fn main() {
SimpleLogger::new()
.with_level(log::LevelFilter::Debug)
.init()
.unwrap();
let name = "Librespot"; let name = "Librespot";
let device_id = hex::encode(Sha1::digest(name.as_bytes())); let device_id = hex::encode(Sha1::digest(name.as_bytes()));

View file

@ -7,21 +7,23 @@
//! This library uses mDNS and DNS-SD so that other devices can find it, //! This library uses mDNS and DNS-SD so that other devices can find it,
//! and spawns an http server to answer requests of Spotify clients. //! and spawns an http server to answer requests of Spotify clients.
#![warn(clippy::all, missing_docs, rust_2018_idioms)]
mod server; mod server;
use std::borrow::Cow; use std::{
use std::io; borrow::Cow,
use std::pin::Pin; io,
use std::task::{Context, Poll}; pin::Pin,
task::{Context, Poll},
};
use futures_core::Stream; use futures_core::Stream;
use librespot_core as core;
use thiserror::Error; use thiserror::Error;
use self::server::DiscoveryServer; use self::server::DiscoveryServer;
pub use crate::core::Error;
use librespot_core as core;
/// Credentials to be used in [`librespot`](`librespot_core`). /// Credentials to be used in [`librespot`](`librespot_core`).
pub use crate::core::authentication::Credentials; pub use crate::core::authentication::Credentials;
@ -49,13 +51,29 @@ pub struct Builder {
/// Errors that can occur while setting up a [`Discovery`] instance. /// Errors that can occur while setting up a [`Discovery`] instance.
#[derive(Debug, Error)] #[derive(Debug, Error)]
pub enum Error { pub enum DiscoveryError {
/// Setting up service discovery via DNS-SD failed. #[error("Creating SHA1 block cipher failed")]
AesError(#[from] aes::cipher::InvalidLength),
#[error("Setting up dns-sd failed: {0}")] #[error("Setting up dns-sd failed: {0}")]
DnsSdError(#[from] io::Error), DnsSdError(#[from] io::Error),
/// Setting up the http server failed. #[error("Creating SHA1 HMAC failed for base key {0:?}")]
#[error("Setting up the http server failed: {0}")] HmacError(Vec<u8>),
#[error("Setting up the HTTP server failed: {0}")]
HttpServerError(#[from] hyper::Error), HttpServerError(#[from] hyper::Error),
#[error("Missing params for key {0}")]
ParamsError(&'static str),
}
impl From<DiscoveryError> for Error {
fn from(err: DiscoveryError) -> Self {
match err {
DiscoveryError::AesError(_) => Error::unavailable(err),
DiscoveryError::DnsSdError(_) => Error::unavailable(err),
DiscoveryError::HmacError(_) => Error::invalid_argument(err),
DiscoveryError::HttpServerError(_) => Error::unavailable(err),
DiscoveryError::ParamsError(_) => Error::invalid_argument(err),
}
}
} }
impl Builder { impl Builder {
@ -97,7 +115,7 @@ impl Builder {
pub fn launch(self) -> Result<Discovery, Error> { pub fn launch(self) -> Result<Discovery, Error> {
let mut port = self.port; let mut port = self.port;
let name = self.server_config.name.clone().into_owned(); let name = self.server_config.name.clone().into_owned();
let server = DiscoveryServer::new(self.server_config, &mut port)?; let server = DiscoveryServer::new(self.server_config, &mut port)??;
#[cfg(feature = "with-dns-sd")] #[cfg(feature = "with-dns-sd")]
let svc = dns_sd::DNSService::register( let svc = dns_sd::DNSService::register(
@ -107,8 +125,7 @@ impl Builder {
None, None,
port, port,
&["VERSION=1.0", "CPath=/"], &["VERSION=1.0", "CPath=/"],
) )?;
.map_err(|e| Error::DnsSdError(io::Error::new(io::ErrorKind::Unsupported, e)))?;
#[cfg(not(feature = "with-dns-sd"))] #[cfg(not(feature = "with-dns-sd"))]
let svc = libmdns::Responder::spawn(&tokio::runtime::Handle::current())?.register( let svc = libmdns::Responder::spawn(&tokio::runtime::Handle::current())?.register(

View file

@ -1,26 +1,34 @@
use std::borrow::Cow; use std::{
use std::collections::BTreeMap; borrow::Cow,
use std::convert::Infallible; collections::BTreeMap,
use std::net::{Ipv4Addr, SocketAddr}; convert::Infallible,
use std::pin::Pin; net::{Ipv4Addr, SocketAddr},
use std::sync::Arc; pin::Pin,
use std::task::{Context, Poll}; sync::Arc,
task::{Context, Poll},
};
use aes_ctr::cipher::generic_array::GenericArray; use aes::cipher::{KeyIvInit, StreamCipher};
use aes_ctr::cipher::{NewStreamCipher, SyncStreamCipher};
use aes_ctr::Aes128Ctr;
use futures_core::Stream; use futures_core::Stream;
use hmac::{Hmac, Mac, NewMac}; use futures_util::{FutureExt, TryFutureExt};
use hyper::service::{make_service_fn, service_fn}; use hmac::{Hmac, Mac};
use hyper::{Body, Method, Request, Response, StatusCode}; use hyper::{
use log::{debug, warn}; service::{make_service_fn, service_fn},
Body, Method, Request, Response, StatusCode,
};
use log::{debug, error, warn};
use serde_json::json; use serde_json::json;
use sha1::{Digest, Sha1}; use sha1::{Digest, Sha1};
use tokio::sync::{mpsc, oneshot}; use tokio::sync::{mpsc, oneshot};
use crate::core::authentication::Credentials; use super::DiscoveryError;
use crate::core::config::DeviceType;
use crate::core::diffie_hellman::DhLocalKeys; use crate::{
core::config::DeviceType,
core::{authentication::Credentials, diffie_hellman::DhLocalKeys, Error},
};
type Aes128Ctr = ctr::Ctr128BE<aes::Aes128>;
type Params<'a> = BTreeMap<Cow<'a, str>, Cow<'a, str>>; type Params<'a> = BTreeMap<Cow<'a, str>, Cow<'a, str>>;
@ -57,7 +65,7 @@ impl RequestHandler {
"status": 101, "status": 101,
"statusString": "ERROR-OK", "statusString": "ERROR-OK",
"spotifyError": 0, "spotifyError": 0,
"version": "2.7.1", "version": crate::core::version::SEMVER,
"deviceID": (self.config.device_id), "deviceID": (self.config.device_id),
"remoteName": (self.config.name), "remoteName": (self.config.name),
"activeUser": "", "activeUser": "",
@ -76,41 +84,58 @@ impl RequestHandler {
Response::new(Body::from(body)) Response::new(Body::from(body))
} }
fn handle_add_user(&self, params: &Params<'_>) -> Response<hyper::Body> { fn handle_add_user(&self, params: &Params<'_>) -> Result<Response<hyper::Body>, Error> {
let username = params.get("userName").unwrap().as_ref(); let username_key = "userName";
let encrypted_blob = params.get("blob").unwrap(); let username = params
let client_key = params.get("clientKey").unwrap(); .get(username_key)
.ok_or(DiscoveryError::ParamsError(username_key))?
.as_ref();
let encrypted_blob = base64::decode(encrypted_blob.as_bytes()).unwrap(); let blob_key = "blob";
let encrypted_blob = params
.get(blob_key)
.ok_or(DiscoveryError::ParamsError(blob_key))?;
let client_key = base64::decode(client_key.as_bytes()).unwrap(); let clientkey_key = "clientKey";
let client_key = params
.get(clientkey_key)
.ok_or(DiscoveryError::ParamsError(clientkey_key))?;
let encrypted_blob = base64::decode(encrypted_blob.as_bytes())?;
let client_key = base64::decode(client_key.as_bytes())?;
let shared_key = self.keys.shared_secret(&client_key); let shared_key = self.keys.shared_secret(&client_key);
let encrypted_blob_len = encrypted_blob.len();
if encrypted_blob_len < 16 {
return Err(DiscoveryError::HmacError(encrypted_blob.to_vec()).into());
}
let iv = &encrypted_blob[0..16]; let iv = &encrypted_blob[0..16];
let encrypted = &encrypted_blob[16..encrypted_blob.len() - 20]; let encrypted = &encrypted_blob[16..encrypted_blob_len - 20];
let cksum = &encrypted_blob[encrypted_blob.len() - 20..encrypted_blob.len()]; let cksum = &encrypted_blob[encrypted_blob_len - 20..encrypted_blob_len];
let base_key = Sha1::digest(&shared_key); let base_key = Sha1::digest(&shared_key);
let base_key = &base_key[..16]; let base_key = &base_key[..16];
let checksum_key = { let checksum_key = {
let mut h = let mut h = Hmac::<Sha1>::new_from_slice(base_key)
Hmac::<Sha1>::new_from_slice(base_key).expect("HMAC can take key of any size"); .map_err(|_| DiscoveryError::HmacError(base_key.to_vec()))?;
h.update(b"checksum"); h.update(b"checksum");
h.finalize().into_bytes() h.finalize().into_bytes()
}; };
let encryption_key = { let encryption_key = {
let mut h = let mut h = Hmac::<Sha1>::new_from_slice(base_key)
Hmac::<Sha1>::new_from_slice(base_key).expect("HMAC can take key of any size"); .map_err(|_| DiscoveryError::HmacError(base_key.to_vec()))?;
h.update(b"encryption"); h.update(b"encryption");
h.finalize().into_bytes() h.finalize().into_bytes()
}; };
let mut h = let mut h = Hmac::<Sha1>::new_from_slice(&checksum_key)
Hmac::<Sha1>::new_from_slice(&checksum_key).expect("HMAC can take key of any size"); .map_err(|_| DiscoveryError::HmacError(base_key.to_vec()))?;
h.update(encrypted); h.update(encrypted);
if h.verify(cksum).is_err() { if h.verify_slice(cksum).is_err() {
warn!("Login error for user {:?}: MAC mismatch", username); warn!("Login error for user {:?}: MAC mismatch", username);
let result = json!({ let result = json!({
"status": 102, "status": 102,
@ -119,22 +144,20 @@ impl RequestHandler {
}); });
let body = result.to_string(); let body = result.to_string();
return Response::new(Body::from(body)); return Ok(Response::new(Body::from(body)));
} }
let decrypted = { let decrypted = {
let mut data = encrypted.to_vec(); let mut data = encrypted.to_vec();
let mut cipher = Aes128Ctr::new( let mut cipher = Aes128Ctr::new_from_slices(&encryption_key[0..16], iv)
GenericArray::from_slice(&encryption_key[0..16]), .map_err(DiscoveryError::AesError)?;
GenericArray::from_slice(iv),
);
cipher.apply_keystream(&mut data); cipher.apply_keystream(&mut data);
data data
}; };
let credentials = Credentials::with_blob(username, &decrypted, &self.config.device_id); let credentials = Credentials::with_blob(username, &decrypted, &self.config.device_id)?;
self.tx.send(credentials).unwrap(); self.tx.send(credentials)?;
let result = json!({ let result = json!({
"status": 101, "status": 101,
@ -143,7 +166,7 @@ impl RequestHandler {
}); });
let body = result.to_string(); let body = result.to_string();
Response::new(Body::from(body)) Ok(Response::new(Body::from(body)))
} }
fn not_found(&self) -> Response<hyper::Body> { fn not_found(&self) -> Response<hyper::Body> {
@ -152,7 +175,10 @@ impl RequestHandler {
res res
} }
async fn handle(self: Arc<Self>, request: Request<Body>) -> hyper::Result<Response<Body>> { async fn handle(
self: Arc<Self>,
request: Request<Body>,
) -> Result<hyper::Result<Response<Body>>, Error> {
let mut params = Params::new(); let mut params = Params::new();
let (parts, body) = request.into_parts(); let (parts, body) = request.into_parts();
@ -172,11 +198,11 @@ impl RequestHandler {
let action = params.get("action").map(Cow::as_ref); let action = params.get("action").map(Cow::as_ref);
Ok(match (parts.method, action) { Ok(Ok(match (parts.method, action) {
(Method::GET, Some("getInfo")) => self.handle_get_info(), (Method::GET, Some("getInfo")) => self.handle_get_info(),
(Method::POST, Some("addUser")) => self.handle_add_user(&params), (Method::POST, Some("addUser")) => self.handle_add_user(&params)?,
_ => self.not_found(), _ => self.not_found(),
}) }))
} }
} }
@ -186,7 +212,7 @@ pub struct DiscoveryServer {
} }
impl DiscoveryServer { impl DiscoveryServer {
pub fn new(config: Config, port: &mut u16) -> hyper::Result<Self> { pub fn new(config: Config, port: &mut u16) -> Result<hyper::Result<Self>, Error> {
let (discovery, cred_rx) = RequestHandler::new(config); let (discovery, cred_rx) = RequestHandler::new(config);
let discovery = Arc::new(discovery); let discovery = Arc::new(discovery);
@ -197,7 +223,14 @@ impl DiscoveryServer {
let make_service = make_service_fn(move |_| { let make_service = make_service_fn(move |_| {
let discovery = discovery.clone(); let discovery = discovery.clone();
async move { async move {
Ok::<_, hyper::Error>(service_fn(move |request| discovery.clone().handle(request))) Ok::<_, hyper::Error>(service_fn(move |request| {
discovery
.clone()
.handle(request)
.inspect_err(|e| error!("could not handle discovery request: {}", e))
.and_then(|x| async move { Ok(x) })
.map(Result::unwrap) // guaranteed by `and_then` above
}))
} }
}); });
@ -209,8 +242,10 @@ impl DiscoveryServer {
tokio::spawn(async { tokio::spawn(async {
let result = server let result = server
.with_graceful_shutdown(async { .with_graceful_shutdown(async {
close_rx.await.unwrap_err();
debug!("Shutting down discovery server"); debug!("Shutting down discovery server");
if close_rx.await.is_ok() {
debug!("unable to close discovery Rx channel completely");
}
}) })
.await; .await;
@ -219,10 +254,10 @@ impl DiscoveryServer {
} }
}); });
Ok(Self { Ok(Ok(Self {
cred_rx, cred_rx,
_close_tx: close_tx, _close_tx: close_tx,
}) }))
} }
} }

View file

@ -1,9 +1,6 @@
use std::env; use std::env;
use librespot::core::authentication::Credentials; use librespot::core::{authentication::Credentials, config::SessionConfig, session::Session};
use librespot::core::config::SessionConfig;
use librespot::core::keymaster;
use librespot::core::session::Session;
const SCOPES: &str = const SCOPES: &str =
"streaming,user-read-playback-state,user-modify-playback-state,user-read-currently-playing"; "streaming,user-read-playback-state,user-modify-playback-state,user-read-currently-playing";
@ -13,21 +10,20 @@ async fn main() {
let session_config = SessionConfig::default(); let session_config = SessionConfig::default();
let args: Vec<_> = env::args().collect(); let args: Vec<_> = env::args().collect();
if args.len() != 4 { if args.len() != 3 {
eprintln!("Usage: {} USERNAME PASSWORD CLIENT_ID", args[0]); eprintln!("Usage: {} USERNAME PASSWORD", args[0]);
return; return;
} }
println!("Connecting.."); println!("Connecting...");
let credentials = Credentials::with_password(&args[1], &args[2]); let credentials = Credentials::with_password(&args[1], &args[2]);
let (session, _) = Session::connect(session_config, credentials, None, false) let session = Session::new(session_config, None);
.await
.unwrap();
println!( match session.connect(credentials, false).await {
"Token: {:#?}", Ok(()) => println!(
keymaster::get_token(&session, &args[3], SCOPES) "Token: {:#?}",
.await session.token_provider().get_token(SCOPES).await.unwrap()
.unwrap() ),
); Err(e) => println!("Error connecting: {}", e),
}
} }

View file

@ -1,13 +1,16 @@
use std::env; use std::{env, process::exit};
use librespot::core::authentication::Credentials; use librespot::{
use librespot::core::config::SessionConfig; core::{
use librespot::core::session::Session; authentication::Credentials, config::SessionConfig, session::Session, spotify_id::SpotifyId,
use librespot::core::spotify_id::SpotifyId; },
use librespot::playback::audio_backend; playback::{
use librespot::playback::config::{AudioFormat, PlayerConfig}; audio_backend,
use librespot::playback::mixer::NoOpVolume; config::{AudioFormat, PlayerConfig},
use librespot::playback::player::Player; mixer::NoOpVolume,
player::Player,
},
};
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
@ -26,10 +29,12 @@ async fn main() {
let backend = audio_backend::find(None).unwrap(); let backend = audio_backend::find(None).unwrap();
println!("Connecting .."); println!("Connecting...");
let (session, _) = Session::connect(session_config, credentials, None, false) let session = Session::new(session_config, None);
.await if let Err(e) = session.connect(credentials, false).await {
.unwrap(); println!("Error connecting: {}", e);
exit(1);
}
let (mut player, _) = Player::new(player_config, session, Box::new(NoOpVolume), move || { let (mut player, _) = Player::new(player_config, session, Box::new(NoOpVolume), move || {
backend(None, audio_format) backend(None, audio_format)

View file

@ -1,11 +1,11 @@
use std::env; use std::{env, process::exit};
use std::process;
use librespot::core::authentication::Credentials; use librespot::{
use librespot::core::config::SessionConfig; core::{
use librespot::core::session::Session; authentication::Credentials, config::SessionConfig, session::Session, spotify_id::SpotifyId,
use librespot::core::spotify_id::SpotifyId; },
use librespot::metadata::{Metadata, Playlist, Track}; metadata::{Metadata, Playlist, Track},
};
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
@ -24,16 +24,18 @@ async fn main() {
"PLAYLIST should be a playlist URI such as: \ "PLAYLIST should be a playlist URI such as: \
\"spotify:playlist:37i9dQZF1DXec50AjHrNTq\"" \"spotify:playlist:37i9dQZF1DXec50AjHrNTq\""
); );
process::exit(1); exit(1);
}); });
let (session, _) = Session::connect(session_config, credentials, None, false) let session = Session::new(session_config, None);
.await if let Err(e) = session.connect(credentials, false).await {
.unwrap(); println!("Error connecting: {}", e);
exit(1);
}
let plist = Playlist::get(&session, plist_uri).await.unwrap(); let plist = Playlist::get(&session, plist_uri).await.unwrap();
println!("{:?}", plist); println!("{:?}", plist);
for track_id in plist.tracks { for track_id in plist.tracks() {
let plist_track = Track::get(&session, track_id).await.unwrap(); let plist_track = Track::get(&session, track_id).await.unwrap();
println!("track: {} ", plist_track.name); println!("track: {} ", plist_track.name);
} }

View file

@ -9,13 +9,17 @@ edition = "2018"
[dependencies] [dependencies]
async-trait = "0.1" async-trait = "0.1"
byteorder = "1.3" byteorder = "1"
protobuf = "2.14.0" bytes = "1"
log = "0.4" log = "0.4"
protobuf = "2"
thiserror = "1"
uuid = { version = "1", default-features = false }
[dependencies.librespot-core] [dependencies.librespot-core]
path = "../core" path = "../core"
version = "0.4.2" version = "0.4.2"
[dependencies.librespot-protocol] [dependencies.librespot-protocol]
path = "../protocol" path = "../protocol"
version = "0.4.2" version = "0.4.2"

137
metadata/src/album.rs Normal file
View file

@ -0,0 +1,137 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use crate::{
artist::Artists, availability::Availabilities, copyright::Copyrights, external_id::ExternalIds,
image::Images, request::RequestResult, restriction::Restrictions, sale_period::SalePeriods,
track::Tracks, util::try_from_repeated_message, Metadata,
};
use librespot_core::{date::Date, Error, Session, SpotifyId};
use librespot_protocol as protocol;
pub use protocol::metadata::Album_Type as AlbumType;
use protocol::metadata::Disc as DiscMessage;
#[derive(Debug, Clone)]
pub struct Album {
pub id: SpotifyId,
pub name: String,
pub artists: Artists,
pub album_type: AlbumType,
pub label: String,
pub date: Date,
pub popularity: i32,
pub genres: Vec<String>,
pub covers: Images,
pub external_ids: ExternalIds,
pub discs: Discs,
pub reviews: Vec<String>,
pub copyrights: Copyrights,
pub restrictions: Restrictions,
pub related: Albums,
pub sale_periods: SalePeriods,
pub cover_group: Images,
pub original_title: String,
pub version_title: String,
pub type_str: String,
pub availability: Availabilities,
}
#[derive(Debug, Clone)]
pub struct Albums(pub Vec<SpotifyId>);
impl Deref for Albums {
type Target = Vec<SpotifyId>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Debug, Clone)]
pub struct Disc {
pub number: i32,
pub name: String,
pub tracks: Tracks,
}
#[derive(Debug, Clone)]
pub struct Discs(pub Vec<Disc>);
impl Deref for Discs {
type Target = Vec<Disc>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Album {
    /// Flatten every disc's track list into a single `Tracks`, preserving
    /// disc order and each disc's internal track order.
    pub fn tracks(&self) -> Tracks {
        let mut all_tracks = Vec::new();
        for disc in self.discs.iter() {
            all_tracks.extend(disc.tracks.iter().cloned());
        }
        Tracks(all_tracks)
    }
}
#[async_trait]
impl Metadata for Album {
    type Message = protocol::metadata::Album;
    /// Fetch the raw album protobuf via the session's spclient (HTTP API).
    async fn request(session: &Session, album_id: SpotifyId) -> RequestResult {
        session.spclient().get_album_metadata(album_id).await
    }
    /// Convert the decoded protobuf into an `Album`. The id parameter is
    /// ignored because the message itself carries the album's gid.
    fn parse(msg: &Self::Message, _: SpotifyId) -> Result<Self, Error> {
        Self::try_from(msg)
    }
}
impl TryFrom<&<Self as Metadata>::Message> for Album {
type Error = librespot_core::Error;
fn try_from(album: &<Self as Metadata>::Message) -> Result<Self, Self::Error> {
Ok(Self {
id: album.try_into()?,
name: album.get_name().to_owned(),
artists: album.get_artist().try_into()?,
album_type: album.get_field_type(),
label: album.get_label().to_owned(),
date: album.get_date().try_into()?,
popularity: album.get_popularity(),
genres: album.get_genre().to_vec(),
covers: album.get_cover().into(),
external_ids: album.get_external_id().into(),
discs: album.get_disc().try_into()?,
reviews: album.get_review().to_vec(),
copyrights: album.get_copyright().into(),
restrictions: album.get_restriction().into(),
related: album.get_related().try_into()?,
sale_periods: album.get_sale_period().try_into()?,
cover_group: album.get_cover_group().get_image().into(),
original_title: album.get_original_title().to_owned(),
version_title: album.get_version_title().to_owned(),
type_str: album.get_type_str().to_owned(),
availability: album.get_availability().try_into()?,
})
}
}
try_from_repeated_message!(<Album as Metadata>::Message, Albums);
impl TryFrom<&DiscMessage> for Disc {
type Error = librespot_core::Error;
fn try_from(disc: &DiscMessage) -> Result<Self, Self::Error> {
Ok(Self {
number: disc.get_number(),
name: disc.get_name().to_owned(),
tracks: disc.get_track().try_into()?,
})
}
}
try_from_repeated_message!(DiscMessage, Discs);

129
metadata/src/artist.rs Normal file
View file

@ -0,0 +1,129 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use crate::{request::RequestResult, track::Tracks, util::try_from_repeated_message, Metadata};
use librespot_core::{Error, Session, SpotifyId};
use librespot_protocol as protocol;
use protocol::metadata::ArtistWithRole as ArtistWithRoleMessage;
pub use protocol::metadata::ArtistWithRole_ArtistRole as ArtistRole;
use protocol::metadata::TopTracks as TopTracksMessage;
#[derive(Debug, Clone)]
pub struct Artist {
pub id: SpotifyId,
pub name: String,
pub top_tracks: CountryTopTracks,
}
#[derive(Debug, Clone)]
pub struct Artists(pub Vec<SpotifyId>);
impl Deref for Artists {
type Target = Vec<SpotifyId>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Debug, Clone)]
pub struct ArtistWithRole {
pub id: SpotifyId,
pub name: String,
pub role: ArtistRole,
}
#[derive(Debug, Clone)]
pub struct ArtistsWithRole(pub Vec<ArtistWithRole>);
impl Deref for ArtistsWithRole {
type Target = Vec<ArtistWithRole>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Debug, Clone)]
pub struct TopTracks {
pub country: String,
pub tracks: Tracks,
}
#[derive(Debug, Clone)]
pub struct CountryTopTracks(pub Vec<TopTracks>);
impl Deref for CountryTopTracks {
type Target = Vec<TopTracks>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl CountryTopTracks {
    /// Return the top tracks for `country`, falling back to the global list
    /// (entry with an empty country code) and finally to an empty list.
    pub fn for_country(&self, country: &str) -> Tracks {
        self.0
            .iter()
            .find(|top_track| top_track.country == country)
            .or_else(|| self.0.iter().find(|top_track| top_track.country.is_empty()))
            .map(|top_track| top_track.tracks.clone())
            .unwrap_or_else(|| Tracks(vec![]))
    }
}
#[async_trait]
impl Metadata for Artist {
type Message = protocol::metadata::Artist;
async fn request(session: &Session, artist_id: SpotifyId) -> RequestResult {
session.spclient().get_artist_metadata(artist_id).await
}
fn parse(msg: &Self::Message, _: SpotifyId) -> Result<Self, Error> {
Self::try_from(msg)
}
}
impl TryFrom<&<Self as Metadata>::Message> for Artist {
    type Error = librespot_core::Error;
    /// Build an `Artist` from its protobuf message; fails if the gid or any
    /// top-track entry cannot be converted.
    fn try_from(artist: &<Self as Metadata>::Message) -> Result<Self, Self::Error> {
        let id = artist.try_into()?;
        let top_tracks = artist.get_top_track().try_into()?;
        Ok(Self {
            id,
            name: artist.get_name().to_owned(),
            top_tracks,
        })
    }
}
try_from_repeated_message!(<Artist as Metadata>::Message, Artists);
impl TryFrom<&ArtistWithRoleMessage> for ArtistWithRole {
type Error = librespot_core::Error;
fn try_from(artist_with_role: &ArtistWithRoleMessage) -> Result<Self, Self::Error> {
Ok(Self {
id: artist_with_role.try_into()?,
name: artist_with_role.get_artist_name().to_owned(),
role: artist_with_role.get_role(),
})
}
}
try_from_repeated_message!(ArtistWithRoleMessage, ArtistsWithRole);
impl TryFrom<&TopTracksMessage> for TopTracks {
    type Error = librespot_core::Error;
    /// Build a per-country `TopTracks` entry from its protobuf message.
    fn try_from(top_tracks: &TopTracksMessage) -> Result<Self, Self::Error> {
        let country = top_tracks.get_country().to_owned();
        let tracks = top_tracks.get_track().try_into()?;
        Ok(Self { country, tracks })
    }
}
try_from_repeated_message!(TopTracksMessage, CountryTopTracks);

View file

@ -0,0 +1,62 @@
use std::{collections::HashMap, fmt::Debug, ops::Deref};
use librespot_core::FileId;
use librespot_protocol as protocol;
use protocol::metadata::AudioFile as AudioFileMessage;
pub use protocol::metadata::AudioFile_Format as AudioFileFormat;
#[derive(Debug, Clone)]
pub struct AudioFiles(pub HashMap<AudioFileFormat, FileId>);
impl Deref for AudioFiles {
type Target = HashMap<AudioFileFormat, FileId>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl AudioFiles {
pub fn is_ogg_vorbis(format: AudioFileFormat) -> bool {
matches!(
format,
AudioFileFormat::OGG_VORBIS_320
| AudioFileFormat::OGG_VORBIS_160
| AudioFileFormat::OGG_VORBIS_96
)
}
pub fn is_mp3(format: AudioFileFormat) -> bool {
matches!(
format,
AudioFileFormat::MP3_320
| AudioFileFormat::MP3_256
| AudioFileFormat::MP3_160
| AudioFileFormat::MP3_96
| AudioFileFormat::MP3_160_ENC
)
}
pub fn is_flac(format: AudioFileFormat) -> bool {
matches!(format, AudioFileFormat::FLAC_FLAC)
}
}
impl From<&[AudioFileMessage]> for AudioFiles {
fn from(files: &[AudioFileMessage]) -> Self {
let audio_files = files
.iter()
.filter_map(|file| {
let file_id = FileId::from(file.get_file_id());
if file.has_format() {
Some((file.get_format(), file_id))
} else {
trace!("Ignoring file <{}> with unspecified format", file_id);
None
}
})
.collect();
AudioFiles(audio_files)
}
}

113
metadata/src/audio/item.rs Normal file
View file

@ -0,0 +1,113 @@
use std::fmt::Debug;
use crate::{
availability::{AudioItemAvailability, Availabilities, UnavailabilityReason},
episode::Episode,
error::MetadataError,
restriction::Restrictions,
track::{Track, Tracks},
};
use super::file::AudioFiles;
use librespot_core::{
date::Date, session::UserData, spotify_id::SpotifyItemType, Error, Session, SpotifyId,
};
pub type AudioItemResult = Result<AudioItem, Error>;
// A wrapper with fields the player needs
#[derive(Debug, Clone)]
pub struct AudioItem {
pub id: SpotifyId,
pub spotify_uri: String,
pub files: AudioFiles,
pub name: String,
pub duration: i32,
pub availability: AudioItemAvailability,
pub alternatives: Option<Tracks>,
pub is_explicit: bool,
}
impl AudioItem {
    /// Resolve `id` into a playable `AudioItem`, dispatching on the item
    /// type: only tracks and episodes carry audio.
    pub async fn get_file(session: &Session, id: SpotifyId) -> AudioItemResult {
        match id.item_type {
            SpotifyItemType::Track => Track::get_audio_item(session, id).await,
            SpotifyItemType::Episode => Episode::get_audio_item(session, id).await,
            // Playlists, shows, artists etc. have no audio of their own.
            _ => Err(Error::unavailable(MetadataError::NonPlayable)),
        }
    }
}
#[async_trait]
pub trait InnerAudioItem {
    /// Fetch this item's metadata and build the player-facing `AudioItem`.
    async fn get_audio_item(session: &Session, id: SpotifyId) -> AudioItemResult;
    /// Decide whether the user's country and catalogue allow playback.
    ///
    /// Only restrictions naming the user's catalogue (defaulting to
    /// "premium" when the session attributes carry no "catalogue" key) are
    /// considered; the first such restriction that carries a country list
    /// decides the outcome.
    fn allowed_for_user(
        user_data: &UserData,
        restrictions: &Restrictions,
    ) -> AudioItemAvailability {
        let country = &user_data.country;
        let user_catalogue = match user_data.attributes.get("catalogue") {
            Some(catalogue) => catalogue,
            None => "premium",
        };
        for premium_restriction in restrictions.iter().filter(|restriction| {
            restriction
                .catalogue_strs
                .iter()
                .any(|restricted_catalogue| restricted_catalogue == user_catalogue)
        }) {
            if let Some(allowed_countries) = &premium_restriction.countries_allowed {
                // A restriction will specify either a whitelist *or* a blacklist,
                // but not both. So restrict availability if there is a whitelist
                // and the country isn't on it.
                if allowed_countries.iter().any(|allowed| country == allowed) {
                    return Ok(());
                } else {
                    return Err(UnavailabilityReason::NotWhitelisted);
                }
            }
            if let Some(forbidden_countries) = &premium_restriction.countries_forbidden {
                if forbidden_countries
                    .iter()
                    .any(|forbidden| country == forbidden)
                {
                    return Err(UnavailabilityReason::Blacklisted);
                } else {
                    return Ok(());
                }
            }
        }
        Ok(()) // no restrictions in place
    }
    /// Check availability windows: an item that lists windows is playable
    /// only once at least one window's start date has passed.
    fn available(availability: &Availabilities) -> AudioItemAvailability {
        if availability.is_empty() {
            // not all items have availability specified
            return Ok(());
        }
        if !(availability
            .iter()
            .any(|availability| Date::now_utc() >= availability.start))
        {
            return Err(UnavailabilityReason::Embargo);
        }
        Ok(())
    }
    /// Combined check: availability window first, then country/catalogue
    /// restrictions; the first failure short-circuits via `?`.
    fn available_for_user(
        user_data: &UserData,
        availability: &Availabilities,
        restrictions: &Restrictions,
    ) -> AudioItemAvailability {
        Self::available(availability)?;
        Self::allowed_for_user(user_data, restrictions)?;
        Ok(())
    }
}

View file

@ -0,0 +1,5 @@
pub mod file;
pub mod item;
pub use file::{AudioFileFormat, AudioFiles};
pub use item::AudioItem;

View file

@ -0,0 +1,56 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use thiserror::Error;
use crate::util::try_from_repeated_message;
use librespot_core::date::Date;
use librespot_protocol as protocol;
use protocol::metadata::Availability as AvailabilityMessage;
pub type AudioItemAvailability = Result<(), UnavailabilityReason>;
#[derive(Debug, Clone)]
pub struct Availability {
pub catalogue_strs: Vec<String>,
pub start: Date,
}
#[derive(Debug, Clone)]
pub struct Availabilities(pub Vec<Availability>);
impl Deref for Availabilities {
type Target = Vec<Availability>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Debug, Copy, Clone, Error)]
pub enum UnavailabilityReason {
#[error("blacklist present and country on it")]
Blacklisted,
#[error("available date is in the future")]
Embargo,
#[error("required data was not present")]
NoData,
#[error("whitelist present and country not on it")]
NotWhitelisted,
}
impl TryFrom<&AvailabilityMessage> for Availability {
    type Error = librespot_core::Error;
    /// Build an `Availability` window from its protobuf message; fails only
    /// if the start date cannot be converted.
    fn try_from(availability: &AvailabilityMessage) -> Result<Self, Self::Error> {
        let catalogue_strs = availability.get_catalogue_str().to_vec();
        let start = availability.get_start().try_into()?;
        Ok(Self {
            catalogue_strs,
            start,
        })
    }
}
try_from_repeated_message!(AvailabilityMessage, Availabilities);

View file

@ -0,0 +1,33 @@
use std::{fmt::Debug, ops::Deref};
use crate::util::from_repeated_message;
use librespot_protocol as protocol;
use protocol::metadata::ContentRating as ContentRatingMessage;
#[derive(Debug, Clone)]
pub struct ContentRating {
pub country: String,
pub tags: Vec<String>,
}
#[derive(Debug, Clone)]
pub struct ContentRatings(pub Vec<ContentRating>);
impl Deref for ContentRatings {
type Target = Vec<ContentRating>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<&ContentRatingMessage> for ContentRating {
    /// Copy the country code and rating tags out of the protobuf message.
    fn from(content_rating: &ContentRatingMessage) -> Self {
        let country = content_rating.get_country().to_owned();
        let tags = content_rating.get_tag().to_vec();
        Self { country, tags }
    }
}
from_repeated_message!(ContentRatingMessage, ContentRatings);

34
metadata/src/copyright.rs Normal file
View file

@ -0,0 +1,34 @@
use std::{fmt::Debug, ops::Deref};
use crate::util::from_repeated_message;
use librespot_protocol as protocol;
use protocol::metadata::Copyright as CopyrightMessage;
pub use protocol::metadata::Copyright_Type as CopyrightType;
#[derive(Debug, Clone)]
pub struct Copyright {
pub copyright_type: CopyrightType,
pub text: String,
}
#[derive(Debug, Clone)]
pub struct Copyrights(pub Vec<Copyright>);
impl Deref for Copyrights {
type Target = Vec<Copyright>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<&CopyrightMessage> for Copyright {
    /// Copy the copyright kind and text out of the protobuf message.
    fn from(copyright: &CopyrightMessage) -> Self {
        let copyright_type = copyright.get_field_type();
        let text = copyright.get_text().to_owned();
        Self {
            copyright_type,
            text,
        }
    }
}
from_repeated_message!(CopyrightMessage, Copyrights);

View file

@ -1,19 +0,0 @@
use byteorder::{BigEndian, WriteBytesExt};
use std::io::Write;
use librespot_core::channel::ChannelData;
use librespot_core::session::Session;
use librespot_core::spotify_id::FileId;
/// Request the cover-art file `file` over a newly allocated channel and
/// return the channel's data stream.
///
/// The request packet is: channel id (u16 BE), a zero u16, then the raw
/// 20-byte file id; it is sent as packet type 0x19.
pub fn get(session: &Session, file: FileId) -> ChannelData {
    let (channel_id, channel) = session.channel().allocate();
    let (_headers, data) = channel.split();
    let mut packet: Vec<u8> = Vec::new();
    packet.write_u16::<BigEndian>(channel_id).unwrap();
    packet.write_u16::<BigEndian>(0).unwrap();
    // write_all instead of write: a bare write may do a partial write and
    // silently truncate the file id in the request packet.
    packet.write_all(&file.0).unwrap();
    session.send_packet(0x19, packet);
    data
}

133
metadata/src/episode.rs Normal file
View file

@ -0,0 +1,133 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use crate::{
audio::{
file::AudioFiles,
item::{AudioItem, AudioItemResult, InnerAudioItem},
},
availability::Availabilities,
content_rating::ContentRatings,
image::Images,
request::RequestResult,
restriction::Restrictions,
util::try_from_repeated_message,
video::VideoFiles,
Metadata,
};
use librespot_core::{date::Date, Error, Session, SpotifyId};
use librespot_protocol as protocol;
pub use protocol::metadata::Episode_EpisodeType as EpisodeType;
#[derive(Debug, Clone)]
pub struct Episode {
pub id: SpotifyId,
pub name: String,
pub duration: i32,
pub audio: AudioFiles,
pub description: String,
pub number: i32,
pub publish_time: Date,
pub covers: Images,
pub language: String,
pub is_explicit: bool,
pub show: SpotifyId,
pub videos: VideoFiles,
pub video_previews: VideoFiles,
pub audio_previews: AudioFiles,
pub restrictions: Restrictions,
pub freeze_frames: Images,
pub keywords: Vec<String>,
pub allow_background_playback: bool,
pub availability: Availabilities,
pub external_url: String,
pub episode_type: EpisodeType,
pub has_music_and_talk: bool,
pub content_rating: ContentRatings,
pub is_audiobook_chapter: bool,
}
#[derive(Debug, Clone)]
pub struct Episodes(pub Vec<SpotifyId>);
impl Deref for Episodes {
type Target = Vec<SpotifyId>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[async_trait]
impl InnerAudioItem for Episode {
    /// Load episode metadata and convert it into the player-facing `AudioItem`.
    async fn get_audio_item(session: &Session, id: SpotifyId) -> AudioItemResult {
        let episode = Self::get(session, id).await?;
        // Availability is computed eagerly but kept as a Result, so an
        // unavailable episode still yields an AudioItem carrying the reason.
        let availability = Self::available_for_user(
            &session.user_data(),
            &episode.availability,
            &episode.restrictions,
        );
        Ok(AudioItem {
            id,
            spotify_uri: id.to_uri()?,
            files: episode.audio,
            name: episode.name,
            duration: episode.duration,
            availability,
            // Unlike tracks, episodes carry no alternative versions.
            alternatives: None,
            is_explicit: episode.is_explicit,
        })
    }
}
#[async_trait]
impl Metadata for Episode {
type Message = protocol::metadata::Episode;
async fn request(session: &Session, episode_id: SpotifyId) -> RequestResult {
session.spclient().get_episode_metadata(episode_id).await
}
fn parse(msg: &Self::Message, _: SpotifyId) -> Result<Self, Error> {
Self::try_from(msg)
}
}
impl TryFrom<&<Self as Metadata>::Message> for Episode {
type Error = librespot_core::Error;
fn try_from(episode: &<Self as Metadata>::Message) -> Result<Self, Self::Error> {
Ok(Self {
id: episode.try_into()?,
name: episode.get_name().to_owned(),
duration: episode.get_duration().to_owned(),
audio: episode.get_audio().into(),
description: episode.get_description().to_owned(),
number: episode.get_number(),
publish_time: episode.get_publish_time().try_into()?,
covers: episode.get_cover_image().get_image().into(),
language: episode.get_language().to_owned(),
is_explicit: episode.get_explicit().to_owned(),
show: episode.get_show().try_into()?,
videos: episode.get_video().into(),
video_previews: episode.get_video_preview().into(),
audio_previews: episode.get_audio_preview().into(),
restrictions: episode.get_restriction().into(),
freeze_frames: episode.get_freeze_frame().get_image().into(),
keywords: episode.get_keyword().to_vec(),
allow_background_playback: episode.get_allow_background_playback(),
availability: episode.get_availability().try_into()?,
external_url: episode.get_external_url().to_owned(),
episode_type: episode.get_field_type(),
has_music_and_talk: episode.get_music_and_talk(),
content_rating: episode.get_content_rating().into(),
is_audiobook_chapter: episode.get_is_audiobook_chapter(),
})
}
}
try_from_repeated_message!(<Episode as Metadata>::Message, Episodes);

10
metadata/src/error.rs Normal file
View file

@ -0,0 +1,10 @@
use std::fmt::Debug;
use thiserror::Error;
#[derive(Debug, Error)]
pub enum MetadataError {
#[error("empty response")]
Empty,
#[error("audio item is non-playable when it should be")]
NonPlayable,
}

View file

@ -0,0 +1,33 @@
use std::{fmt::Debug, ops::Deref};
use crate::util::from_repeated_message;
use librespot_protocol as protocol;
use protocol::metadata::ExternalId as ExternalIdMessage;
#[derive(Debug, Clone)]
pub struct ExternalId {
pub external_type: String,
pub id: String, // this can be anything from a URL to a ISRC, EAN or UPC
}
#[derive(Debug, Clone)]
pub struct ExternalIds(pub Vec<ExternalId>);
impl Deref for ExternalIds {
type Target = Vec<ExternalId>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<&ExternalIdMessage> for ExternalId {
    /// Copy the id type and value out of the protobuf message.
    fn from(external_id: &ExternalIdMessage) -> Self {
        let external_type = external_id.get_field_type().to_owned();
        let id = external_id.get_id().to_owned();
        Self { external_type, id }
    }
}
from_repeated_message!(ExternalIdMessage, ExternalIds);

101
metadata/src/image.rs Normal file
View file

@ -0,0 +1,101 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use crate::util::{from_repeated_message, try_from_repeated_message};
use librespot_core::{FileId, SpotifyId};
use librespot_protocol as protocol;
use protocol::metadata::Image as ImageMessage;
pub use protocol::metadata::Image_Size as ImageSize;
use protocol::playlist4_external::PictureSize as PictureSizeMessage;
use protocol::playlist_annotate3::TranscodedPicture as TranscodedPictureMessage;
#[derive(Debug, Clone)]
pub struct Image {
pub id: FileId,
pub size: ImageSize,
pub width: i32,
pub height: i32,
}
#[derive(Debug, Clone)]
pub struct Images(pub Vec<Image>);
impl Deref for Images {
type Target = Vec<Image>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Debug, Clone)]
pub struct PictureSize {
pub target_name: String,
pub url: String,
}
#[derive(Debug, Clone)]
pub struct PictureSizes(pub Vec<PictureSize>);
impl Deref for PictureSizes {
type Target = Vec<PictureSize>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Debug, Clone)]
pub struct TranscodedPicture {
pub target_name: String,
pub uri: SpotifyId,
}
#[derive(Debug, Clone)]
pub struct TranscodedPictures(pub Vec<TranscodedPicture>);
impl Deref for TranscodedPictures {
type Target = Vec<TranscodedPicture>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<&ImageMessage> for Image {
    /// Build an `Image` from its protobuf message; the file id conversion
    /// is delegated to the `From<&ImageMessage> for FileId` impl.
    fn from(image: &ImageMessage) -> Self {
        let (width, height) = (image.get_width(), image.get_height());
        Self {
            id: image.into(),
            size: image.get_size(),
            width,
            height,
        }
    }
}
from_repeated_message!(ImageMessage, Images);
impl From<&PictureSizeMessage> for PictureSize {
    /// Copy the target name and URL out of the protobuf message.
    fn from(size: &PictureSizeMessage) -> Self {
        let target_name = size.get_target_name().to_owned();
        let url = size.get_url().to_owned();
        Self { target_name, url }
    }
}
from_repeated_message!(PictureSizeMessage, PictureSizes);
impl TryFrom<&TranscodedPictureMessage> for TranscodedPicture {
    type Error = librespot_core::Error;
    /// Build a `TranscodedPicture` from its protobuf message; fails if the
    /// picture URI cannot be converted to a `SpotifyId`.
    fn try_from(picture: &TranscodedPictureMessage) -> Result<Self, Self::Error> {
        let uri = picture.try_into()?;
        Ok(Self {
            target_name: picture.get_target_name().to_owned(),
            uri,
        })
    }
}
try_from_repeated_message!(TranscodedPictureMessage, TranscodedPictures);

View file

@ -1,534 +1,56 @@
#![allow(clippy::unused_io_amount)]
#[macro_use] #[macro_use]
extern crate log; extern crate log;
#[macro_use] #[macro_use]
extern crate async_trait; extern crate async_trait;
pub mod cover;
use std::collections::HashMap;
use std::string::FromUtf8Error;
use librespot_core::mercury::MercuryError;
use librespot_core::session::Session;
use librespot_core::spotify_id::{FileId, SpotifyAudioType, SpotifyId, SpotifyIdError};
use librespot_protocol as protocol;
use protobuf::Message; use protobuf::Message;
pub use crate::protocol::metadata::AudioFile_Format as FileFormat; use librespot_core::{Error, Session, SpotifyId};
fn countrylist_contains(list: &str, country: &str) -> bool { pub mod album;
list.chunks(2).any(|cc| cc == country) pub mod artist;
} pub mod audio;
pub mod availability;
pub mod content_rating;
pub mod copyright;
pub mod episode;
pub mod error;
pub mod external_id;
pub mod image;
pub mod playlist;
mod request;
pub mod restriction;
pub mod sale_period;
pub mod show;
pub mod track;
mod util;
pub mod video;
fn parse_restrictions<'s, I>(restrictions: I, country: &str, catalogue: &str) -> bool pub use error::MetadataError;
where use request::RequestResult;
I: IntoIterator<Item = &'s protocol::metadata::Restriction>,
{
let mut forbidden = "".to_string();
let mut has_forbidden = false;
let mut allowed = "".to_string(); pub use album::Album;
let mut has_allowed = false; pub use artist::Artist;
pub use episode::Episode;
let rs = restrictions pub use playlist::Playlist;
.into_iter() pub use show::Show;
.filter(|r| r.get_catalogue_str().contains(&catalogue.to_owned())); pub use track::Track;
for r in rs {
if r.has_countries_forbidden() {
forbidden.push_str(r.get_countries_forbidden());
has_forbidden = true;
}
if r.has_countries_allowed() {
allowed.push_str(r.get_countries_allowed());
has_allowed = true;
}
}
(has_forbidden || has_allowed)
&& (!has_forbidden || !countrylist_contains(forbidden.as_str(), country))
&& (!has_allowed || countrylist_contains(allowed.as_str(), country))
}
// A wrapper with fields the player needs
#[derive(Debug, Clone)]
pub struct AudioItem {
pub id: SpotifyId,
pub uri: String,
pub files: HashMap<FileFormat, FileId>,
pub name: String,
pub duration: i32,
pub available: bool,
pub alternatives: Option<Vec<SpotifyId>>,
}
impl AudioItem {
pub async fn get_audio_item(session: &Session, id: SpotifyId) -> Result<Self, MercuryError> {
match id.audio_type {
SpotifyAudioType::Track => Track::get_audio_item(session, id).await,
SpotifyAudioType::Podcast => Episode::get_audio_item(session, id).await,
SpotifyAudioType::NonPlayable => Err(MercuryError),
}
}
}
#[async_trait]
trait AudioFiles {
async fn get_audio_item(session: &Session, id: SpotifyId) -> Result<AudioItem, MercuryError>;
}
#[async_trait]
impl AudioFiles for Track {
async fn get_audio_item(session: &Session, id: SpotifyId) -> Result<AudioItem, MercuryError> {
match id.to_base62() {
Err(e) => {
warn!("Invalid Track SpotifyId: {}", e);
Err(MercuryError)
}
Ok(uri) => {
let item = Self::get(session, id).await?;
Ok(AudioItem {
id,
uri: format!("spotify:track:{}", uri),
files: item.files,
name: item.name,
duration: item.duration,
available: item.available,
alternatives: Some(item.alternatives),
})
}
}
}
}
#[async_trait]
impl AudioFiles for Episode {
async fn get_audio_item(session: &Session, id: SpotifyId) -> Result<AudioItem, MercuryError> {
match id.to_base62() {
Err(e) => {
warn!("Invalid Episode SpotifyId: {}", e);
Err(MercuryError)
}
Ok(uri) => {
let item = Self::get(session, id).await?;
Ok(AudioItem {
id,
uri: format!("spotify:episode:{}", uri),
files: item.files,
name: item.name,
duration: item.duration,
available: item.available,
alternatives: None,
})
}
}
}
}
#[async_trait] #[async_trait]
pub trait Metadata: Send + Sized + 'static { pub trait Metadata: Send + Sized + 'static {
type Message: protobuf::Message; type Message: protobuf::Message;
fn request_url(id: SpotifyId) -> Result<String, FromUtf8Error>; // Request a protobuf
fn parse(msg: &Self::Message, session: &Session) -> Result<Self, SpotifyIdError>; async fn request(session: &Session, id: SpotifyId) -> RequestResult;
async fn get(session: &Session, id: SpotifyId) -> Result<Self, MercuryError> { // Request a metadata struct
match Self::request_url(id) { async fn get(session: &Session, id: SpotifyId) -> Result<Self, Error> {
Err(e) => { let response = Self::request(session, id).await?;
warn!("Invalid SpotifyId: {}", e); let msg = Self::Message::parse_from_bytes(&response)?;
Err(MercuryError) trace!("Received metadata: {:#?}", msg);
} Self::parse(&msg, id)
Ok(uri) => {
let response = session.mercury().get(uri).await?;
match response.payload.first() {
None => {
warn!("Empty payload");
Err(MercuryError)
}
Some(data) => match Self::Message::parse_from_bytes(data) {
Err(e) => {
warn!("Error parsing message from bytes: {}", e);
Err(MercuryError)
}
Ok(msg) => match Self::parse(&msg, session) {
Err(e) => {
warn!("Error parsing message: {:?}", e);
Err(MercuryError)
}
Ok(parsed_msg) => Ok(parsed_msg),
},
},
}
}
}
}
}
#[derive(Debug, Clone)]
pub struct Track {
pub id: SpotifyId,
pub name: String,
pub duration: i32,
pub album: SpotifyId,
pub artists: Vec<SpotifyId>,
pub files: HashMap<FileFormat, FileId>,
pub alternatives: Vec<SpotifyId>,
pub available: bool,
}
#[derive(Debug, Clone)]
pub struct Album {
pub id: SpotifyId,
pub name: String,
pub artists: Vec<SpotifyId>,
pub tracks: Vec<SpotifyId>,
pub covers: Vec<FileId>,
}
#[derive(Debug, Clone)]
pub struct Episode {
pub id: SpotifyId,
pub name: String,
pub external_url: String,
pub duration: i32,
pub language: String,
pub show: SpotifyId,
pub files: HashMap<FileFormat, FileId>,
pub covers: Vec<FileId>,
pub available: bool,
pub explicit: bool,
}
#[derive(Debug, Clone)]
pub struct Show {
pub id: SpotifyId,
pub name: String,
pub publisher: String,
pub episodes: Vec<SpotifyId>,
pub covers: Vec<FileId>,
}
#[derive(Debug, Clone)]
pub struct Playlist {
pub revision: Vec<u8>,
pub user: String,
pub name: String,
pub tracks: Vec<SpotifyId>,
}
#[derive(Debug, Clone)]
pub struct Artist {
pub id: SpotifyId,
pub name: String,
pub top_tracks: Vec<SpotifyId>,
}
impl Metadata for Track {
type Message = protocol::metadata::Track;
fn request_url(id: SpotifyId) -> Result<String, FromUtf8Error> {
let id = id.to_base16()?;
Ok(format!("hm://metadata/3/track/{}", id))
} }
fn parse(msg: &Self::Message, session: &Session) -> Result<Self, SpotifyIdError> { fn parse(msg: &Self::Message, _: SpotifyId) -> Result<Self, Error>;
let country = session.country();
let artists = msg
.get_artist()
.iter()
.filter_map(|artist| {
if artist.has_gid() {
SpotifyId::from_raw(artist.get_gid()).ok()
} else {
None
}
})
.collect();
let files = msg
.get_file()
.iter()
.filter_map(|file| {
if file.has_file_id() {
let mut dst = [0u8; 20];
dst.clone_from_slice(file.get_file_id());
Some((file.get_format(), FileId(dst)))
} else {
None
}
})
.collect();
Ok(Track {
id: SpotifyId::from_raw(msg.get_gid())?,
name: msg.get_name().to_owned(),
duration: msg.get_duration(),
album: SpotifyId::from_raw(msg.get_album().get_gid())?,
artists,
files,
alternatives: msg
.get_alternative()
.iter()
.filter_map(|alt| SpotifyId::from_raw(alt.get_gid()).ok())
.collect(),
available: parse_restrictions(msg.get_restriction(), &country, "premium"),
})
}
}
impl Metadata for Album {
type Message = protocol::metadata::Album;
fn request_url(id: SpotifyId) -> Result<String, FromUtf8Error> {
let id = id.to_base16()?;
Ok(format!("hm://metadata/3/album/{}", id))
}
fn parse(msg: &Self::Message, _: &Session) -> Result<Self, SpotifyIdError> {
let artists = msg
.get_artist()
.iter()
.filter_map(|artist| {
if artist.has_gid() {
SpotifyId::from_raw(artist.get_gid()).ok()
} else {
None
}
})
.collect();
let tracks = msg
.get_disc()
.iter()
.flat_map(|disc| disc.get_track())
.filter_map(|track| {
if track.has_gid() {
SpotifyId::from_raw(track.get_gid()).ok()
} else {
None
}
})
.collect();
let covers = msg
.get_cover_group()
.get_image()
.iter()
.filter_map(|image| {
if image.has_file_id() {
let mut dst = [0u8; 20];
dst.clone_from_slice(image.get_file_id());
Some(FileId(dst))
} else {
None
}
})
.collect();
Ok(Album {
id: SpotifyId::from_raw(msg.get_gid())?,
name: msg.get_name().to_owned(),
artists,
tracks,
covers,
})
}
}
impl Metadata for Playlist {
type Message = protocol::playlist4changes::SelectedListContent;
fn request_url(id: SpotifyId) -> Result<String, FromUtf8Error> {
let id = id.to_base62()?;
Ok(format!("hm://playlist/v2/playlist/{}", id))
}
fn parse(msg: &Self::Message, _: &Session) -> Result<Self, SpotifyIdError> {
let tracks = msg
.get_contents()
.get_items()
.iter()
.filter_map(|item| {
let uri_split = item.get_uri().split(':');
let uri_parts: Vec<&str> = uri_split.collect();
SpotifyId::from_base62(uri_parts[2]).ok()
})
.collect::<Vec<_>>();
if tracks.len() != msg.get_length() as usize {
warn!(
"Got {} tracks, but the playlist should contain {} tracks.",
tracks.len(),
msg.get_length()
);
}
Ok(Playlist {
revision: msg.get_revision().to_vec(),
name: msg.get_attributes().get_name().to_owned(),
tracks,
user: msg.get_owner_username().to_string(),
})
}
}
impl Metadata for Artist {
type Message = protocol::metadata::Artist;
fn request_url(id: SpotifyId) -> Result<String, FromUtf8Error> {
let id = id.to_base16()?;
Ok(format!("hm://metadata/3/artist/{}", id))
}
fn parse(msg: &Self::Message, session: &Session) -> Result<Self, SpotifyIdError> {
let country = session.country();
let top_tracks: Vec<SpotifyId> = match msg
.get_top_track()
.iter()
.find(|tt| !tt.has_country() || countrylist_contains(tt.get_country(), &country))
{
Some(tracks) => tracks
.get_track()
.iter()
.filter_map(|track| {
if track.has_gid() {
SpotifyId::from_raw(track.get_gid()).ok()
} else {
None
}
})
.collect(),
None => Vec::new(),
};
Ok(Artist {
id: SpotifyId::from_raw(msg.get_gid())?,
name: msg.get_name().to_owned(),
top_tracks,
})
}
}
// Podcast
impl Metadata for Episode {
type Message = protocol::metadata::Episode;
fn request_url(id: SpotifyId) -> Result<String, FromUtf8Error> {
let id = id.to_base16()?;
Ok(format!("hm://metadata/3/episode/{}", id))
}
fn parse(msg: &Self::Message, session: &Session) -> Result<Self, SpotifyIdError> {
let country = session.country();
let files = msg
.get_file()
.iter()
.filter(|file| file.has_file_id())
.map(|file| {
let mut dst = [0u8; 20];
dst.clone_from_slice(file.get_file_id());
(file.get_format(), FileId(dst))
})
.collect();
let covers = msg
.get_covers()
.get_image()
.iter()
.filter(|image| image.has_file_id())
.map(|image| {
let mut dst = [0u8; 20];
dst.clone_from_slice(image.get_file_id());
FileId(dst)
})
.collect();
Ok(Episode {
id: SpotifyId::from_raw(msg.get_gid()).unwrap(),
name: msg.get_name().to_owned(),
external_url: msg.get_external_url().to_owned(),
duration: msg.get_duration().to_owned(),
language: msg.get_language().to_owned(),
show: SpotifyId::from_raw(msg.get_show().get_gid()).unwrap(),
covers,
files,
available: parse_restrictions(msg.get_restriction(), &country, "premium"),
explicit: msg.get_explicit().to_owned(),
})
}
}
impl Metadata for Show {
    type Message = protocol::metadata::Show;

    /// Mercury URL for a show's metadata, keyed by base16 id.
    fn request_url(id: SpotifyId) -> Result<String, FromUtf8Error> {
        let id = id.to_base16()?;
        Ok(format!("hm://metadata/3/show/{}", id))
    }

    /// Build a `Show` from its protobuf message.
    fn parse(msg: &Self::Message, _: &Session) -> Result<Self, SpotifyIdError> {
        // Episode ids, skipping entries without a gid or with invalid raw ids.
        let episodes = msg
            .get_episode()
            .iter()
            .filter_map(|episode| {
                if episode.has_gid() {
                    SpotifyId::from_raw(episode.get_gid()).ok()
                } else {
                    None
                }
            })
            .collect();

        let covers = msg
            .get_covers()
            .get_image()
            .iter()
            .filter(|image| image.has_file_id())
            .map(|image| {
                let mut dst = [0u8; 20];
                // NOTE(review): panics unless the file id is exactly 20 bytes.
                dst.copy_from_slice(image.get_file_id());
                FileId(dst)
            })
            .collect();

        Ok(Show {
            // Propagate malformed ids instead of panicking (was `.unwrap()`),
            // consistent with `Artist::parse`.
            id: SpotifyId::from_raw(msg.get_gid())?,
            name: msg.get_name().to_owned(),
            publisher: msg.get_publisher().to_owned(),
            episodes,
            covers,
        })
    }
}
// Iterator over fixed-size chunks of a `&str`; holds the remaining text
// and the chunk size.
struct StrChunks<'s>(&'s str, usize);

// Extension trait adding `chunks(size)` to `str` (the primitive type has
// no inherent `chunks` method).
trait StrChunksExt {
    fn chunks(&self, size: usize) -> StrChunks;
}

impl StrChunksExt for str {
    fn chunks(&self, size: usize) -> StrChunks {
        StrChunks(self, size)
    }
}
impl<'s> Iterator for StrChunks<'s> {
    type Item = &'s str;

    // NOTE(review): `&data[..size]` panics when fewer than `size` bytes
    // remain (or when slicing splits a multi-byte UTF-8 character), so
    // callers must only feed ASCII strings whose length is a multiple of
    // `size` — confirm at call sites.
    fn next(&mut self) -> Option<&'s str> {
        let &mut StrChunks(data, size) = self;
        if data.is_empty() {
            None
        } else {
            let ret = Some(&data[..size]);
            self.0 = &data[size..];
            ret
        }
    }
} }

View file

@ -0,0 +1,87 @@
use std::convert::{TryFrom, TryInto};
use std::fmt::Debug;
use protobuf::Message;
use crate::{
image::TranscodedPictures,
request::{MercuryRequest, RequestResult},
Metadata,
};
use librespot_core::{Error, Session, SpotifyId};
use librespot_protocol as protocol;
pub use protocol::playlist_annotate3::AbuseReportState;
/// User-facing annotation of a playlist: description, picture and abuse
/// reporting state, decoded from `playlist_annotate3`.
#[derive(Debug, Clone)]
pub struct PlaylistAnnotation {
    pub description: String,
    // NOTE(review): unclear whether this is a URL or a Spotify URI — see
    // the TODO in the protobuf mapping; confirm against server responses.
    pub picture: String,
    pub transcoded_pictures: TranscodedPictures,
    pub has_abuse_reporting: bool,
    pub abuse_report_state: AbuseReportState,
}
#[async_trait]
impl Metadata for PlaylistAnnotation {
    type Message = protocol::playlist_annotate3::PlaylistAnnotation;

    /// Request the current user's annotation for `playlist_id`.
    async fn request(session: &Session, playlist_id: SpotifyId) -> RequestResult {
        let current_user = session.username();
        Self::request_for_user(session, &current_user, playlist_id).await
    }

    /// Parse the protobuf message. Delegates to the `TryFrom` impl so the
    /// field mapping exists in exactly one place (it was duplicated here
    /// verbatim).
    fn parse(msg: &Self::Message, _: SpotifyId) -> Result<Self, Error> {
        Self::try_from(msg)
    }
}
impl PlaylistAnnotation {
    /// Fetch the raw annotation bytes for `playlist_id` owned by `username`.
    async fn request_for_user(
        session: &Session,
        username: &str,
        playlist_id: SpotifyId,
    ) -> RequestResult {
        let uri = format!(
            "hm://playlist-annotate/v1/annotation/user/{}/playlist/{}",
            username,
            playlist_id.to_base62()?
        );
        <Self as MercuryRequest>::request(session, &uri).await
    }

    /// Fetch and decode an annotation for another user's playlist.
    #[allow(dead_code)]
    async fn get_for_user(
        session: &Session,
        username: &str,
        playlist_id: SpotifyId,
    ) -> Result<Self, Error> {
        let response = Self::request_for_user(session, username, playlist_id).await?;
        let msg = <Self as Metadata>::Message::parse_from_bytes(&response)?;
        Self::parse(&msg, playlist_id)
    }
}
// Uses the default Mercury request implementation.
impl MercuryRequest for PlaylistAnnotation {}

/// Field-by-field mapping from the protobuf annotation message.
impl TryFrom<&<PlaylistAnnotation as Metadata>::Message> for PlaylistAnnotation {
    type Error = librespot_core::Error;
    fn try_from(
        annotation: &<PlaylistAnnotation as Metadata>::Message,
    ) -> Result<Self, Self::Error> {
        Ok(Self {
            description: annotation.get_description().to_owned(),
            picture: annotation.get_picture().to_owned(),
            transcoded_pictures: annotation.get_transcoded_picture().try_into()?,
            has_abuse_reporting: annotation.get_is_abuse_reporting_enabled(),
            abuse_report_state: annotation.get_abuse_report_state(),
        })
    }
}

View file

@ -0,0 +1,196 @@
use std::{
collections::HashMap,
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use crate::{image::PictureSizes, util::from_repeated_enum};
use librespot_core::date::Date;
use librespot_protocol as protocol;
use protocol::playlist4_external::FormatListAttribute as PlaylistFormatAttributeMessage;
pub use protocol::playlist4_external::ItemAttributeKind as PlaylistItemAttributeKind;
use protocol::playlist4_external::ItemAttributes as PlaylistItemAttributesMessage;
use protocol::playlist4_external::ItemAttributesPartialState as PlaylistPartialItemAttributesMessage;
pub use protocol::playlist4_external::ListAttributeKind as PlaylistAttributeKind;
use protocol::playlist4_external::ListAttributes as PlaylistAttributesMessage;
use protocol::playlist4_external::ListAttributesPartialState as PlaylistPartialAttributesMessage;
use protocol::playlist4_external::UpdateItemAttributes as PlaylistUpdateItemAttributesMessage;
use protocol::playlist4_external::UpdateListAttributes as PlaylistUpdateAttributesMessage;
/// Playlist-level attributes decoded from `playlist4_external::ListAttributes`.
#[derive(Debug, Clone)]
pub struct PlaylistAttributes {
    pub name: String,
    pub description: String,
    pub picture: Vec<u8>,
    pub is_collaborative: bool,
    pub pl3_version: String,
    pub is_deleted_by_owner: bool,
    pub client_id: String,
    pub format: String,
    pub format_attributes: PlaylistFormatAttribute,
    pub picture_sizes: PictureSizes,
}
/// Newtype over the repeated protobuf enum of list-attribute kinds;
/// `Deref` exposes the inner `Vec` read-only.
#[derive(Debug, Clone)]
pub struct PlaylistAttributeKinds(pub Vec<PlaylistAttributeKind>);

impl Deref for PlaylistAttributeKinds {
    type Target = Vec<PlaylistAttributeKind>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

from_repeated_enum!(PlaylistAttributeKind, PlaylistAttributeKinds);

/// Key/value map of playlist format attributes (e.g. display hints).
#[derive(Debug, Clone)]
pub struct PlaylistFormatAttribute(pub HashMap<String, String>);

impl Deref for PlaylistFormatAttribute {
    type Target = HashMap<String, String>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Per-item attributes: who added the item and when, plus format metadata.
#[derive(Debug, Clone)]
pub struct PlaylistItemAttributes {
    pub added_by: String,
    pub timestamp: Date,
    pub seen_at: Date,
    pub is_public: bool,
    pub format_attributes: PlaylistFormatAttribute,
    pub item_id: Vec<u8>,
}

/// Newtype over the repeated protobuf enum of item-attribute kinds.
#[derive(Debug, Clone)]
pub struct PlaylistItemAttributeKinds(pub Vec<PlaylistItemAttributeKind>);

impl Deref for PlaylistItemAttributeKinds {
    type Target = Vec<PlaylistItemAttributeKind>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

from_repeated_enum!(PlaylistItemAttributeKind, PlaylistItemAttributeKinds);
/// Partial list-attribute state: `values` carries the attributes that are
/// set, `no_value` the kinds explicitly cleared.
#[derive(Debug, Clone)]
pub struct PlaylistPartialAttributes {
    #[allow(dead_code)]
    values: PlaylistAttributes,
    #[allow(dead_code)]
    no_value: PlaylistAttributeKinds,
}

/// Partial item-attribute state; same set/cleared split as above.
#[derive(Debug, Clone)]
pub struct PlaylistPartialItemAttributes {
    #[allow(dead_code)]
    values: PlaylistItemAttributes,
    #[allow(dead_code)]
    no_value: PlaylistItemAttributeKinds,
}
/// A list-attribute change, expressed as old and new partial states.
#[derive(Debug, Clone)]
pub struct PlaylistUpdateAttributes {
    pub new_attributes: PlaylistPartialAttributes,
    pub old_attributes: PlaylistPartialAttributes,
}

/// An item-attribute change at `index`, with old and new partial states.
#[derive(Debug, Clone)]
pub struct PlaylistUpdateItemAttributes {
    pub index: i32,
    pub new_attributes: PlaylistPartialItemAttributes,
    pub old_attributes: PlaylistPartialItemAttributes,
}
/// Field-by-field mapping from the protobuf list-attributes message.
impl TryFrom<&PlaylistAttributesMessage> for PlaylistAttributes {
    type Error = librespot_core::Error;
    fn try_from(attributes: &PlaylistAttributesMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            name: attributes.get_name().to_owned(),
            description: attributes.get_description().to_owned(),
            picture: attributes.get_picture().to_owned(),
            is_collaborative: attributes.get_collaborative(),
            pl3_version: attributes.get_pl3_version().to_owned(),
            is_deleted_by_owner: attributes.get_deleted_by_owner(),
            client_id: attributes.get_client_id().to_owned(),
            format: attributes.get_format().to_owned(),
            format_attributes: attributes.get_format_attributes().into(),
            picture_sizes: attributes.get_picture_size().into(),
        })
    }
}
impl From<&[PlaylistFormatAttributeMessage]> for PlaylistFormatAttribute {
fn from(attributes: &[PlaylistFormatAttributeMessage]) -> Self {
let format_attributes = attributes
.iter()
.map(|attribute| {
(
attribute.get_key().to_owned(),
attribute.get_value().to_owned(),
)
})
.collect();
PlaylistFormatAttribute(format_attributes)
}
}
/// Mapping from the protobuf item-attributes message; fails only when a
/// millisecond timestamp cannot be converted to a `Date`.
impl TryFrom<&PlaylistItemAttributesMessage> for PlaylistItemAttributes {
    type Error = librespot_core::Error;
    fn try_from(attributes: &PlaylistItemAttributesMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            added_by: attributes.get_added_by().to_owned(),
            timestamp: Date::from_timestamp_ms(attributes.get_timestamp())?,
            seen_at: Date::from_timestamp_ms(attributes.get_seen_at())?,
            is_public: attributes.get_public(),
            format_attributes: attributes.get_format_attributes().into(),
            item_id: attributes.get_item_id().to_owned(),
        })
    }
}
/// Split a partial list-attributes message into set values and cleared kinds.
impl TryFrom<&PlaylistPartialAttributesMessage> for PlaylistPartialAttributes {
    type Error = librespot_core::Error;
    fn try_from(attributes: &PlaylistPartialAttributesMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            values: attributes.get_values().try_into()?,
            no_value: attributes.get_no_value().into(),
        })
    }
}

/// Split a partial item-attributes message into set values and cleared kinds.
impl TryFrom<&PlaylistPartialItemAttributesMessage> for PlaylistPartialItemAttributes {
    type Error = librespot_core::Error;
    fn try_from(attributes: &PlaylistPartialItemAttributesMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            values: attributes.get_values().try_into()?,
            no_value: attributes.get_no_value().into(),
        })
    }
}
/// Decode a list-attribute update into its old/new partial states.
impl TryFrom<&PlaylistUpdateAttributesMessage> for PlaylistUpdateAttributes {
    type Error = librespot_core::Error;
    fn try_from(update: &PlaylistUpdateAttributesMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            new_attributes: update.get_new_attributes().try_into()?,
            old_attributes: update.get_old_attributes().try_into()?,
        })
    }
}

/// Decode an item-attribute update (index plus old/new partial states).
impl TryFrom<&PlaylistUpdateItemAttributesMessage> for PlaylistUpdateItemAttributes {
    type Error = librespot_core::Error;
    fn try_from(update: &PlaylistUpdateItemAttributesMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            index: update.get_index(),
            new_attributes: update.get_new_attributes().try_into()?,
            old_attributes: update.get_old_attributes().try_into()?,
        })
    }
}

View file

@ -0,0 +1,29 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
};
use super::operation::PlaylistOperations;
use librespot_core::SpotifyId;
use librespot_protocol as protocol;
use protocol::playlist4_external::Diff as DiffMessage;
/// A playlist delta: the operations taking `from_revision` to `to_revision`.
#[derive(Debug, Clone)]
pub struct PlaylistDiff {
    pub from_revision: SpotifyId,
    pub operations: PlaylistOperations,
    pub to_revision: SpotifyId,
}

impl TryFrom<&DiffMessage> for PlaylistDiff {
    type Error = librespot_core::Error;
    fn try_from(diff: &DiffMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            from_revision: diff.get_from_revision().try_into()?,
            operations: diff.get_ops().try_into()?,
            to_revision: diff.get_to_revision().try_into()?,
        })
    }
}

View file

@ -0,0 +1,105 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use crate::util::try_from_repeated_message;
use super::{
attribute::{PlaylistAttributes, PlaylistItemAttributes},
permission::Capabilities,
};
use librespot_core::{date::Date, SpotifyId};
use librespot_protocol as protocol;
use protocol::playlist4_external::Item as PlaylistItemMessage;
use protocol::playlist4_external::ListItems as PlaylistItemsMessage;
use protocol::playlist4_external::MetaItem as PlaylistMetaItemMessage;
/// One entry of a playlist: a track/episode id plus its item attributes.
#[derive(Debug, Clone)]
pub struct PlaylistItem {
    pub id: SpotifyId,
    pub attributes: PlaylistItemAttributes,
}

/// Newtype over a list of playlist items; `Deref` exposes the inner `Vec`.
#[derive(Debug, Clone)]
pub struct PlaylistItems(pub Vec<PlaylistItem>);

impl Deref for PlaylistItems {
    type Target = Vec<PlaylistItem>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// A (possibly truncated) window of playlist items starting at `position`.
#[derive(Debug, Clone)]
pub struct PlaylistItemList {
    pub position: i32,
    pub is_truncated: bool,
    pub items: PlaylistItems,
    pub meta_items: PlaylistMetaItems,
}
/// Metadata about a referenced playlist revision (owner, capabilities, …).
#[derive(Debug, Clone)]
pub struct PlaylistMetaItem {
    pub revision: SpotifyId,
    pub attributes: PlaylistAttributes,
    pub length: i32,
    pub timestamp: Date,
    pub owner_username: String,
    pub has_abuse_reporting: bool,
    pub capabilities: Capabilities,
}

/// Newtype over a list of meta items; `Deref` exposes the inner `Vec`.
#[derive(Debug, Clone)]
pub struct PlaylistMetaItems(pub Vec<PlaylistMetaItem>);

impl Deref for PlaylistMetaItems {
    type Target = Vec<PlaylistMetaItem>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl TryFrom<&PlaylistItemMessage> for PlaylistItem {
    type Error = librespot_core::Error;
    fn try_from(item: &PlaylistItemMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            // The id is derived from the item message itself (its URI field).
            id: item.try_into()?,
            attributes: item.get_attributes().try_into()?,
        })
    }
}

try_from_repeated_message!(PlaylistItemMessage, PlaylistItems);
/// Decode a `ListItems` window into its position, truncation flag and items.
impl TryFrom<&PlaylistItemsMessage> for PlaylistItemList {
    type Error = librespot_core::Error;
    fn try_from(list_items: &PlaylistItemsMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            position: list_items.get_pos(),
            is_truncated: list_items.get_truncated(),
            items: list_items.get_items().try_into()?,
            meta_items: list_items.get_meta_items().try_into()?,
        })
    }
}
impl TryFrom<&PlaylistMetaItemMessage> for PlaylistMetaItem {
    type Error = librespot_core::Error;
    fn try_from(item: &PlaylistMetaItemMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            // The revision id is derived from the message itself.
            revision: item.try_into()?,
            attributes: item.get_attributes().try_into()?,
            length: item.get_length(),
            timestamp: Date::from_timestamp_ms(item.get_timestamp())?,
            owner_username: item.get_owner_username().to_owned(),
            has_abuse_reporting: item.get_abuse_reporting_enabled(),
            capabilities: item.get_capabilities().into(),
        })
    }
}

try_from_repeated_message!(PlaylistMetaItemMessage, PlaylistMetaItems);

View file

@ -0,0 +1,229 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use protobuf::Message;
use crate::{
request::{MercuryRequest, RequestResult},
util::{from_repeated_enum, try_from_repeated_message},
Metadata,
};
use super::{
attribute::PlaylistAttributes, diff::PlaylistDiff, item::PlaylistItemList,
permission::Capabilities,
};
use librespot_core::{
date::Date,
spotify_id::{NamedSpotifyId, SpotifyId},
Error, Session,
};
use librespot_protocol as protocol;
use protocol::playlist4_external::GeoblockBlockingType as Geoblock;
/// Newtype over the playlist's geoblocking rules; `Deref` exposes the `Vec`.
#[derive(Debug, Clone)]
pub struct Geoblocks(Vec<Geoblock>);

impl Deref for Geoblocks {
    type Target = Vec<Geoblock>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// A playlist decorated with its id; all other fields mirror
/// `SelectedListContent` (the wire message carries no id of its own).
#[derive(Debug, Clone)]
pub struct Playlist {
    pub id: NamedSpotifyId,
    pub revision: Vec<u8>,
    pub length: i32,
    pub attributes: PlaylistAttributes,
    pub contents: PlaylistItemList,
    pub diff: Option<PlaylistDiff>,
    pub sync_result: Option<PlaylistDiff>,
    pub resulting_revisions: Playlists,
    pub has_multiple_heads: bool,
    pub is_up_to_date: bool,
    pub nonces: Vec<i64>,
    pub timestamp: Date,
    pub has_abuse_reporting: bool,
    pub capabilities: Capabilities,
    pub geoblocks: Geoblocks,
}
/// Newtype over a list of playlist revision ids.
#[derive(Debug, Clone)]
pub struct Playlists(pub Vec<SpotifyId>);

impl Deref for Playlists {
    type Target = Vec<SpotifyId>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

/// A user's root playlist folder, wrapping the raw list content.
#[derive(Debug, Clone)]
pub struct RootPlaylist(pub SelectedListContent);

impl Deref for RootPlaylist {
    type Target = SelectedListContent;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Direct decoding of `playlist4_external::SelectedListContent` — the raw
/// playlist payload, including its owner but not the playlist id.
#[derive(Debug, Clone)]
pub struct SelectedListContent {
    pub revision: Vec<u8>,
    pub length: i32,
    pub attributes: PlaylistAttributes,
    pub contents: PlaylistItemList,
    pub diff: Option<PlaylistDiff>,
    pub sync_result: Option<PlaylistDiff>,
    pub resulting_revisions: Playlists,
    pub has_multiple_heads: bool,
    pub is_up_to_date: bool,
    pub nonces: Vec<i64>,
    pub timestamp: Date,
    pub owner_username: String,
    pub has_abuse_reporting: bool,
    pub capabilities: Capabilities,
    pub geoblocks: Geoblocks,
}
impl Playlist {
    /// Fetch the raw playlist bytes for another user's playlist over Mercury.
    #[allow(dead_code)]
    async fn request_for_user(
        session: &Session,
        username: &str,
        playlist_id: SpotifyId,
    ) -> RequestResult {
        let uri = format!(
            "hm://playlist/user/{}/playlist/{}",
            username,
            playlist_id.to_base62()?
        );
        <Self as MercuryRequest>::request(session, &uri).await
    }

    /// Fetch and decode another user's playlist.
    #[allow(dead_code)]
    pub async fn get_for_user(
        session: &Session,
        username: &str,
        playlist_id: SpotifyId,
    ) -> Result<Self, Error> {
        let response = Self::request_for_user(session, username, playlist_id).await?;
        let msg = <Self as Metadata>::Message::parse_from_bytes(&response)?;
        Self::parse(&msg, playlist_id)
    }

    /// All item ids in playlist order. Logs a warning (but still returns
    /// the items) when the count differs from the declared `length`.
    pub fn tracks(&self) -> Vec<SpotifyId> {
        let tracks = self
            .contents
            .items
            .iter()
            .map(|item| item.id)
            .collect::<Vec<_>>();

        let length = tracks.len();
        let expected_length = self.length as usize;
        if length != expected_length {
            warn!(
                "Got {} tracks, but the list should contain {} tracks.",
                length, expected_length,
            );
        }

        tracks
    }

    /// The playlist's display name.
    pub fn name(&self) -> &str {
        &self.attributes.name
    }
}
// Uses the default Mercury request implementation.
impl MercuryRequest for Playlist {}

#[async_trait]
impl Metadata for Playlist {
    type Message = protocol::playlist4_external::SelectedListContent;

    /// Request the current user's view of `playlist_id` over Mercury.
    async fn request(session: &Session, playlist_id: SpotifyId) -> RequestResult {
        let uri = format!("hm://playlist/v2/playlist/{}", playlist_id.to_base62()?);
        <Self as MercuryRequest>::request(session, &uri).await
    }

    fn parse(msg: &Self::Message, id: SpotifyId) -> Result<Self, Error> {
        // the playlist proto doesn't contain the id so we decorate it
        let playlist = SelectedListContent::try_from(msg)?;
        let id = NamedSpotifyId::from_spotify_id(id, playlist.owner_username);

        Ok(Self {
            id,
            revision: playlist.revision,
            length: playlist.length,
            attributes: playlist.attributes,
            contents: playlist.contents,
            diff: playlist.diff,
            sync_result: playlist.sync_result,
            resulting_revisions: playlist.resulting_revisions,
            has_multiple_heads: playlist.has_multiple_heads,
            is_up_to_date: playlist.is_up_to_date,
            nonces: playlist.nonces,
            timestamp: playlist.timestamp,
            has_abuse_reporting: playlist.has_abuse_reporting,
            capabilities: playlist.capabilities,
            geoblocks: playlist.geoblocks,
        })
    }
}
// Uses the default Mercury request implementation.
impl MercuryRequest for RootPlaylist {}

impl RootPlaylist {
    /// Fetch the raw root-list bytes for `username` over Mercury.
    #[allow(dead_code)]
    async fn request_for_user(session: &Session, username: &str) -> RequestResult {
        let uri = format!("hm://playlist/user/{}/rootlist", username,);
        <Self as MercuryRequest>::request(session, &uri).await
    }

    /// Fetch and decode a user's root playlist folder.
    #[allow(dead_code)]
    pub async fn get_root_for_user(session: &Session, username: &str) -> Result<Self, Error> {
        let response = Self::request_for_user(session, username).await?;
        let msg = protocol::playlist4_external::SelectedListContent::parse_from_bytes(&response)?;
        Ok(Self(SelectedListContent::try_from(&msg)?))
    }
}
/// Field-by-field decoding of the raw playlist message; optional `diff`
/// and `sync_result` are converted only when present.
impl TryFrom<&<Playlist as Metadata>::Message> for SelectedListContent {
    type Error = librespot_core::Error;
    fn try_from(playlist: &<Playlist as Metadata>::Message) -> Result<Self, Self::Error> {
        Ok(Self {
            revision: playlist.get_revision().to_owned(),
            length: playlist.get_length(),
            attributes: playlist.get_attributes().try_into()?,
            contents: playlist.get_contents().try_into()?,
            diff: playlist.diff.as_ref().map(TryInto::try_into).transpose()?,
            sync_result: playlist
                .sync_result
                .as_ref()
                .map(TryInto::try_into)
                .transpose()?,
            resulting_revisions: playlist.get_resulting_revisions().try_into()?,
            has_multiple_heads: playlist.get_multiple_heads(),
            is_up_to_date: playlist.get_up_to_date(),
            nonces: playlist.get_nonces().into(),
            timestamp: Date::from_timestamp_ms(playlist.get_timestamp())?,
            owner_username: playlist.get_owner_username().to_owned(),
            has_abuse_reporting: playlist.get_abuse_reporting_enabled(),
            capabilities: playlist.get_capabilities().into(),
            geoblocks: playlist.get_geoblock().into(),
        })
    }
}
from_repeated_enum!(Geoblock, Geoblocks);
try_from_repeated_message!(Vec<u8>, Playlists);

View file

@ -0,0 +1,10 @@
pub mod annotation;
pub mod attribute;
pub mod diff;
pub mod item;
pub mod list;
pub mod operation;
pub mod permission;
pub use annotation::PlaylistAnnotation;
pub use list::Playlist;

View file

@ -0,0 +1,113 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use crate::{
playlist::{
attribute::{PlaylistUpdateAttributes, PlaylistUpdateItemAttributes},
item::PlaylistItems,
},
util::try_from_repeated_message,
};
use librespot_protocol as protocol;
use protocol::playlist4_external::Add as PlaylistAddMessage;
use protocol::playlist4_external::Mov as PlaylistMoveMessage;
use protocol::playlist4_external::Op as PlaylistOperationMessage;
pub use protocol::playlist4_external::Op_Kind as PlaylistOperationKind;
use protocol::playlist4_external::Rem as PlaylistRemoveMessage;
/// One playlist mutation. `kind` selects which of the payload fields is
/// meaningful; the others are decoded from default (empty) submessages.
#[derive(Debug, Clone)]
pub struct PlaylistOperation {
    pub kind: PlaylistOperationKind,
    pub add: PlaylistOperationAdd,
    pub rem: PlaylistOperationRemove,
    pub mov: PlaylistOperationMove,
    pub update_item_attributes: PlaylistUpdateItemAttributes,
    pub update_list_attributes: PlaylistUpdateAttributes,
}

/// Newtype over a list of operations; `Deref` exposes the inner `Vec`.
#[derive(Debug, Clone)]
pub struct PlaylistOperations(pub Vec<PlaylistOperation>);

impl Deref for PlaylistOperations {
    type Target = Vec<PlaylistOperation>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Payload of an "add" operation: items inserted at `from_index`, or at
/// the start/end when the corresponding flag is set.
#[derive(Debug, Clone)]
pub struct PlaylistOperationAdd {
    pub from_index: i32,
    pub items: PlaylistItems,
    pub add_last: bool,
    pub add_first: bool,
}

/// Payload of a "move" operation: `length` items from `from_index` to `to_index`.
#[derive(Debug, Clone)]
pub struct PlaylistOperationMove {
    pub from_index: i32,
    pub length: i32,
    pub to_index: i32,
}

/// Payload of a "remove" operation; items may be addressed by range or,
/// when `has_items_as_key` is set, by the items themselves.
#[derive(Debug, Clone)]
pub struct PlaylistOperationRemove {
    pub from_index: i32,
    pub length: i32,
    pub items: PlaylistItems,
    pub has_items_as_key: bool,
}
/// Decode an operation message; all payload variants are converted
/// eagerly, `kind` tells the consumer which one applies.
impl TryFrom<&PlaylistOperationMessage> for PlaylistOperation {
    type Error = librespot_core::Error;
    fn try_from(operation: &PlaylistOperationMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            kind: operation.get_kind(),
            add: operation.get_add().try_into()?,
            rem: operation.get_rem().try_into()?,
            mov: operation.get_mov().into(),
            update_item_attributes: operation.get_update_item_attributes().try_into()?,
            update_list_attributes: operation.get_update_list_attributes().try_into()?,
        })
    }
}

try_from_repeated_message!(PlaylistOperationMessage, PlaylistOperations);
impl TryFrom<&PlaylistAddMessage> for PlaylistOperationAdd {
    type Error = librespot_core::Error;
    fn try_from(add: &PlaylistAddMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            from_index: add.get_from_index(),
            items: add.get_items().try_into()?,
            add_last: add.get_add_last(),
            add_first: add.get_add_first(),
        })
    }
}
// Infallible: a move is just three plain integers.
impl From<&PlaylistMoveMessage> for PlaylistOperationMove {
    fn from(mov: &PlaylistMoveMessage) -> Self {
        Self {
            from_index: mov.get_from_index(),
            length: mov.get_length(),
            to_index: mov.get_to_index(),
        }
    }
}

// Fallible because the removed items themselves must be decoded.
impl TryFrom<&PlaylistRemoveMessage> for PlaylistOperationRemove {
    type Error = librespot_core::Error;
    fn try_from(remove: &PlaylistRemoveMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            from_index: remove.get_from_index(),
            length: remove.get_length(),
            items: remove.get_items().try_into()?,
            has_items_as_key: remove.get_items_as_key(),
        })
    }
}

View file

@ -0,0 +1,42 @@
use std::{fmt::Debug, ops::Deref};
use crate::util::from_repeated_enum;
use librespot_protocol as protocol;
use protocol::playlist_permission::Capabilities as CapabilitiesMessage;
use protocol::playlist_permission::PermissionLevel;
/// What the current user may do with a playlist, decoded from
/// `playlist_permission::Capabilities`.
#[derive(Debug, Clone)]
pub struct Capabilities {
    pub can_view: bool,
    pub can_administrate_permissions: bool,
    pub grantable_levels: PermissionLevels,
    pub can_edit_metadata: bool,
    pub can_edit_items: bool,
    pub can_cancel_membership: bool,
}

/// Newtype over the permission levels this user may grant to others.
#[derive(Debug, Clone)]
pub struct PermissionLevels(pub Vec<PermissionLevel>);

impl Deref for PermissionLevels {
    type Target = Vec<PermissionLevel>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Field-by-field mapping from the protobuf capabilities message.
impl From<&CapabilitiesMessage> for Capabilities {
    fn from(playlist: &CapabilitiesMessage) -> Self {
        Self {
            can_view: playlist.get_can_view(),
            can_administrate_permissions: playlist.get_can_administrate_permissions(),
            grantable_levels: playlist.get_grantable_level().into(),
            can_edit_metadata: playlist.get_can_edit_metadata(),
            can_edit_items: playlist.get_can_edit_items(),
            can_cancel_membership: playlist.get_can_cancel_membership(),
        }
    }
}
from_repeated_enum!(PermissionLevel, PermissionLevels);

37
metadata/src/request.rs Normal file
View file

@ -0,0 +1,37 @@
use std::fmt::Write;
use crate::MetadataError;
use librespot_core::{Error, Session};
pub type RequestResult = Result<bytes::Bytes, Error>;
#[async_trait]
pub trait MercuryRequest {
    /// Fetch `uri` over Mercury, appending the session's country (and the
    /// user's product type, when known) as query parameters for metrics.
    /// Returns the first payload part, or an "unavailable" error when the
    /// response carries no payload.
    async fn request(session: &Session, uri: &str) -> RequestResult {
        let mut metrics_uri = uri.to_owned();

        // '&' if the URI already has a query string, '?' otherwise.
        let separator = match metrics_uri.find('?') {
            Some(_) => "&",
            None => "?",
        };

        let _ = write!(metrics_uri, "{}country={}", separator, session.country());

        if let Some(product) = session.get_user_attribute("type") {
            let _ = write!(metrics_uri, "&product={}", product);
        }

        trace!("Requesting {}", metrics_uri);

        let request = session.mercury().get(metrics_uri)?;
        let response = request.await?;
        match response.payload.first() {
            Some(data) => {
                let data = data.to_vec().into();
                trace!("Received metadata: {:?}", data);
                Ok(data)
            }
            None => Err(Error::unavailable(MetadataError::Empty)),
        }
    }
}

104
metadata/src/restriction.rs Normal file
View file

@ -0,0 +1,104 @@
use std::{fmt::Debug, ops::Deref};
use crate::util::{from_repeated_enum, from_repeated_message};
use protocol::metadata::Restriction as RestrictionMessage;
use librespot_protocol as protocol;
pub use protocol::metadata::Restriction_Catalogue as RestrictionCatalogue;
pub use protocol::metadata::Restriction_Type as RestrictionType;
/// A catalogue/country availability restriction from track metadata.
/// `countries_allowed`/`countries_forbidden` are `None` when the
/// corresponding field is absent from the message (not merely empty).
#[derive(Debug, Clone)]
pub struct Restriction {
    pub catalogues: RestrictionCatalogues,
    pub restriction_type: RestrictionType,
    pub catalogue_strs: Vec<String>,
    pub countries_allowed: Option<Vec<String>>,
    pub countries_forbidden: Option<Vec<String>>,
}
/// Newtype over a list of restrictions; `Deref` exposes the inner `Vec`.
#[derive(Debug, Clone)]
pub struct Restrictions(pub Vec<Restriction>);

impl Deref for Restrictions {
    type Target = Vec<Restriction>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

/// Newtype over a list of restriction catalogues.
#[derive(Debug, Clone)]
pub struct RestrictionCatalogues(pub Vec<RestrictionCatalogue>);

impl Deref for RestrictionCatalogues {
    type Target = Vec<RestrictionCatalogue>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Restriction {
    /// Split a concatenated country-code string (e.g. "DEFRNL") into
    /// owned two-letter codes, using the local `StrChunks` helper.
    fn parse_country_codes(country_codes: &str) -> Vec<String> {
        let mut codes = Vec::new();
        for code in country_codes.chunks(2) {
            codes.push(code.to_owned());
        }
        codes
    }
}
impl From<&RestrictionMessage> for Restriction {
    fn from(restriction: &RestrictionMessage) -> Self {
        // Presence of the field distinguishes "no restriction" from an
        // empty country list, hence `Option` rather than an empty `Vec`.
        let countries_allowed = if restriction.has_countries_allowed() {
            Some(Self::parse_country_codes(
                restriction.get_countries_allowed(),
            ))
        } else {
            None
        };

        let countries_forbidden = if restriction.has_countries_forbidden() {
            Some(Self::parse_country_codes(
                restriction.get_countries_forbidden(),
            ))
        } else {
            None
        };

        Self {
            catalogues: restriction.get_catalogue().into(),
            restriction_type: restriction.get_field_type(),
            catalogue_strs: restriction.get_catalogue_str().to_vec(),
            countries_allowed,
            countries_forbidden,
        }
    }
}
from_repeated_message!(RestrictionMessage, Restrictions);
from_repeated_enum!(RestrictionCatalogue, RestrictionCatalogues);
/// Iterator over fixed-size chunks of a `&str`; holds the remaining text
/// and the chunk size. Used to split concatenated two-letter country codes.
struct StrChunks<'s>(&'s str, usize);

/// Extension trait adding `chunks(size)` to `str`.
trait StrChunksExt {
    fn chunks(&self, size: usize) -> StrChunks;
}

impl StrChunksExt for str {
    fn chunks(&self, size: usize) -> StrChunks {
        StrChunks(self, size)
    }
}

impl<'s> Iterator for StrChunks<'s> {
    type Item = &'s str;

    fn next(&mut self) -> Option<&'s str> {
        let &mut StrChunks(data, size) = self;
        if data.is_empty() {
            None
        } else {
            // Clamp to the remaining length so a trailing short chunk is
            // yielded instead of panicking on the out-of-bounds slice
            // `&data[..size]` (e.g. an odd-length country-code string).
            // NOTE(review): still byte-indexed — assumes ASCII input.
            let len = size.min(data.len());
            let ret = Some(&data[..len]);
            self.0 = &data[len..];
            ret
        }
    }
}

View file

@ -0,0 +1,42 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use crate::{restriction::Restrictions, util::try_from_repeated_message};
use librespot_core::date::Date;
use librespot_protocol as protocol;
use protocol::metadata::SalePeriod as SalePeriodMessage;
/// A time window during which a track is on sale, with its restrictions.
#[derive(Debug, Clone)]
pub struct SalePeriod {
    pub restrictions: Restrictions,
    pub start: Date,
    pub end: Date,
}

/// Newtype over a list of sale periods; `Deref` exposes the inner `Vec`.
#[derive(Debug, Clone)]
pub struct SalePeriods(pub Vec<SalePeriod>);

impl Deref for SalePeriods {
    type Target = Vec<SalePeriod>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Decode a sale period; fails only if a start/end date cannot convert.
impl TryFrom<&SalePeriodMessage> for SalePeriod {
    type Error = librespot_core::Error;
    fn try_from(sale_period: &SalePeriodMessage) -> Result<Self, Self::Error> {
        Ok(Self {
            restrictions: sale_period.get_restriction().into(),
            start: sale_period.get_start().try_into()?,
            end: sale_period.get_end().try_into()?,
        })
    }
}

try_from_repeated_message!(SalePeriodMessage, SalePeriods);

74
metadata/src/show.rs Normal file
View file

@ -0,0 +1,74 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
};
use crate::{
availability::Availabilities, copyright::Copyrights, episode::Episodes, image::Images,
restriction::Restrictions, Metadata, RequestResult,
};
use librespot_core::{Error, Session, SpotifyId};
use librespot_protocol as protocol;
pub use protocol::metadata::Show_ConsumptionOrder as ShowConsumptionOrder;
pub use protocol::metadata::Show_MediaType as ShowMediaType;
/// A podcast show and its episode list, decoded from `metadata::Show`.
#[derive(Debug, Clone)]
pub struct Show {
    pub id: SpotifyId,
    pub name: String,
    pub description: String,
    pub publisher: String,
    pub language: String,
    pub is_explicit: bool,
    pub covers: Images,
    pub episodes: Episodes,
    pub copyrights: Copyrights,
    pub restrictions: Restrictions,
    pub keywords: Vec<String>,
    pub media_type: ShowMediaType,
    pub consumption_order: ShowConsumptionOrder,
    pub availability: Availabilities,
    pub trailer_uri: SpotifyId,
    pub has_music_and_talk: bool,
    pub is_audiobook: bool,
}
#[async_trait]
impl Metadata for Show {
    type Message = protocol::metadata::Show;

    /// Fetch show metadata via the spclient HTTP API.
    async fn request(session: &Session, show_id: SpotifyId) -> RequestResult {
        session.spclient().get_show_metadata(show_id).await
    }

    // The id is embedded in the message, so the passed id is ignored.
    fn parse(msg: &Self::Message, _: SpotifyId) -> Result<Self, Error> {
        Self::try_from(msg)
    }
}
/// Field-by-field mapping from the protobuf show message.
impl TryFrom<&<Self as Metadata>::Message> for Show {
    type Error = librespot_core::Error;
    fn try_from(show: &<Self as Metadata>::Message) -> Result<Self, Self::Error> {
        Ok(Self {
            // The show id is derived from the message itself.
            id: show.try_into()?,
            name: show.get_name().to_owned(),
            description: show.get_description().to_owned(),
            publisher: show.get_publisher().to_owned(),
            language: show.get_language().to_owned(),
            is_explicit: show.get_explicit(),
            covers: show.get_cover_image().get_image().into(),
            episodes: show.get_episode().try_into()?,
            copyrights: show.get_copyright().into(),
            restrictions: show.get_restriction().into(),
            keywords: show.get_keyword().to_vec(),
            media_type: show.get_media_type(),
            consumption_order: show.get_consumption_order(),
            availability: show.get_availability().try_into()?,
            // NOTE(review): fails when the trailer URI is absent/empty —
            // confirm every show message carries one.
            trailer_uri: SpotifyId::from_uri(show.get_trailer_uri())?,
            has_music_and_talk: show.get_music_and_talk(),
            is_audiobook: show.get_is_audiobook(),
        })
    }
}

149
metadata/src/track.rs Normal file
View file

@ -0,0 +1,149 @@
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::Deref,
};
use uuid::Uuid;
use crate::{
artist::{Artists, ArtistsWithRole},
audio::{
file::AudioFiles,
item::{AudioItem, AudioItemResult, InnerAudioItem},
},
availability::{Availabilities, UnavailabilityReason},
content_rating::ContentRatings,
external_id::ExternalIds,
restriction::Restrictions,
sale_period::SalePeriods,
util::try_from_repeated_message,
Metadata, RequestResult,
};
use librespot_core::{date::Date, Error, Session, SpotifyId};
use librespot_protocol as protocol;
/// A track and its playback metadata, decoded from `metadata::Track`.
#[derive(Debug, Clone)]
pub struct Track {
    pub id: SpotifyId,
    pub name: String,
    pub album: SpotifyId,
    pub artists: Artists,
    pub number: i32,
    pub disc_number: i32,
    pub duration: i32,
    pub popularity: i32,
    pub is_explicit: bool,
    pub external_ids: ExternalIds,
    pub restrictions: Restrictions,
    pub files: AudioFiles,
    // Alternative track ids that may be played when this one is restricted.
    pub alternatives: Tracks,
    pub sale_periods: SalePeriods,
    pub previews: AudioFiles,
    pub tags: Vec<String>,
    pub earliest_live_timestamp: Date,
    pub has_lyrics: bool,
    pub availability: Availabilities,
    pub licensor: Uuid,
    pub language_of_performance: Vec<String>,
    pub content_ratings: ContentRatings,
    pub original_title: String,
    pub version_title: String,
    pub artists_with_role: ArtistsWithRole,
}

/// Newtype over a list of track ids; `Deref` exposes the inner `Vec`.
#[derive(Debug, Clone)]
pub struct Tracks(pub Vec<SpotifyId>);

impl Deref for Tracks {
    type Target = Vec<SpotifyId>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[async_trait]
impl InnerAudioItem for Track {
    /// Fetch a track and reduce it to the playback-oriented `AudioItem`,
    /// resolving availability for the current user.
    async fn get_audio_item(session: &Session, id: SpotifyId) -> AudioItemResult {
        let track = Self::get(session, id).await?;

        // Only expose alternatives when any exist.
        let alternatives = {
            if track.alternatives.is_empty() {
                None
            } else {
                Some(track.alternatives.clone())
            }
        };

        // TODO: check meaning of earliest_live_timestamp in
        // Before that timestamp the track is treated as embargoed.
        let availability = if Date::now_utc() < track.earliest_live_timestamp {
            Err(UnavailabilityReason::Embargo)
        } else {
            Self::available_for_user(
                &session.user_data(),
                &track.availability,
                &track.restrictions,
            )
        };

        Ok(AudioItem {
            id,
            spotify_uri: id.to_uri()?,
            files: track.files,
            name: track.name,
            duration: track.duration,
            availability,
            alternatives,
            is_explicit: track.is_explicit,
        })
    }
}
#[async_trait]
impl Metadata for Track {
    type Message = protocol::metadata::Track;

    /// Fetch track metadata via the spclient HTTP API.
    async fn request(session: &Session, track_id: SpotifyId) -> RequestResult {
        session.spclient().get_track_metadata(track_id).await
    }

    // The id is embedded in the message, so the passed id is ignored.
    fn parse(msg: &Self::Message, _: SpotifyId) -> Result<Self, Error> {
        Self::try_from(msg)
    }
}
impl TryFrom<&<Self as Metadata>::Message> for Track {
    type Error = librespot_core::Error;

    /// Converts a `protocol::metadata::Track` protobuf message into a `Track`.
    ///
    /// # Errors
    /// Fails when any embedded ID, timestamp or repeated message cannot be
    /// converted (e.g. a malformed `SpotifyId`).
    fn try_from(track: &<Self as Metadata>::Message) -> Result<Self, Self::Error> {
        Ok(Self {
            id: track.try_into()?,
            name: track.get_name().to_owned(),
            album: track.get_album().try_into()?,
            artists: track.get_artist().try_into()?,
            number: track.get_number(),
            disc_number: track.get_disc_number(),
            duration: track.get_duration(),
            popularity: track.get_popularity(),
            is_explicit: track.get_explicit(),
            external_ids: track.get_external_id().into(),
            restrictions: track.get_restriction().into(),
            files: track.get_file().into(),
            alternatives: track.get_alternative().try_into()?,
            sale_periods: track.get_sale_period().try_into()?,
            previews: track.get_preview().into(),
            tags: track.get_tags().to_vec(),
            earliest_live_timestamp: Date::from_timestamp_ms(track.get_earliest_live_timestamp())?,
            has_lyrics: track.get_has_lyrics(),
            // A malformed licensor UUID degrades to the nil UUID instead of
            // failing the whole conversion (`Uuid::default()` is the nil UUID).
            licensor: Uuid::from_slice(track.get_licensor().get_uuid()).unwrap_or_default(),
            language_of_performance: track.get_language_of_performance().to_vec(),
            content_ratings: track.get_content_rating().into(),
            original_title: track.get_original_title().to_owned(),
            version_title: track.get_version_title().to_owned(),
            artists_with_role: track.get_artist_with_role().try_into()?,
        })
    }
}

try_from_repeated_message!(<Track as Metadata>::Message, Tracks);

39
metadata/src/util.rs Normal file
View file

@ -0,0 +1,39 @@
/// Generates a `From<&[$src]>` impl for the wrapper type `$dst`,
/// converting every element with `From::from`.
macro_rules! from_repeated_message {
    ($src:ty, $dst:ty) => {
        impl From<&[$src]> for $dst {
            fn from(src: &[$src]) -> Self {
                Self(src.iter().map(From::from).collect())
            }
        }
    };
}

pub(crate) use from_repeated_message;
/// Generates a `From<&[$src]>` impl for the wrapper type `$dst`,
/// copying each (`Copy`) element through `<$src>::from`.
macro_rules! from_repeated_enum {
    ($src:ty, $dst:ty) => {
        impl From<&[$src]> for $dst {
            fn from(src: &[$src]) -> Self {
                let converted: Vec<_> = src.iter().map(|item| <$src>::from(*item)).collect();
                Self(converted)
            }
        }
    };
}

pub(crate) use from_repeated_enum;
/// Generates a `TryFrom<&[$src]>` impl for the wrapper type `$dst`,
/// short-circuiting on the first element that fails to convert.
macro_rules! try_from_repeated_message {
    ($src:ty, $dst:ty) => {
        impl TryFrom<&[$src]> for $dst {
            type Error = librespot_core::Error;
            fn try_from(src: &[$src]) -> Result<Self, Self::Error> {
                src.iter()
                    .map(TryFrom::try_from)
                    .collect::<Result<Vec<_>, _>>()
                    .map(Self)
            }
        }
    };
}

pub(crate) use try_from_repeated_message;

20
metadata/src/video.rs Normal file
View file

@ -0,0 +1,20 @@
use std::{fmt::Debug, ops::Deref};
use crate::util::from_repeated_message;
use librespot_core::FileId;
use librespot_protocol as protocol;
use protocol::metadata::VideoFile as VideoFileMessage;
/// List of video file IDs attached to an item, dereferencing to the inner `Vec`.
#[derive(Debug, Clone)]
pub struct VideoFiles(pub Vec<FileId>);

impl Deref for VideoFiles {
    type Target = Vec<FileId>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

// Conversion from the repeated protobuf `VideoFile` field.
from_repeated_message!(VideoFileMessage, VideoFiles);

View file

@ -18,15 +18,14 @@ path = "../metadata"
version = "0.4.2" version = "0.4.2"
[dependencies] [dependencies]
futures-executor = "0.3" byteorder = "1"
futures-util = { version = "0.3", default_features = false, features = ["alloc"] } futures-util = "0.3"
log = "0.4" log = "0.4"
parking_lot = { version = "0.12", features = ["deadlock_detection"] } parking_lot = { version = "0.12", features = ["deadlock_detection"] }
byteorder = "1.4" shell-words = "1.1"
shell-words = "1.0.0" thiserror = "1"
tokio = { version = "1", features = ["sync", "parking_lot"] } tokio = { version = "1", features = ["parking_lot", "rt", "rt-multi-thread", "sync"] }
zerocopy = { version = "0.6" } zerocopy = "0.6"
thiserror = { version = "1" }
# Backends # Backends
alsa = { version = "0.6", optional = true } alsa = { version = "0.6", optional = true }
@ -44,9 +43,11 @@ glib = { version = "0.15", optional = true }
rodio = { version = "0.15", optional = true, default-features = false } rodio = { version = "0.15", optional = true, default-features = false }
cpal = { version = "0.13", optional = true } cpal = { version = "0.13", optional = true }
# Decoder # Container and audio decoder
lewton = "0.10" symphonia = { version = "0.5", default-features = false, features = ["mp3", "ogg", "vorbis"] }
ogg = "0.8"
# Legacy Ogg container decoder for the passthrough decoder
ogg = { version = "0.8", optional = true }
# Dithering # Dithering
rand = { version = "0.8", features = ["small_rng"] } rand = { version = "0.8", features = ["small_rng"] }
@ -61,3 +62,5 @@ rodio-backend = ["rodio", "cpal"]
rodiojack-backend = ["rodio", "cpal/jack"] rodiojack-backend = ["rodio", "cpal/jack"]
sdl-backend = ["sdl2"] sdl-backend = ["sdl2"]
gstreamer-backend = ["gstreamer", "gstreamer-app", "gstreamer-audio", "glib"] gstreamer-backend = ["gstreamer", "gstreamer-app", "gstreamer-audio", "glib"]
passthrough-decoder = ["ogg"]

View file

@ -141,10 +141,10 @@ impl Sink for GstreamerSink {
self.appsrc.send_event(FlushStop::new(true)); self.appsrc.send_event(FlushStop::new(true));
self.bufferpool self.bufferpool
.set_active(true) .set_active(true)
.map_err(|e| SinkError::OnWrite(e.to_string()))?; .map_err(|e| SinkError::StateChange(e.to_string()))?;
self.pipeline self.pipeline
.set_state(State::Playing) .set_state(State::Playing)
.map_err(|e| SinkError::OnWrite(e.to_string()))?; .map_err(|e| SinkError::StateChange(e.to_string()))?;
Ok(()) Ok(())
} }
@ -153,10 +153,10 @@ impl Sink for GstreamerSink {
self.appsrc.send_event(FlushStart::new()); self.appsrc.send_event(FlushStart::new());
self.pipeline self.pipeline
.set_state(State::Paused) .set_state(State::Paused)
.map_err(|e| SinkError::OnWrite(e.to_string()))?; .map_err(|e| SinkError::StateChange(e.to_string()))?;
self.bufferpool self.bufferpool
.set_active(false) .set_active(false)
.map_err(|e| SinkError::OnWrite(e.to_string()))?; .map_err(|e| SinkError::StateChange(e.to_string()))?;
Ok(()) Ok(())
} }

View file

@ -13,6 +13,8 @@ pub enum SinkError {
OnWrite(String), OnWrite(String),
#[error("Audio Sink Error Invalid Parameters: {0}")] #[error("Audio Sink Error Invalid Parameters: {0}")]
InvalidParams(String), InvalidParams(String),
#[error("Audio Sink Error Changing State: {0}")]
StateChange(String),
} }
pub type SinkResult<T> = Result<T, SinkError>; pub type SinkResult<T> = Result<T, SinkError>;
@ -71,7 +73,7 @@ macro_rules! sink_as_bytes {
self.write_bytes(samples_s16.as_bytes()) self.write_bytes(samples_s16.as_bytes())
} }
}, },
AudioPacket::OggData(samples) => self.write_bytes(&samples), AudioPacket::Raw(samples) => self.write_bytes(&samples),
} }
} }
}; };

View file

@ -186,6 +186,17 @@ pub fn open(host: cpal::Host, device: Option<String>, format: AudioFormat) -> Ro
} }
impl Sink for RodioSink { impl Sink for RodioSink {
fn start(&mut self) -> SinkResult<()> {
self.rodio_sink.play();
Ok(())
}
fn stop(&mut self) -> SinkResult<()> {
self.rodio_sink.sleep_until_end();
self.rodio_sink.pause();
Ok(())
}
fn write(&mut self, packet: AudioPacket, converter: &mut Converter) -> SinkResult<()> { fn write(&mut self, packet: AudioPacket, converter: &mut Converter) -> SinkResult<()> {
let samples = packet let samples = packet
.samples() .samples()

View file

@ -95,7 +95,7 @@ impl Sink for SdlSink {
let samples = packet let samples = packet
.samples() .samples()
.map_err(|e| SinkError::OnWrite(e.to_string()))?; .map_err(|e| SinkError::OnWrite(e.to_string()))?;
match self { let result = match self {
Self::F32(queue) => { Self::F32(queue) => {
let samples_f32: &[f32] = &converter.f64_to_f32(samples); let samples_f32: &[f32] = &converter.f64_to_f32(samples);
drain_sink!(queue, AudioFormat::F32.size()); drain_sink!(queue, AudioFormat::F32.size());
@ -111,9 +111,8 @@ impl Sink for SdlSink {
drain_sink!(queue, AudioFormat::S16.size()); drain_sink!(queue, AudioFormat::S16.size());
queue.queue_audio(samples_s16) queue.queue_audio(samples_s16)
} }
} };
.map_err(SinkError::OnWrite)?; result.map_err(SinkError::OnWrite)
Ok(())
} }
} }

View file

@ -1,46 +0,0 @@
use super::{AudioDecoder, AudioPacket, DecoderError, DecoderResult};
use lewton::audio::AudioReadError::AudioIsHeader;
use lewton::inside_ogg::OggStreamReader;
use lewton::samples::InterleavedSamples;
use lewton::OggReadError::NoCapturePatternFound;
use lewton::VorbisError::{BadAudio, OggError};
use std::io::{Read, Seek};
/// Ogg Vorbis decoder backed by lewton's `OggStreamReader`.
pub struct VorbisDecoder<R: Read + Seek>(OggStreamReader<R>);

impl<R> VorbisDecoder<R>
where
    R: Read + Seek,
{
    /// Constructs a new decoder from a seekable Ogg Vorbis byte stream.
    ///
    /// Errors from reading the Ogg/Vorbis headers are surfaced as
    /// `DecoderError::LewtonDecoder`.
    pub fn new(input: R) -> DecoderResult<VorbisDecoder<R>> {
        let reader =
            OggStreamReader::new(input).map_err(|e| DecoderError::LewtonDecoder(e.to_string()))?;
        Ok(VorbisDecoder(reader))
    }
}
impl<R> AudioDecoder for VorbisDecoder<R>
where
    R: Read + Seek,
{
    /// Seeks to the given absolute granule position (PCM sample offset)
    /// by scanning Ogg pages.
    fn seek(&mut self, absgp: u64) -> DecoderResult<()> {
        self.0
            .seek_absgp_pg(absgp)
            .map_err(|e| DecoderError::LewtonDecoder(e.to_string()))?;
        Ok(())
    }

    /// Decodes the next packet of interleaved f32 samples (widened to f64).
    ///
    /// Non-audio packets are silently skipped: header packets
    /// (`AudioIsHeader`) and missing capture patterns at stream boundaries
    /// (`NoCapturePatternFound`). Returns `Ok(None)` at end of stream.
    fn next_packet(&mut self) -> DecoderResult<Option<AudioPacket>> {
        loop {
            match self.0.read_dec_packet_generic::<InterleavedSamples<f32>>() {
                Ok(Some(packet)) => return Ok(Some(AudioPacket::samples_from_f32(packet.samples))),
                Ok(None) => return Ok(None),
                Err(BadAudio(AudioIsHeader)) => (),
                Err(OggError(NoCapturePatternFound)) => (),
                Err(e) => return Err(DecoderError::LewtonDecoder(e.to_string())),
            }
        }
    }
}

View file

@ -1,26 +1,30 @@
use std::ops::Deref;
use thiserror::Error; use thiserror::Error;
mod lewton_decoder; #[cfg(feature = "passthrough-decoder")]
pub use lewton_decoder::VorbisDecoder;
mod passthrough_decoder; mod passthrough_decoder;
#[cfg(feature = "passthrough-decoder")]
pub use passthrough_decoder::PassthroughDecoder; pub use passthrough_decoder::PassthroughDecoder;
mod symphonia_decoder;
pub use symphonia_decoder::SymphoniaDecoder;
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum DecoderError { pub enum DecoderError {
#[error("Lewton Decoder Error: {0}")]
LewtonDecoder(String),
#[error("Passthrough Decoder Error: {0}")] #[error("Passthrough Decoder Error: {0}")]
PassthroughDecoder(String), PassthroughDecoder(String),
#[error("Symphonia Decoder Error: {0}")]
SymphoniaDecoder(String),
} }
pub type DecoderResult<T> = Result<T, DecoderError>; pub type DecoderResult<T> = Result<T, DecoderError>;
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum AudioPacketError { pub enum AudioPacketError {
#[error("Decoder OggData Error: Can't return OggData on Samples")] #[error("Decoder Raw Error: Can't return Raw on Samples")]
OggData, Raw,
#[error("Decoder Samples Error: Can't return Samples on OggData")] #[error("Decoder Samples Error: Can't return Samples on Raw")]
Samples, Samples,
} }
@ -28,25 +32,20 @@ pub type AudioPacketResult<T> = Result<T, AudioPacketError>;
pub enum AudioPacket { pub enum AudioPacket {
Samples(Vec<f64>), Samples(Vec<f64>),
OggData(Vec<u8>), Raw(Vec<u8>),
} }
impl AudioPacket { impl AudioPacket {
pub fn samples_from_f32(f32_samples: Vec<f32>) -> Self {
let f64_samples = f32_samples.iter().map(|sample| *sample as f64).collect();
AudioPacket::Samples(f64_samples)
}
pub fn samples(&self) -> AudioPacketResult<&[f64]> { pub fn samples(&self) -> AudioPacketResult<&[f64]> {
match self { match self {
AudioPacket::Samples(s) => Ok(s), AudioPacket::Samples(s) => Ok(s),
AudioPacket::OggData(_) => Err(AudioPacketError::OggData), AudioPacket::Raw(_) => Err(AudioPacketError::Raw),
} }
} }
pub fn oggdata(&self) -> AudioPacketResult<&[u8]> { pub fn raw(&self) -> AudioPacketResult<&[u8]> {
match self { match self {
AudioPacket::OggData(d) => Ok(d), AudioPacket::Raw(d) => Ok(d),
AudioPacket::Samples(_) => Err(AudioPacketError::Samples), AudioPacket::Samples(_) => Err(AudioPacketError::Samples),
} }
} }
@ -54,12 +53,37 @@ impl AudioPacket {
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
match self { match self {
AudioPacket::Samples(s) => s.is_empty(), AudioPacket::Samples(s) => s.is_empty(),
AudioPacket::OggData(d) => d.is_empty(), AudioPacket::Raw(d) => d.is_empty(),
} }
} }
} }
pub trait AudioDecoder { #[derive(Debug, Clone)]
fn seek(&mut self, absgp: u64) -> DecoderResult<()>; pub struct AudioPacketPosition {
fn next_packet(&mut self) -> DecoderResult<Option<AudioPacket>>; pub position_ms: u32,
pub skipped: bool,
}
impl Deref for AudioPacketPosition {
type Target = u32;
fn deref(&self) -> &Self::Target {
&self.position_ms
}
}
pub trait AudioDecoder {
fn seek(&mut self, position_ms: u32) -> Result<u32, DecoderError>;
fn next_packet(&mut self) -> DecoderResult<Option<(AudioPacketPosition, AudioPacket)>>;
}
impl From<DecoderError> for librespot_core::error::Error {
fn from(err: DecoderError) -> Self {
librespot_core::error::Error::aborted(err)
}
}
impl From<symphonia::core::errors::Error> for DecoderError {
fn from(err: symphonia::core::errors::Error) -> Self {
Self::SymphoniaDecoder(err.to_string())
}
} }

View file

@ -1,8 +1,18 @@
// Passthrough decoder for librespot // Passthrough decoder for librespot
use super::{AudioDecoder, AudioPacket, DecoderError, DecoderResult}; use std::{
io::{Read, Seek},
time::{SystemTime, UNIX_EPOCH},
};
// TODO: move this to the Symphonia Ogg demuxer
use ogg::{OggReadError, Packet, PacketReader, PacketWriteEndInfo, PacketWriter}; use ogg::{OggReadError, Packet, PacketReader, PacketWriteEndInfo, PacketWriter};
use std::io::{Read, Seek};
use std::time::{SystemTime, UNIX_EPOCH}; use super::{AudioDecoder, AudioPacket, AudioPacketPosition, DecoderError, DecoderResult};
use crate::{
metadata::audio::{AudioFileFormat, AudioFiles},
MS_PER_PAGE, PAGES_PER_MS,
};
fn get_header<T>(code: u8, rdr: &mut PacketReader<T>) -> DecoderResult<Box<[u8]>> fn get_header<T>(code: u8, rdr: &mut PacketReader<T>) -> DecoderResult<Box<[u8]>>
where where
@ -16,7 +26,7 @@ where
debug!("Vorbis header type {}", &pkt_type); debug!("Vorbis header type {}", &pkt_type);
if pkt_type != code { if pkt_type != code {
return Err(DecoderError::PassthroughDecoder("Invalid Data".to_string())); return Err(DecoderError::PassthroughDecoder("Invalid Data".into()));
} }
Ok(pck.data.into_boxed_slice()) Ok(pck.data.into_boxed_slice())
@ -36,7 +46,14 @@ pub struct PassthroughDecoder<R: Read + Seek> {
impl<R: Read + Seek> PassthroughDecoder<R> { impl<R: Read + Seek> PassthroughDecoder<R> {
/// Constructs a new Decoder from a given implementation of `Read + Seek`. /// Constructs a new Decoder from a given implementation of `Read + Seek`.
pub fn new(rdr: R) -> DecoderResult<Self> { pub fn new(rdr: R, format: AudioFileFormat) -> DecoderResult<Self> {
if !AudioFiles::is_ogg_vorbis(format) {
return Err(DecoderError::PassthroughDecoder(format!(
"Passthrough decoder is not implemented for format {:?}",
format
)));
}
let mut rdr = PacketReader::new(rdr); let mut rdr = PacketReader::new(rdr);
let since_epoch = SystemTime::now() let since_epoch = SystemTime::now()
.duration_since(UNIX_EPOCH) .duration_since(UNIX_EPOCH)
@ -65,10 +82,16 @@ impl<R: Read + Seek> PassthroughDecoder<R> {
bos: false, bos: false,
}) })
} }
fn position_pcm_to_ms(position_pcm: u64) -> u32 {
(position_pcm as f64 * MS_PER_PAGE) as u32
}
} }
impl<R: Read + Seek> AudioDecoder for PassthroughDecoder<R> { impl<R: Read + Seek> AudioDecoder for PassthroughDecoder<R> {
fn seek(&mut self, absgp: u64) -> DecoderResult<()> { fn seek(&mut self, position_ms: u32) -> Result<u32, DecoderError> {
let absgp = (position_ms as f64 * PAGES_PER_MS) as u64;
// add an eos to previous stream if missing // add an eos to previous stream if missing
if self.bos && !self.eos { if self.bos && !self.eos {
match self.rdr.read_packet() { match self.rdr.read_packet() {
@ -101,20 +124,20 @@ impl<R: Read + Seek> AudioDecoder for PassthroughDecoder<R> {
.map_err(|e| DecoderError::PassthroughDecoder(e.to_string()))?; .map_err(|e| DecoderError::PassthroughDecoder(e.to_string()))?;
match pck { match pck {
Some(pck) => { Some(pck) => {
self.ofsgp_page = pck.absgp_page(); let new_page = pck.absgp_page();
debug!("Seek to offset page {}", self.ofsgp_page); self.ofsgp_page = new_page;
Ok(()) debug!("Seek to offset page {}", new_page);
let new_position_ms = Self::position_pcm_to_ms(new_page);
Ok(new_position_ms)
} }
None => Err(DecoderError::PassthroughDecoder( None => Err(DecoderError::PassthroughDecoder("Packet is None".into())),
"Packet is None".to_string(),
)),
} }
} }
Err(e) => Err(DecoderError::PassthroughDecoder(e.to_string())), Err(e) => Err(DecoderError::PassthroughDecoder(e.to_string())),
} }
} }
fn next_packet(&mut self) -> DecoderResult<Option<AudioPacket>> { fn next_packet(&mut self) -> DecoderResult<Option<(AudioPacketPosition, AudioPacket)>> {
// write headers if we are (re)starting // write headers if we are (re)starting
if !self.bos { if !self.bos {
self.wtr self.wtr
@ -184,8 +207,15 @@ impl<R: Read + Seek> AudioDecoder for PassthroughDecoder<R> {
let data = self.wtr.inner_mut(); let data = self.wtr.inner_mut();
if !data.is_empty() { if !data.is_empty() {
let ogg_data = AudioPacket::OggData(std::mem::take(data)); let position_ms = Self::position_pcm_to_ms(pckgp_page);
return Ok(Some(ogg_data)); let packet_position = AudioPacketPosition {
position_ms,
skipped: false,
};
let ogg_data = AudioPacket::Raw(std::mem::take(data));
return Ok(Some((packet_position, ogg_data)));
} }
} }
} }

View file

@ -0,0 +1,226 @@
use std::io;
use symphonia::{
core::{
audio::SampleBuffer,
codecs::{Decoder, DecoderOptions},
errors::Error,
formats::{FormatOptions, FormatReader, SeekMode, SeekTo},
io::{MediaSource, MediaSourceStream, MediaSourceStreamOptions},
meta::{StandardTagKey, Value},
units::Time,
},
default::{
codecs::{Mp3Decoder, VorbisDecoder},
formats::{Mp3Reader, OggReader},
},
};
use super::{AudioDecoder, AudioPacket, AudioPacketPosition, DecoderError, DecoderResult};
use crate::{
metadata::audio::{AudioFileFormat, AudioFiles},
player::NormalisationData,
NUM_CHANNELS, PAGES_PER_MS, SAMPLE_RATE,
};
/// Audio decoder backed by Symphonia, supporting Ogg Vorbis and MP3.
pub struct SymphoniaDecoder {
    // Demuxer for the container format (Ogg or MP3).
    format: Box<dyn FormatReader>,
    // Codec decoder matching the container's audio stream.
    decoder: Box<dyn Decoder>,
    // Lazily initialized on the first decoded packet, sized to that
    // packet's capacity (see `next_packet`).
    sample_buffer: Option<SampleBuffer<f64>>,
}
impl SymphoniaDecoder {
pub fn new<R>(input: R, file_format: AudioFileFormat) -> DecoderResult<Self>
where
R: MediaSource + 'static,
{
let mss_opts = MediaSourceStreamOptions {
buffer_len: librespot_audio::MINIMUM_DOWNLOAD_SIZE,
};
let mss = MediaSourceStream::new(Box::new(input), mss_opts);
let format_opts = FormatOptions {
enable_gapless: true,
..Default::default()
};
let format: Box<dyn FormatReader> = if AudioFiles::is_ogg_vorbis(file_format) {
Box::new(OggReader::try_new(mss, &format_opts)?)
} else if AudioFiles::is_mp3(file_format) {
Box::new(Mp3Reader::try_new(mss, &format_opts)?)
} else {
return Err(DecoderError::SymphoniaDecoder(format!(
"Unsupported format: {:?}",
file_format
)));
};
let track = format.default_track().ok_or_else(|| {
DecoderError::SymphoniaDecoder("Could not retrieve default track".into())
})?;
let decoder_opts: DecoderOptions = Default::default();
let decoder: Box<dyn Decoder> = if AudioFiles::is_ogg_vorbis(file_format) {
Box::new(VorbisDecoder::try_new(&track.codec_params, &decoder_opts)?)
} else if AudioFiles::is_mp3(file_format) {
Box::new(Mp3Decoder::try_new(&track.codec_params, &decoder_opts)?)
} else {
return Err(DecoderError::SymphoniaDecoder(format!(
"Unsupported decoder: {:?}",
file_format
)));
};
let rate = decoder.codec_params().sample_rate.ok_or_else(|| {
DecoderError::SymphoniaDecoder("Could not retrieve sample rate".into())
})?;
if rate != SAMPLE_RATE {
return Err(DecoderError::SymphoniaDecoder(format!(
"Unsupported sample rate: {}",
rate
)));
}
let channels = decoder.codec_params().channels.ok_or_else(|| {
DecoderError::SymphoniaDecoder("Could not retrieve channel configuration".into())
})?;
if channels.count() != NUM_CHANNELS as usize {
return Err(DecoderError::SymphoniaDecoder(format!(
"Unsupported number of channels: {}",
channels
)));
}
Ok(Self {
format,
decoder,
// We set the sample buffer when decoding the first full packet,
// whose duration is also the ideal sample buffer size.
sample_buffer: None,
})
}
pub fn normalisation_data(&mut self) -> Option<NormalisationData> {
let mut metadata = self.format.metadata();
// Advance to the latest metadata revision.
// None means we hit the latest.
loop {
if metadata.pop().is_none() {
break;
}
}
let tags = metadata.current()?.tags();
if tags.is_empty() {
None
} else {
let mut data = NormalisationData::default();
for tag in tags {
if let Value::Float(value) = tag.value {
match tag.std_key {
Some(StandardTagKey::ReplayGainAlbumGain) => data.album_gain_db = value,
Some(StandardTagKey::ReplayGainAlbumPeak) => data.album_peak = value,
Some(StandardTagKey::ReplayGainTrackGain) => data.track_gain_db = value,
Some(StandardTagKey::ReplayGainTrackPeak) => data.track_peak = value,
_ => (),
}
}
}
Some(data)
}
}
fn ts_to_ms(&self, ts: u64) -> u32 {
let time_base = self.decoder.codec_params().time_base;
let seeked_to_ms = match time_base {
Some(time_base) => {
let time = time_base.calc_time(ts);
(time.seconds as f64 + time.frac) * 1000.
}
// Fallback in the unexpected case that the format has no base time set.
None => (ts as f64 * PAGES_PER_MS),
};
seeked_to_ms as u32
}
}
impl AudioDecoder for SymphoniaDecoder {
    /// Seeks the demuxer to `position_ms` and returns the position (in ms)
    /// actually reached, which may differ from the one requested.
    fn seek(&mut self, position_ms: u32) -> Result<u32, DecoderError> {
        // Split the target into whole seconds plus a fractional remainder,
        // as Symphonia's `Time` expects.
        let seconds = position_ms as u64 / 1000;
        let frac = (position_ms as f64 % 1000.) / 1000.;
        let time = Time::new(seconds, frac);

        // `track_id: None` implies the default track ID (of the container, not of Spotify).
        let seeked_to_ts = self.format.seek(
            SeekMode::Accurate,
            SeekTo::Time {
                time,
                track_id: None,
            },
        )?;

        // Seeking is a `FormatReader` operation, so the decoder cannot reliably
        // know when a seek took place. Reset it to avoid audio glitches.
        self.decoder.reset();

        Ok(self.ts_to_ms(seeked_to_ts.actual_ts))
    }

    /// Demuxes and decodes the next audio packet.
    ///
    /// Returns `Ok(None)` at end of stream. Malformed packets are skipped
    /// (logged, and flagged via `AudioPacketPosition::skipped`) rather than
    /// aborting playback.
    fn next_packet(&mut self) -> DecoderResult<Option<(AudioPacketPosition, AudioPacket)>> {
        let mut skipped = false;

        loop {
            let packet = match self.format.next_packet() {
                Ok(packet) => packet,
                Err(Error::IoError(err)) => {
                    // Symphonia reports end of stream as an unexpected EOF.
                    if err.kind() == io::ErrorKind::UnexpectedEof {
                        return Ok(None);
                    } else {
                        return Err(DecoderError::SymphoniaDecoder(err.to_string()));
                    }
                }
                Err(err) => {
                    return Err(err.into());
                }
            };

            let position_ms = self.ts_to_ms(packet.ts());
            let packet_position = AudioPacketPosition {
                position_ms,
                skipped,
            };

            match self.decoder.decode(&packet) {
                Ok(decoded) => {
                    // Lazily create the sample buffer from the first decoded
                    // packet's spec and capacity.
                    let sample_buffer = match self.sample_buffer.as_mut() {
                        Some(buffer) => buffer,
                        None => {
                            let spec = *decoded.spec();
                            let duration = decoded.capacity() as u64;
                            self.sample_buffer.insert(SampleBuffer::new(duration, spec))
                        }
                    };

                    sample_buffer.copy_interleaved_ref(decoded);
                    let samples = AudioPacket::Samples(sample_buffer.samples().to_vec());
                    return Ok(Some((packet_position, samples)));
                }
                Err(Error::DecodeError(_)) => {
                    // The packet failed to decode due to corrupted or invalid data, get a new
                    // packet and try again.
                    warn!("Skipping malformed audio packet at {} ms", position_ms);
                    skipped = true;
                    continue;
                }
                Err(err) => return Err(err.into()),
            }
        }
    }
}

View file

@ -201,7 +201,7 @@ impl Mixer for AlsaMixer {
mapped_volume = LogMapping::linear_to_mapped(mapped_volume, self.db_range); mapped_volume = LogMapping::linear_to_mapped(mapped_volume, self.db_range);
} }
self.config.volume_ctrl.to_unmapped(mapped_volume) self.config.volume_ctrl.as_unmapped(mapped_volume)
} }
fn set_volume(&self, volume: u16) { fn set_volume(&self, volume: u16) {

View file

@ -3,7 +3,7 @@ use crate::player::db_to_ratio;
pub trait MappedCtrl { pub trait MappedCtrl {
fn to_mapped(&self, volume: u16) -> f64; fn to_mapped(&self, volume: u16) -> f64;
fn to_unmapped(&self, mapped_volume: f64) -> u16; fn as_unmapped(&self, mapped_volume: f64) -> u16;
fn db_range(&self) -> f64; fn db_range(&self) -> f64;
fn set_db_range(&mut self, new_db_range: f64); fn set_db_range(&mut self, new_db_range: f64);
@ -49,7 +49,7 @@ impl MappedCtrl for VolumeCtrl {
mapped_volume mapped_volume
} }
fn to_unmapped(&self, mapped_volume: f64) -> u16 { fn as_unmapped(&self, mapped_volume: f64) -> u16 {
// More than just an optimization, this ensures that zero mapped volume // More than just an optimization, this ensures that zero mapped volume
// is unmapped to non-negative real numbers (otherwise the log and cubic // is unmapped to non-negative real numbers (otherwise the log and cubic
// equations would respectively return -inf and -1/9.) // equations would respectively return -inf and -1/9.)

View file

@ -26,7 +26,7 @@ impl Mixer for SoftMixer {
fn volume(&self) -> u16 { fn volume(&self) -> u16 {
let mapped_volume = f64::from_bits(self.volume.load(Ordering::Relaxed)); let mapped_volume = f64::from_bits(self.volume.load(Ordering::Relaxed));
self.volume_ctrl.to_unmapped(mapped_volume) self.volume_ctrl.as_unmapped(mapped_volume)
} }
fn set_volume(&self, volume: u16) { fn set_volume(&self, volume: u16) {

Some files were not shown because too many files have changed in this diff Show more