refactor cache
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
parent 6773fee70b
commit f34e8ba309
5 changed files with 157 additions and 46 deletions

Cargo.lock (generated): 62

@@ -37,6 +37,8 @@ dependencies = [
  "serde",
  "serde_json",
  "tokio",
+ "tower",
+ "tower-http",
  "tracing",
  "tracing-subscriber",
 ]

@@ -385,6 +387,12 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "futures-sink"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5"
+
 [[package]]
 name = "futures-task"
 version = "0.3.30"

@@ -522,6 +530,12 @@ dependencies = [
  "pin-project-lite",
 ]
 
+[[package]]
+name = "http-range-header"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ce4ef31cda248bbdb6e6820603b82dfcd9e833db65a43e997a0ccec777d11fe"
+
 [[package]]
 name = "httparse"
 version = "1.8.0"

@@ -658,6 +672,16 @@ version = "0.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
 
+[[package]]
+name = "mime_guess"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
+dependencies = [
+ "mime",
+ "unicase",
+]
+
 [[package]]
 name = "minimal-lexical"
 version = "0.2.1"

@@ -1231,6 +1255,19 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "tokio-util"
+version = "0.7.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "futures-sink",
+ "pin-project-lite",
+ "tokio",
+]
+
 [[package]]
 name = "tower"
 version = "0.4.13"

@@ -1247,6 +1284,31 @@ dependencies = [
  "tracing",
 ]
 
+[[package]]
+name = "tower-http"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5"
+dependencies = [
+ "bitflags 2.5.0",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-body-util",
+ "http-range-header",
+ "httpdate",
+ "mime",
+ "mime_guess",
+ "percent-encoding",
+ "pin-project-lite",
+ "tokio",
+ "tokio-util",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
 [[package]]
 name = "tower-layer"
 version = "0.3.2"

Cargo.toml

@@ -14,5 +14,7 @@ moka = { version = "0.12.5", features = ["future"] }
 serde = { version = "1.0.197", features = ["derive"] }
 serde_json = "1.0.115"
 tokio = { version = "1.37.0", features = ["full"] }
+tower = "0.4.13"
+tower-http = { version = "0.5.2", features = ["fs"]}
 tracing = "0.1.40"
 tracing-subscriber = "0.3.18"

Dockerfile

@@ -17,7 +17,7 @@ RUN cargo build --release --bin api-server
 
 # We do not need the Rust toolchain to run the binary!
 FROM debian:bookworm-slim AS runtime
-RUN apt update && apt install -y libgdal-dev
+RUN apt update && apt install -y gdal-bin
 WORKDIR /app
 COPY --from=builder /app/target/release/api-server /usr/local/bin
 ENTRYPOINT ["/usr/local/bin/api-server"]

src/dem.rs: 84

@@ -1,50 +1,83 @@
 use std::sync::Arc;
-use std::env;
 
 use gdal::errors::GdalError;
 use gdal::Dataset;
 
-use tracing::{info, debug_span, debug};
+use tracing::{debug, debug_span, info};
 
 use moka::future::Cache;
 
-pub struct MyDataset{pub ds: Dataset}
-unsafe impl Send for MyDataset{}
-unsafe impl Sync for MyDataset{}
+pub struct MyDataset {
+    pub ds: Dataset,
+}
+unsafe impl Send for MyDataset {}
+unsafe impl Sync for MyDataset {}
 
-pub type DSC = Cache<String, Arc<MyDataset>>;
-pub fn new_cache(max_elems: u64) -> DSC {
-    Cache::builder()
-        // Up to 10,000 entries.
-        .max_capacity(max_elems)
-        // Create the cache.
-        .build()
+pub struct DatasetRepository {
+    cache: Cache<String, Arc<MyDataset>>,
+    basedir: String,
 }
 
-pub async fn elevation_from_coordinates(dsc: DSC, lat: f64, lon: f64) -> f64 {
+unsafe impl Send for DatasetRepository {}
+unsafe impl Sync for DatasetRepository {}
+
+impl DatasetRepository {
+    pub fn new(basedir: String) -> Self {
+        let c = Cache::builder()
+            .max_capacity(100)
+            // Create the cache.
+            .build();
+
+        DatasetRepository { cache: c, basedir }
+    }
+
+    async fn get(&self, filename: String) -> Arc<MyDataset> {
+        let full_filename = format!("{}/{filename}", self.basedir);
+
+        if !self.cache.contains_key(&full_filename) {
+            info!("Will open {full_filename} because not in cache!");
+            let ds = Arc::new(MyDataset {
+                ds: Dataset::open(full_filename.clone()).unwrap(),
+            });
+            self.cache.insert(full_filename.clone(), ds).await;
+        }
+
+        match self.cache.get(&full_filename).await {
+            Some(dataset_arc) => dataset_arc,
+            None => panic!("foo")
+        }
+    }
+}
+
+impl Clone for DatasetRepository {
+    fn clone(&self) -> Self {
+        Self {
+            basedir: self.basedir.clone(),
+            cache: self.cache.clone(),
+        }
+    }
+}
+
+pub async fn elevation_from_coordinates(dr: DatasetRepository, lat: f64, lon: f64) -> f64 {
     let span = debug_span!("req", lat=%lat, lon=%lon);
     let _guard = span.enter();
 
     let filename = get_filename_from_latlon(lat, lon);
     debug!(filename, "filename");
 
-    if !dsc.contains_key(&filename) {
-        info!("Will open {filename} because not in cache!");
-        let ds = Arc::new(MyDataset{ds:Dataset::open(filename.clone()).unwrap()});
-        dsc.insert(filename.clone(), ds).await;
-    }
-
-    let ds = &dsc.get(&filename).await.unwrap().ds;
+    let ds = &dr.get(filename).await.ds;
 
     let (px, py) = geo_to_pixel(ds, lat, lon).unwrap();
 
     let raster_band = ds.rasterband(1).unwrap();
-    let raster_value = raster_band.read_as::<f64>((px, py), (1, 1), (1, 1), None).unwrap();
+    let raster_value = raster_band
+        .read_as::<f64>((px, py), (1, 1), (1, 1), None)
+        .unwrap();
     raster_value.data[0]
 }
 
 fn get_filename_from_latlon(lat: f64, lon: f64) -> String {
     // Calculate the rounded values for latitude and longitude
     let rounded_lat = lat.floor();

@@ -58,16 +91,13 @@ fn get_filename_from_latlon(lat: f64, lon: f64) -> String {
     let lat_prefix = if rounded_lat >= 0.0 { "N" } else { "S" };
     let lon_prefix = if rounded_lon >= 0.0 { "E" } else { "W" };
 
-
-    let filename_prefix = env::var("DEM_LOCATION").unwrap_or("/data".to_string());
-
     // Construct the filename
     let filename = format!(
-        "{filename_prefix}/Copernicus_DSM_30_{}{}_00_{}{}_00_DEM.tif",
+        "Copernicus_DSM_30_{}{}_00_{}{}_00_DEM.tif",
         lat_prefix, lat_deg, lon_prefix, lon_deg
     );
 
     filename
 }
 
 fn geo_to_pixel(dataset: &Dataset, lat: f64, lon: f64) -> Result<(isize, isize), GdalError> {
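
Note (not part of the commit): a minimal sketch of how a caller drives the refactored DatasetRepository instead of the old DSC cache. The /data base directory and the coordinates are placeholder assumptions; the snippet assumes it sits next to the dem module above, with the matching Copernicus tiles on disk.

mod dem;

use dem::DatasetRepository;

#[tokio::main]
async fn main() {
    // One repository per process; it wraps the moka cache plus the base
    // directory that DEM tile filenames are resolved against.
    let repo = DatasetRepository::new("/data".to_string());

    // The first lookup opens and caches the GDAL dataset for the tile
    // covering (46.5, 6.6); the second lookup on the same tile hits the cache.
    let first = dem::elevation_from_coordinates(repo.clone(), 46.5, 6.6).await;
    let second = dem::elevation_from_coordinates(repo, 46.5, 6.6).await;
    println!("{first} {second}");
}

Passing a clone is cheap: moka cache handles share the same underlying store, which is what the manual Clone impl in the diff relies on.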

src/main.rs: 49

@@ -1,47 +1,64 @@
 mod dem;
 
-use std::env;
-use axum_macros::debug_handler;
 use axum::{
-    extract::{Path, State}, routing::{get}, Router
+    extract::{Path, State},
+    routing::get,
+    Router,
 };
-use dem::{DSC, new_cache};
+use axum_macros::debug_handler;
+use std::env;
+
+use tower_http::services::ServeDir;
+
+use dem::DatasetRepository;
+
 use tracing::info;
 
-#[tokio::main(flavor = "current_thread")]
+const DEFAULT_DATA_DIR: &str = "/data";
+const DEFAULT_PORT: &str = "3000";
+
+//#[tokio::main(flavor = "current_thread")]
+#[tokio::main]
 async fn main() {
     // initialize tracing
     tracing_subscriber::fmt::init();
 
-    let cache = new_cache(1000);
+    let config = load_config().unwrap();
+    let cache = DatasetRepository::new(config.basedir);
+
+    let serve_dir = ServeDir::new("assets");
 
     // build our application with a route
     let app = Router::new()
         .route("/elevation/:lat/:lon", get(get_elevation))
+        .nest_service("/", serve_dir)
         .with_state(cache);
 
-    let port = env::var("HTTP_PORT").unwrap_or("3000".to_string());
-    let host = format!("[::]:{port}");
+    let host = format!("[::]:{}", config.port);
     info!("Will start server on {host}");
 
     let listener = tokio::net::TcpListener::bind(host).await.unwrap();
     axum::serve(listener, app).await.unwrap();
 }
 
 #[debug_handler]
-async fn get_elevation(State(shared): State<DSC>, Path((lat, lon)): Path<(f64, f64)>) -> String{
-    let ele = dem::elevation_from_coordinates(shared, lat, lon);
+async fn get_elevation(State(dsr): State<DatasetRepository>, Path((lat, lon)): Path<(f64, f64)>) -> String {
+    let ele = dem::elevation_from_coordinates(dsr, lat, lon);
     let myele = ele.await;
     format!("{myele}")
 }
+
+fn load_config() -> Result<Config, env::VarError> {
+    Ok(Config {
+        basedir: env::var("DEM_LOCATION").unwrap_or_else(|_| DEFAULT_DATA_DIR.to_string()),
+        port: env::var("HTTP_PORT").unwrap_or_else(|_| DEFAULT_PORT.to_string()),
+    })
+}
+
+struct Config {
+    basedir: String,
+    port: String,
+}