Add Dockerfile and .dockerignore; replace HashMap dataset cache with moka; make DEM location and port configurable
Signed-off-by: Frank Villaro-Dixon <frank@villaro-dixon.eu>
parent 6338feea30, commit 6773fee70b
4 changed files with 69 additions and 93 deletions
.dockerignore (new file, 1 line)
@@ -0,0 +1 @@
+data
Dockerfile (new file, 23 lines)
@@ -0,0 +1,23 @@
+FROM docker.io/lukemathwalker/cargo-chef:latest-rust-1 AS chef
+WORKDIR /app
+
+FROM chef AS planner
+COPY . .
+RUN cargo chef prepare --recipe-path recipe.json
+
+FROM chef AS builder
+RUN apt update && apt install -y clang libgdal-dev
+RUN apt install -y libclang-dev
+COPY --from=planner /app/recipe.json recipe.json
+# Build dependencies - this is the caching Docker layer!
+RUN cargo chef cook --release --recipe-path recipe.json
+# Build application
+COPY . .
+RUN cargo build --release --bin api-server
+
+# We do not need the Rust toolchain to run the binary!
+FROM debian:bookworm-slim AS runtime
+RUN apt update && apt install -y libgdal-dev
+WORKDIR /app
+COPY --from=builder /app/target/release/api-server /usr/local/bin
+ENTRYPOINT ["/usr/local/bin/api-server"]
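This is the standard cargo-chef three-stage pattern: the planner stage distills the manifests into recipe.json, cargo chef cook compiles only the dependencies so that Docker layer is reused until the dependency set changes, and only then are the application sources copied in and built. The runtime stage drops the Rust toolchain entirely and needs just GDAL's shared libraries. A plausible way to build and run the image (the tag and the host tile directory are illustrative, not part of the commit): docker build -t api-server . followed by docker run -p 3000:3000 -v $PWD/tiles:/data api-server. The .dockerignore entry above keeps the local data directory of DEM tiles out of the build context, so the COPY . . steps stay fast.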
src/dem.rs (95 lines changed)
@@ -1,55 +1,47 @@
 use std::sync::Arc;
-use std::borrow::{Borrow, BorrowMut};
-use std::collections::HashMap;
-use std::sync::{Arc};
+use std::env;
 
 use gdal::errors::GdalError;
 use gdal::Dataset;
-use gdal;
-use tracing_subscriber::registry::Data;
 
-use crate::{DSC};
-
-struct Pos {
-    lat: f64,
-    lon: f64,
-}
+use tracing::{info, debug_span, debug};
+use moka::future::Cache;
 
 pub struct MyDataset{pub ds: Dataset}
 unsafe impl Send for MyDataset{}
 unsafe impl Sync for MyDataset{}
 
-#[derive(Default)]
-pub struct DatasetCache {
-    hm: HashMap<String, MyDataset>
-}
+pub type DSC = Cache<String, Arc<MyDataset>>;
 
-impl DatasetCache {
-    pub fn new() -> Self {
-        let hm = HashMap::<String, MyDataset>::new();
-        DatasetCache {
-            hm
-        }
-    }
-
-    pub fn get_dataset_for_filename(&mut self, filename: String) -> &MyDataset {
-        let ret: &MyDataset;
-
-        if !self.hm.contains_key(&filename){
-            let ds = Dataset::open(filename.clone()).unwrap();
-            self.hm.insert(filename.clone(), MyDataset{ds: ds});
-        }
-
-        self.hm.get(&filename).unwrap().to_owned()
-        /*
-        self.hm.entry(filename.clone()).or_insert_with(|| {
-            println!("{filename} not in cache!");
-            Box::new(Dataset::open(filename).unwrap())
-        })
-        */
-    }
-}
+pub fn new_cache(max_elems: u64) -> DSC {
+    Cache::builder()
+        .max_capacity(max_elems)
+        .build()
+}
+
+pub async fn elevation_from_coordinates(dsc: DSC, lat: f64, lon: f64) -> f64 {
+    let span = debug_span!("req", lat=%lat, lon=%lon);
+    let _guard = span.enter();
+
+    let filename = get_filename_from_latlon(lat, lon);
+    debug!(filename, "filename");
+
+    if !dsc.contains_key(&filename) {
+        info!("Will open {filename} because not in cache!");
+        let ds = Arc::new(MyDataset{ds: Dataset::open(filename.clone()).unwrap()});
+        dsc.insert(filename.clone(), ds).await;
+    }
+
+    let ds = &dsc.get(&filename).await.unwrap().ds;
+
+    let (px, py) = geo_to_pixel(ds, lat, lon).unwrap();
+
+    let raster_band = ds.rasterband(1).unwrap();
+    let raster_value = raster_band.read_as::<f64>((px, py), (1, 1), (1, 1), None).unwrap();
+    raster_value.data[0]
+}
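Taken together, the new dem API is deliberately small: build one cache, then hand clones of it to every lookup. A minimal usage sketch (not part of this commit; the capacity and coordinates are illustrative, and it assumes the mod dem; layout used by src/main.rs):

    use dem::{new_cache, elevation_from_coordinates};

    #[tokio::main(flavor = "current_thread")]
    async fn main() {
        // moka's future::Cache is a cheap-to-clone handle over shared state,
        // so each caller can own its own clone of the cache.
        let cache = new_cache(1000);

        // The first lookup for a tile opens the GeoTIFF through GDAL and
        // inserts it; later lookups that map to the same tile reuse the
        // cached MyDataset instead of reopening the file.
        let e1 = elevation_from_coordinates(cache.clone(), 46.2, 6.15).await;
        let e2 = elevation_from_coordinates(cache.clone(), 46.3, 6.10).await;
        println!("{e1} {e2}");
    }

One caveat: the contains_key check and the insert are separate steps, so two concurrent requests for an uncached tile can both open the same file. moka's get_with entry API would coalesce that work and is the usual fix.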
@@ -66,9 +58,11 @@ fn get_filename_from_latlon(lat: f64, lon: f64) -> String {
     let lat_prefix = if rounded_lat >= 0.0 { "N" } else { "S" };
     let lon_prefix = if rounded_lon >= 0.0 { "E" } else { "W" };
 
+    let filename_prefix = env::var("DEM_LOCATION").unwrap_or("/data".to_string());
+
     // Construct the filename
     let filename = format!(
-        "Copernicus_DSM_30_{}{}_00_{}{}_00_DEM.tif",
+        "{filename_prefix}/Copernicus_DSM_30_{}{}_00_{}{}_00_DEM.tif",
         lat_prefix, lat_deg, lon_prefix, lon_deg
     );
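The effect of this hunk is that the tile directory is now configurable through DEM_LOCATION, defaulting to /data, which matches the volume the Dockerfile's runtime stage expects. Assuming lat_deg and lon_deg are zero-padded earlier in this function following the Copernicus GLO-30 naming scheme (two digits for latitude, three for longitude; that padding is not visible in this hunk), a lookup near Geneva would resolve to a path like /data/Copernicus_DSM_30_N46_00_E006_00_DEM.tif.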
@@ -82,30 +76,3 @@ fn geo_to_pixel(dataset: &Dataset, lat: f64, lon: f64) -> Result<(isize, isize),
     let y_pixel = ((lat - transform[3]) / transform[5]).round() as isize;
     Ok((x_pixel, y_pixel))
 }
-
-pub async fn elevation_from_coordinates(dsc: DSC, lat: f64, lon: f64) -> f64 {
-    let file = get_filename_from_latlon(lat, lon);
-    let full_filename = format!("data/{file}");
-
-    println!("file for {lat} {lon} is {full_filename}");
-
-    if !dsc.contains_key(&full_filename) {
-        println!(">>> WILL GET {full_filename} because not in cache!");
-        let ds = Arc::new(MyDataset{ds: Dataset::open(full_filename.clone()).unwrap()});
-        dsc.insert(full_filename.clone(), ds).await;
-    }
-
-    let ds = &dsc.get(&full_filename).await.unwrap().ds;
-
-    println!("This {} is in '{}' and has {} bands.", ds.driver().long_name(), ds.spatial_ref().unwrap().name().unwrap(), ds.raster_count());
-    println!("PRojection: {}", ds.projection());
-
-    let (px, py) = geo_to_pixel(&ds, lat, lon).unwrap();
-
-    let raster_band = ds.rasterband(1).unwrap();
-    let raster_value = raster_band.read_as::<f64>((px, py), (1, 1), (1, 1), None).unwrap();
-    raster_value.data[0]
-}
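For context, geo_to_pixel (whose tail appears in the context lines above) inverts GDAL's affine geotransform to turn a longitude/latitude pair into raster indices. A self-contained sketch of that arithmetic, assuming a north-up raster whose rotation terms are zero (the case for Copernicus DEM tiles); the tile geometry in main below is illustrative:

    // GDAL's geotransform is [origin_x, pixel_width, rot_x, origin_y, rot_y,
    // pixel_height]; pixel_height is negative because row indices grow
    // southward. With rot_x = rot_y = 0 the inverse mapping is one division
    // per axis, exactly as in geo_to_pixel above.
    fn geo_to_pixel_sketch(transform: &[f64; 6], lat: f64, lon: f64) -> (isize, isize) {
        let x_pixel = ((lon - transform[0]) / transform[1]).round() as isize;
        let y_pixel = ((lat - transform[3]) / transform[5]).round() as isize;
        (x_pixel, y_pixel)
    }

    fn main() {
        // A 1x1 degree tile of 3600x3600 pixels anchored at N47/E006.
        let gt = [6.0, 1.0 / 3600.0, 0.0, 47.0, 0.0, -1.0 / 3600.0];
        assert_eq!(geo_to_pixel_sketch(&gt, 46.5, 6.5), (1800, 1800));
    }

Rounding to the nearest pixel makes this a nearest-neighbour lookup; interpolating between the four surrounding cells would be the usual refinement.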
src/main.rs (41 lines changed)
@@ -2,23 +2,16 @@
 mod dem;
 
-use std::sync::Arc;
+use std::env;
 
 use axum_macros::debug_handler;
 use axum::{
-    extract::{Path, State}, http::StatusCode, routing::{get}, Router
+    extract::{Path, State}, routing::{get}, Router
 };
-use dem::{MyDataset};
+use dem::{DSC, new_cache};
 
-use moka::future::Cache;
+use tracing::info;
 
-//#[derive(Default)]
-//struct AppState {
-//    db: RwLock<HashMap<String, Dataset>>,
-//}
-
-type DSC = Cache<String, Arc<MyDataset>>;
 
 #[tokio::main(flavor = "current_thread")]
 async fn main() {
@@ -26,29 +19,21 @@ async fn main() {
     tracing_subscriber::fmt::init();
 
-    // Evict based on the number of entries in the cache.
-    let cache = Cache::builder()
-        // Up to 10,000 entries.
-        .max_capacity(10_000)
-        // Create the cache.
-        .build();
-    //cache.insert("hello".to_string(), Arc::new(dem::MyDataset{ds: Dataset::open("oueou").unwrap()})).await;
+    let cache = new_cache(1000);
 
     // build our application with a route
     let app = Router::new()
-        // `GET /` goes to `root`
-        .route("/", get(root))
         .route("/elevation/:lat/:lon", get(get_elevation))
         .with_state(cache);
 
-    // run our app with hyper, listening globally on port 3000
-    let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
-    axum::serve(listener, app).await.unwrap();
-}
-
-// basic handler that responds with a static string
-async fn root() -> &'static str {
-    "Hello, World!"
+    let port = env::var("HTTP_PORT").unwrap_or("3000".to_string());
+    let host = format!("[::]:{port}");
+    info!("Will start server on {host}");
+
+    let listener = tokio::net::TcpListener::bind(host).await.unwrap();
+    axum::serve(listener, app).await.unwrap();
 }
@@ -57,6 +42,6 @@ async fn get_elevation(State(shared): State<DSC>, Path((lat, lon)): Path<(f64, f
 
     let ele = dem::elevation_from_coordinates(shared, lat, lon);
     let myele = ele.await;
-    format!("{lat} {lon} {myele}")
+    format!("{myele}")
 }
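After these changes the service exposes a single route. Once main is listening on [::]:3000 (or the port given by HTTP_PORT), a request such as GET /elevation/46.2/6.15 returns only the elevation as plain text, since the handler's response was trimmed from "{lat} {lon} {myele}" to "{myele}". For example, curl http://localhost:3000/elevation/46.2/6.15 might print 411.5 (an illustrative value, not taken from the commit).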